diff --git a/clients/client-amplifybackend/.gitignore b/clients/client-amplifybackend/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-amplifybackend/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-amplifybackend/.npmignore b/clients/client-amplifybackend/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-amplifybackend/.npmignore @@ -0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-amplifybackend/AmplifyBackend.ts b/clients/client-amplifybackend/AmplifyBackend.ts new file mode 100644 index 000000000000..4518096f6af1 --- /dev/null +++ b/clients/client-amplifybackend/AmplifyBackend.ts @@ -0,0 +1,852 @@ +import { AmplifyBackendClient } from "./AmplifyBackendClient"; +import { + CloneBackendCommand, + CloneBackendCommandInput, + CloneBackendCommandOutput, +} from "./commands/CloneBackendCommand"; +import { + CreateBackendAPICommand, + CreateBackendAPICommandInput, + CreateBackendAPICommandOutput, +} from "./commands/CreateBackendAPICommand"; +import { + CreateBackendAuthCommand, + CreateBackendAuthCommandInput, + CreateBackendAuthCommandOutput, +} from "./commands/CreateBackendAuthCommand"; +import { + CreateBackendCommand, + CreateBackendCommandInput, + CreateBackendCommandOutput, +} from "./commands/CreateBackendCommand"; +import { + CreateBackendConfigCommand, + CreateBackendConfigCommandInput, + CreateBackendConfigCommandOutput, +} from "./commands/CreateBackendConfigCommand"; +import { CreateTokenCommand, CreateTokenCommandInput, CreateTokenCommandOutput } from "./commands/CreateTokenCommand"; +import { + DeleteBackendAPICommand, + DeleteBackendAPICommandInput, + DeleteBackendAPICommandOutput, +} from "./commands/DeleteBackendAPICommand"; +import { + DeleteBackendAuthCommand, + 
DeleteBackendAuthCommandInput, + DeleteBackendAuthCommandOutput, +} from "./commands/DeleteBackendAuthCommand"; +import { + DeleteBackendCommand, + DeleteBackendCommandInput, + DeleteBackendCommandOutput, +} from "./commands/DeleteBackendCommand"; +import { DeleteTokenCommand, DeleteTokenCommandInput, DeleteTokenCommandOutput } from "./commands/DeleteTokenCommand"; +import { + GenerateBackendAPIModelsCommand, + GenerateBackendAPIModelsCommandInput, + GenerateBackendAPIModelsCommandOutput, +} from "./commands/GenerateBackendAPIModelsCommand"; +import { + GetBackendAPICommand, + GetBackendAPICommandInput, + GetBackendAPICommandOutput, +} from "./commands/GetBackendAPICommand"; +import { + GetBackendAPIModelsCommand, + GetBackendAPIModelsCommandInput, + GetBackendAPIModelsCommandOutput, +} from "./commands/GetBackendAPIModelsCommand"; +import { + GetBackendAuthCommand, + GetBackendAuthCommandInput, + GetBackendAuthCommandOutput, +} from "./commands/GetBackendAuthCommand"; +import { GetBackendCommand, GetBackendCommandInput, GetBackendCommandOutput } from "./commands/GetBackendCommand"; +import { + GetBackendJobCommand, + GetBackendJobCommandInput, + GetBackendJobCommandOutput, +} from "./commands/GetBackendJobCommand"; +import { GetTokenCommand, GetTokenCommandInput, GetTokenCommandOutput } from "./commands/GetTokenCommand"; +import { + ListBackendJobsCommand, + ListBackendJobsCommandInput, + ListBackendJobsCommandOutput, +} from "./commands/ListBackendJobsCommand"; +import { + RemoveAllBackendsCommand, + RemoveAllBackendsCommandInput, + RemoveAllBackendsCommandOutput, +} from "./commands/RemoveAllBackendsCommand"; +import { + RemoveBackendConfigCommand, + RemoveBackendConfigCommandInput, + RemoveBackendConfigCommandOutput, +} from "./commands/RemoveBackendConfigCommand"; +import { + UpdateBackendAPICommand, + UpdateBackendAPICommandInput, + UpdateBackendAPICommandOutput, +} from "./commands/UpdateBackendAPICommand"; +import { + UpdateBackendAuthCommand, + 
UpdateBackendAuthCommandInput, + UpdateBackendAuthCommandOutput, +} from "./commands/UpdateBackendAuthCommand"; +import { + UpdateBackendConfigCommand, + UpdateBackendConfigCommandInput, + UpdateBackendConfigCommandOutput, +} from "./commands/UpdateBackendConfigCommand"; +import { + UpdateBackendJobCommand, + UpdateBackendJobCommandInput, + UpdateBackendJobCommandOutput, +} from "./commands/UpdateBackendJobCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + *

AWS Amplify Admin API

+ */ +export class AmplifyBackend extends AmplifyBackendClient { + /** + *

This operation clones an existing backend.

+ */ + public cloneBackend( + args: CloneBackendCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public cloneBackend(args: CloneBackendCommandInput, cb: (err: any, data?: CloneBackendCommandOutput) => void): void; + public cloneBackend( + args: CloneBackendCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CloneBackendCommandOutput) => void + ): void; + public cloneBackend( + args: CloneBackendCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CloneBackendCommandOutput) => void), + cb?: (err: any, data?: CloneBackendCommandOutput) => void + ): Promise | void { + const command = new CloneBackendCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

This operation creates a backend for an Amplify app. Backends are automatically created at the time of app creation.

+ */ + public createBackend( + args: CreateBackendCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createBackend( + args: CreateBackendCommandInput, + cb: (err: any, data?: CreateBackendCommandOutput) => void + ): void; + public createBackend( + args: CreateBackendCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackendCommandOutput) => void + ): void; + public createBackend( + args: CreateBackendCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateBackendCommandOutput) => void), + cb?: (err: any, data?: CreateBackendCommandOutput) => void + ): Promise | void { + const command = new CreateBackendCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Creates a new backend API resource.

+ */ + public createBackendAPI( + args: CreateBackendAPICommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createBackendAPI( + args: CreateBackendAPICommandInput, + cb: (err: any, data?: CreateBackendAPICommandOutput) => void + ): void; + public createBackendAPI( + args: CreateBackendAPICommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackendAPICommandOutput) => void + ): void; + public createBackendAPI( + args: CreateBackendAPICommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateBackendAPICommandOutput) => void), + cb?: (err: any, data?: CreateBackendAPICommandOutput) => void + ): Promise | void { + const command = new CreateBackendAPICommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Creates a new backend authentication resource.

+ */ + public createBackendAuth( + args: CreateBackendAuthCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createBackendAuth( + args: CreateBackendAuthCommandInput, + cb: (err: any, data?: CreateBackendAuthCommandOutput) => void + ): void; + public createBackendAuth( + args: CreateBackendAuthCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackendAuthCommandOutput) => void + ): void; + public createBackendAuth( + args: CreateBackendAuthCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateBackendAuthCommandOutput) => void), + cb?: (err: any, data?: CreateBackendAuthCommandOutput) => void + ): Promise | void { + const command = new CreateBackendAuthCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Creates a config object for a backend.

+ */ + public createBackendConfig( + args: CreateBackendConfigCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createBackendConfig( + args: CreateBackendConfigCommandInput, + cb: (err: any, data?: CreateBackendConfigCommandOutput) => void + ): void; + public createBackendConfig( + args: CreateBackendConfigCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateBackendConfigCommandOutput) => void + ): void; + public createBackendConfig( + args: CreateBackendConfigCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateBackendConfigCommandOutput) => void), + cb?: (err: any, data?: CreateBackendConfigCommandOutput) => void + ): Promise | void { + const command = new CreateBackendConfigCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Generates a one time challenge code to authenticate a user into your Amplify Admin UI.

+ */ + public createToken(args: CreateTokenCommandInput, options?: __HttpHandlerOptions): Promise; + public createToken(args: CreateTokenCommandInput, cb: (err: any, data?: CreateTokenCommandOutput) => void): void; + public createToken( + args: CreateTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateTokenCommandOutput) => void + ): void; + public createToken( + args: CreateTokenCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateTokenCommandOutput) => void), + cb?: (err: any, data?: CreateTokenCommandOutput) => void + ): Promise | void { + const command = new CreateTokenCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Removes an existing environment from your Ampify project.

+ */ + public deleteBackend( + args: DeleteBackendCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteBackend( + args: DeleteBackendCommandInput, + cb: (err: any, data?: DeleteBackendCommandOutput) => void + ): void; + public deleteBackend( + args: DeleteBackendCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBackendCommandOutput) => void + ): void; + public deleteBackend( + args: DeleteBackendCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteBackendCommandOutput) => void), + cb?: (err: any, data?: DeleteBackendCommandOutput) => void + ): Promise | void { + const command = new DeleteBackendCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Deletes an existing backend API resource.

+ */ + public deleteBackendAPI( + args: DeleteBackendAPICommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteBackendAPI( + args: DeleteBackendAPICommandInput, + cb: (err: any, data?: DeleteBackendAPICommandOutput) => void + ): void; + public deleteBackendAPI( + args: DeleteBackendAPICommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBackendAPICommandOutput) => void + ): void; + public deleteBackendAPI( + args: DeleteBackendAPICommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteBackendAPICommandOutput) => void), + cb?: (err: any, data?: DeleteBackendAPICommandOutput) => void + ): Promise | void { + const command = new DeleteBackendAPICommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Deletes an existing backend authentication resource.

+ */ + public deleteBackendAuth( + args: DeleteBackendAuthCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteBackendAuth( + args: DeleteBackendAuthCommandInput, + cb: (err: any, data?: DeleteBackendAuthCommandOutput) => void + ): void; + public deleteBackendAuth( + args: DeleteBackendAuthCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteBackendAuthCommandOutput) => void + ): void; + public deleteBackendAuth( + args: DeleteBackendAuthCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteBackendAuthCommandOutput) => void), + cb?: (err: any, data?: DeleteBackendAuthCommandOutput) => void + ): Promise | void { + const command = new DeleteBackendAuthCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Deletes the challenge token based on the given appId and sessionId.

+ */ + public deleteToken(args: DeleteTokenCommandInput, options?: __HttpHandlerOptions): Promise; + public deleteToken(args: DeleteTokenCommandInput, cb: (err: any, data?: DeleteTokenCommandOutput) => void): void; + public deleteToken( + args: DeleteTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteTokenCommandOutput) => void + ): void; + public deleteToken( + args: DeleteTokenCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteTokenCommandOutput) => void), + cb?: (err: any, data?: DeleteTokenCommandOutput) => void + ): Promise | void { + const command = new DeleteTokenCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Generates a model schema for an existing backend API resource.

+ */ + public generateBackendAPIModels( + args: GenerateBackendAPIModelsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public generateBackendAPIModels( + args: GenerateBackendAPIModelsCommandInput, + cb: (err: any, data?: GenerateBackendAPIModelsCommandOutput) => void + ): void; + public generateBackendAPIModels( + args: GenerateBackendAPIModelsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GenerateBackendAPIModelsCommandOutput) => void + ): void; + public generateBackendAPIModels( + args: GenerateBackendAPIModelsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GenerateBackendAPIModelsCommandOutput) => void), + cb?: (err: any, data?: GenerateBackendAPIModelsCommandOutput) => void + ): Promise | void { + const command = new GenerateBackendAPIModelsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Provides project level details for your Amplify UI project.

+ */ + public getBackend(args: GetBackendCommandInput, options?: __HttpHandlerOptions): Promise; + public getBackend(args: GetBackendCommandInput, cb: (err: any, data?: GetBackendCommandOutput) => void): void; + public getBackend( + args: GetBackendCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBackendCommandOutput) => void + ): void; + public getBackend( + args: GetBackendCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetBackendCommandOutput) => void), + cb?: (err: any, data?: GetBackendCommandOutput) => void + ): Promise | void { + const command = new GetBackendCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Gets the details for a backend api.

+ */ + public getBackendAPI( + args: GetBackendAPICommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getBackendAPI( + args: GetBackendAPICommandInput, + cb: (err: any, data?: GetBackendAPICommandOutput) => void + ): void; + public getBackendAPI( + args: GetBackendAPICommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBackendAPICommandOutput) => void + ): void; + public getBackendAPI( + args: GetBackendAPICommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetBackendAPICommandOutput) => void), + cb?: (err: any, data?: GetBackendAPICommandOutput) => void + ): Promise | void { + const command = new GetBackendAPICommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Generates a model schema for existing backend API resource.

+ */ + public getBackendAPIModels( + args: GetBackendAPIModelsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getBackendAPIModels( + args: GetBackendAPIModelsCommandInput, + cb: (err: any, data?: GetBackendAPIModelsCommandOutput) => void + ): void; + public getBackendAPIModels( + args: GetBackendAPIModelsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBackendAPIModelsCommandOutput) => void + ): void; + public getBackendAPIModels( + args: GetBackendAPIModelsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetBackendAPIModelsCommandOutput) => void), + cb?: (err: any, data?: GetBackendAPIModelsCommandOutput) => void + ): Promise | void { + const command = new GetBackendAPIModelsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Gets a backend auth details.

+ */ + public getBackendAuth( + args: GetBackendAuthCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getBackendAuth( + args: GetBackendAuthCommandInput, + cb: (err: any, data?: GetBackendAuthCommandOutput) => void + ): void; + public getBackendAuth( + args: GetBackendAuthCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBackendAuthCommandOutput) => void + ): void; + public getBackendAuth( + args: GetBackendAuthCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetBackendAuthCommandOutput) => void), + cb?: (err: any, data?: GetBackendAuthCommandOutput) => void + ): Promise | void { + const command = new GetBackendAuthCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Returns information about a specific job.

+ */ + public getBackendJob( + args: GetBackendJobCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getBackendJob( + args: GetBackendJobCommandInput, + cb: (err: any, data?: GetBackendJobCommandOutput) => void + ): void; + public getBackendJob( + args: GetBackendJobCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetBackendJobCommandOutput) => void + ): void; + public getBackendJob( + args: GetBackendJobCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetBackendJobCommandOutput) => void), + cb?: (err: any, data?: GetBackendJobCommandOutput) => void + ): Promise | void { + const command = new GetBackendJobCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Gets the challenge token based on the given appId and sessionId.

+ */ + public getToken(args: GetTokenCommandInput, options?: __HttpHandlerOptions): Promise; + public getToken(args: GetTokenCommandInput, cb: (err: any, data?: GetTokenCommandOutput) => void): void; + public getToken( + args: GetTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetTokenCommandOutput) => void + ): void; + public getToken( + args: GetTokenCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetTokenCommandOutput) => void), + cb?: (err: any, data?: GetTokenCommandOutput) => void + ): Promise | void { + const command = new GetTokenCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Lists the jobs for the backend of an Amplify app.

+ */ + public listBackendJobs( + args: ListBackendJobsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listBackendJobs( + args: ListBackendJobsCommandInput, + cb: (err: any, data?: ListBackendJobsCommandOutput) => void + ): void; + public listBackendJobs( + args: ListBackendJobsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListBackendJobsCommandOutput) => void + ): void; + public listBackendJobs( + args: ListBackendJobsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListBackendJobsCommandOutput) => void), + cb?: (err: any, data?: ListBackendJobsCommandOutput) => void + ): Promise | void { + const command = new ListBackendJobsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Removes all backend environments from your Amplify project.

+ */ + public removeAllBackends( + args: RemoveAllBackendsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public removeAllBackends( + args: RemoveAllBackendsCommandInput, + cb: (err: any, data?: RemoveAllBackendsCommandOutput) => void + ): void; + public removeAllBackends( + args: RemoveAllBackendsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RemoveAllBackendsCommandOutput) => void + ): void; + public removeAllBackends( + args: RemoveAllBackendsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: RemoveAllBackendsCommandOutput) => void), + cb?: (err: any, data?: RemoveAllBackendsCommandOutput) => void + ): Promise | void { + const command = new RemoveAllBackendsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Removes the AWS resources required to access the Amplify Admin UI.

+ */ + public removeBackendConfig( + args: RemoveBackendConfigCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public removeBackendConfig( + args: RemoveBackendConfigCommandInput, + cb: (err: any, data?: RemoveBackendConfigCommandOutput) => void + ): void; + public removeBackendConfig( + args: RemoveBackendConfigCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RemoveBackendConfigCommandOutput) => void + ): void; + public removeBackendConfig( + args: RemoveBackendConfigCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: RemoveBackendConfigCommandOutput) => void), + cb?: (err: any, data?: RemoveBackendConfigCommandOutput) => void + ): Promise | void { + const command = new RemoveBackendConfigCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Updates an existing backend API resource.

+ */ + public updateBackendAPI( + args: UpdateBackendAPICommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateBackendAPI( + args: UpdateBackendAPICommandInput, + cb: (err: any, data?: UpdateBackendAPICommandOutput) => void + ): void; + public updateBackendAPI( + args: UpdateBackendAPICommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateBackendAPICommandOutput) => void + ): void; + public updateBackendAPI( + args: UpdateBackendAPICommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateBackendAPICommandOutput) => void), + cb?: (err: any, data?: UpdateBackendAPICommandOutput) => void + ): Promise | void { + const command = new UpdateBackendAPICommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Updates an existing backend authentication resource.

+ */ + public updateBackendAuth( + args: UpdateBackendAuthCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateBackendAuth( + args: UpdateBackendAuthCommandInput, + cb: (err: any, data?: UpdateBackendAuthCommandOutput) => void + ): void; + public updateBackendAuth( + args: UpdateBackendAuthCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateBackendAuthCommandOutput) => void + ): void; + public updateBackendAuth( + args: UpdateBackendAuthCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateBackendAuthCommandOutput) => void), + cb?: (err: any, data?: UpdateBackendAuthCommandOutput) => void + ): Promise | void { + const command = new UpdateBackendAuthCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Updates the AWS resources required to access the Amplify Admin UI.

+ */ + public updateBackendConfig( + args: UpdateBackendConfigCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateBackendConfig( + args: UpdateBackendConfigCommandInput, + cb: (err: any, data?: UpdateBackendConfigCommandOutput) => void + ): void; + public updateBackendConfig( + args: UpdateBackendConfigCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateBackendConfigCommandOutput) => void + ): void; + public updateBackendConfig( + args: UpdateBackendConfigCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateBackendConfigCommandOutput) => void), + cb?: (err: any, data?: UpdateBackendConfigCommandOutput) => void + ): Promise | void { + const command = new UpdateBackendConfigCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Updates a specific job.

+ */ + public updateBackendJob( + args: UpdateBackendJobCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateBackendJob( + args: UpdateBackendJobCommandInput, + cb: (err: any, data?: UpdateBackendJobCommandOutput) => void + ): void; + public updateBackendJob( + args: UpdateBackendJobCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateBackendJobCommandOutput) => void + ): void; + public updateBackendJob( + args: UpdateBackendJobCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateBackendJobCommandOutput) => void), + cb?: (err: any, data?: UpdateBackendJobCommandOutput) => void + ): Promise | void { + const command = new UpdateBackendJobCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-amplifybackend/AmplifyBackendClient.ts b/clients/client-amplifybackend/AmplifyBackendClient.ts new file mode 100644 index 000000000000..1e71768b091f --- /dev/null +++ b/clients/client-amplifybackend/AmplifyBackendClient.ts @@ -0,0 +1,288 @@ +import { CloneBackendCommandInput, CloneBackendCommandOutput } from "./commands/CloneBackendCommand"; +import { CreateBackendAPICommandInput, CreateBackendAPICommandOutput } from "./commands/CreateBackendAPICommand"; +import { CreateBackendAuthCommandInput, CreateBackendAuthCommandOutput } from "./commands/CreateBackendAuthCommand"; +import { CreateBackendCommandInput, CreateBackendCommandOutput } from "./commands/CreateBackendCommand"; +import { + CreateBackendConfigCommandInput, + CreateBackendConfigCommandOutput, +} from "./commands/CreateBackendConfigCommand"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from 
"./commands/CreateTokenCommand"; +import { DeleteBackendAPICommandInput, DeleteBackendAPICommandOutput } from "./commands/DeleteBackendAPICommand"; +import { DeleteBackendAuthCommandInput, DeleteBackendAuthCommandOutput } from "./commands/DeleteBackendAuthCommand"; +import { DeleteBackendCommandInput, DeleteBackendCommandOutput } from "./commands/DeleteBackendCommand"; +import { DeleteTokenCommandInput, DeleteTokenCommandOutput } from "./commands/DeleteTokenCommand"; +import { + GenerateBackendAPIModelsCommandInput, + GenerateBackendAPIModelsCommandOutput, +} from "./commands/GenerateBackendAPIModelsCommand"; +import { GetBackendAPICommandInput, GetBackendAPICommandOutput } from "./commands/GetBackendAPICommand"; +import { + GetBackendAPIModelsCommandInput, + GetBackendAPIModelsCommandOutput, +} from "./commands/GetBackendAPIModelsCommand"; +import { GetBackendAuthCommandInput, GetBackendAuthCommandOutput } from "./commands/GetBackendAuthCommand"; +import { GetBackendCommandInput, GetBackendCommandOutput } from "./commands/GetBackendCommand"; +import { GetBackendJobCommandInput, GetBackendJobCommandOutput } from "./commands/GetBackendJobCommand"; +import { GetTokenCommandInput, GetTokenCommandOutput } from "./commands/GetTokenCommand"; +import { ListBackendJobsCommandInput, ListBackendJobsCommandOutput } from "./commands/ListBackendJobsCommand"; +import { RemoveAllBackendsCommandInput, RemoveAllBackendsCommandOutput } from "./commands/RemoveAllBackendsCommand"; +import { + RemoveBackendConfigCommandInput, + RemoveBackendConfigCommandOutput, +} from "./commands/RemoveBackendConfigCommand"; +import { UpdateBackendAPICommandInput, UpdateBackendAPICommandOutput } from "./commands/UpdateBackendAPICommand"; +import { UpdateBackendAuthCommandInput, UpdateBackendAuthCommandOutput } from "./commands/UpdateBackendAuthCommand"; +import { + UpdateBackendConfigCommandInput, + UpdateBackendConfigCommandOutput, +} from "./commands/UpdateBackendConfigCommand"; +import { 
UpdateBackendJobCommandInput, UpdateBackendJobCommandOutput } from "./commands/UpdateBackendJobCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = + | CloneBackendCommandInput + | CreateBackendAPICommandInput + | CreateBackendAuthCommandInput + | CreateBackendCommandInput + | CreateBackendConfigCommandInput + | CreateTokenCommandInput + | DeleteBackendAPICommandInput + | DeleteBackendAuthCommandInput + | DeleteBackendCommandInput + 
| DeleteTokenCommandInput + | GenerateBackendAPIModelsCommandInput + | GetBackendAPICommandInput + | GetBackendAPIModelsCommandInput + | GetBackendAuthCommandInput + | GetBackendCommandInput + | GetBackendJobCommandInput + | GetTokenCommandInput + | ListBackendJobsCommandInput + | RemoveAllBackendsCommandInput + | RemoveBackendConfigCommandInput + | UpdateBackendAPICommandInput + | UpdateBackendAuthCommandInput + | UpdateBackendConfigCommandInput + | UpdateBackendJobCommandInput; + +export type ServiceOutputTypes = + | CloneBackendCommandOutput + | CreateBackendAPICommandOutput + | CreateBackendAuthCommandOutput + | CreateBackendCommandOutput + | CreateBackendConfigCommandOutput + | CreateTokenCommandOutput + | DeleteBackendAPICommandOutput + | DeleteBackendAuthCommandOutput + | DeleteBackendCommandOutput + | DeleteTokenCommandOutput + | GenerateBackendAPIModelsCommandOutput + | GetBackendAPICommandOutput + | GetBackendAPIModelsCommandOutput + | GetBackendAuthCommandOutput + | GetBackendCommandOutput + | GetBackendJobCommandOutput + | GetTokenCommandOutput + | ListBackendJobsCommandOutput + | RemoveAllBackendsCommandOutput + | RemoveBackendConfigCommandOutput + | UpdateBackendAPICommandOutput + | UpdateBackendAuthCommandOutput + | UpdateBackendConfigCommandOutput + | UpdateBackendJobCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. + */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. 
+ */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. + */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region. 
+ */ + regionInfoProvider?: RegionInfoProvider; +} + +export type AmplifyBackendClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type AmplifyBackendClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + *

AWS Amplify Admin API

+ */ +export class AmplifyBackendClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + AmplifyBackendClientResolvedConfig +> { + readonly config: AmplifyBackendClientResolvedConfig; + + constructor(configuration: AmplifyBackendClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-amplifybackend/LICENSE b/clients/client-amplifybackend/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-amplifybackend/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-amplifybackend/README.md b/clients/client-amplifybackend/README.md new file mode 100644 index 000000000000..a9536c7cbe27 --- /dev/null +++ b/clients/client-amplifybackend/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-amplifybackend + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-amplifybackend/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-amplifybackend) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-amplifybackend.svg)](https://www.npmjs.com/package/@aws-sdk/client-amplifybackend) + +For SDK usage, please refer to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-amplifybackend/commands/CloneBackendCommand.ts b/clients/client-amplifybackend/commands/CloneBackendCommand.ts new file mode 100644 index 000000000000..c815afdc69fc --- /dev/null +++ b/clients/client-amplifybackend/commands/CloneBackendCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { CloneBackendRequest, CloneBackendResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CloneBackendCommand, + serializeAws_restJson1CloneBackendCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CloneBackendCommandInput = CloneBackendRequest; +export type CloneBackendCommandOutput = CloneBackendResponse & __MetadataBearer; + +/** + *

This operation clones an existing backend.

+ */ +export class CloneBackendCommand extends $Command< + CloneBackendCommandInput, + CloneBackendCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CloneBackendCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "CloneBackendCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CloneBackendRequest.filterSensitiveLog, + outputFilterSensitiveLog: CloneBackendResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CloneBackendCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CloneBackendCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CloneBackendCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/CreateBackendAPICommand.ts b/clients/client-amplifybackend/commands/CreateBackendAPICommand.ts new file mode 100644 index 
000000000000..8c6d617f9478 --- /dev/null +++ b/clients/client-amplifybackend/commands/CreateBackendAPICommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { CreateBackendAPIRequest, CreateBackendAPIResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateBackendAPICommand, + serializeAws_restJson1CreateBackendAPICommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateBackendAPICommandInput = CreateBackendAPIRequest; +export type CreateBackendAPICommandOutput = CreateBackendAPIResponse & __MetadataBearer; + +/** + *

Creates a new backend API resource.

+ */ +export class CreateBackendAPICommand extends $Command< + CreateBackendAPICommandInput, + CreateBackendAPICommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateBackendAPICommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "CreateBackendAPICommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateBackendAPIRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateBackendAPIResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateBackendAPICommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateBackendAPICommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateBackendAPICommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/CreateBackendAuthCommand.ts 
b/clients/client-amplifybackend/commands/CreateBackendAuthCommand.ts new file mode 100644 index 000000000000..de98dd804402 --- /dev/null +++ b/clients/client-amplifybackend/commands/CreateBackendAuthCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { CreateBackendAuthRequest, CreateBackendAuthResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateBackendAuthCommand, + serializeAws_restJson1CreateBackendAuthCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateBackendAuthCommandInput = CreateBackendAuthRequest; +export type CreateBackendAuthCommandOutput = CreateBackendAuthResponse & __MetadataBearer; + +/** + *

Creates a new backend authentication resource.

+ */ +export class CreateBackendAuthCommand extends $Command< + CreateBackendAuthCommandInput, + CreateBackendAuthCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateBackendAuthCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "CreateBackendAuthCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateBackendAuthRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateBackendAuthResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateBackendAuthCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateBackendAuthCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateBackendAuthCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/CreateBackendCommand.ts 
b/clients/client-amplifybackend/commands/CreateBackendCommand.ts new file mode 100644 index 000000000000..7a91cab86ce7 --- /dev/null +++ b/clients/client-amplifybackend/commands/CreateBackendCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { CreateBackendRequest, CreateBackendResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateBackendCommand, + serializeAws_restJson1CreateBackendCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateBackendCommandInput = CreateBackendRequest; +export type CreateBackendCommandOutput = CreateBackendResponse & __MetadataBearer; + +/** + *

This operation creates a backend for an Amplify app. Backends are automatically created at the time of app creation.

+ */ +export class CreateBackendCommand extends $Command< + CreateBackendCommandInput, + CreateBackendCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateBackendCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "CreateBackendCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateBackendRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateBackendResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateBackendCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateBackendCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateBackendCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/CreateBackendConfigCommand.ts b/clients/client-amplifybackend/commands/CreateBackendConfigCommand.ts new file 
mode 100644 index 000000000000..beed2fdce68a --- /dev/null +++ b/clients/client-amplifybackend/commands/CreateBackendConfigCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { CreateBackendConfigRequest, CreateBackendConfigResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateBackendConfigCommand, + serializeAws_restJson1CreateBackendConfigCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateBackendConfigCommandInput = CreateBackendConfigRequest; +export type CreateBackendConfigCommandOutput = CreateBackendConfigResponse & __MetadataBearer; + +/** + *

Creates a config object for a backend.

+ */ +export class CreateBackendConfigCommand extends $Command< + CreateBackendConfigCommandInput, + CreateBackendConfigCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateBackendConfigCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "CreateBackendConfigCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateBackendConfigRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateBackendConfigResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateBackendConfigCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateBackendConfigCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateBackendConfigCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/CreateTokenCommand.ts 
b/clients/client-amplifybackend/commands/CreateTokenCommand.ts new file mode 100644 index 000000000000..5fe9ee32fe27 --- /dev/null +++ b/clients/client-amplifybackend/commands/CreateTokenCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { CreateTokenRequest, CreateTokenResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateTokenCommand, + serializeAws_restJson1CreateTokenCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateTokenCommandInput = CreateTokenRequest; +export type CreateTokenCommandOutput = CreateTokenResponse & __MetadataBearer; + +/** + *

Generates a one-time challenge code to authenticate a user into your Amplify Admin UI.

+ */ +export class CreateTokenCommand extends $Command< + CreateTokenCommandInput, + CreateTokenCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateTokenCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "CreateTokenCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateTokenRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateTokenResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateTokenCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateTokenCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateTokenCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/DeleteBackendAPICommand.ts b/clients/client-amplifybackend/commands/DeleteBackendAPICommand.ts new file mode 100644 index 
000000000000..38d076f456a7 --- /dev/null +++ b/clients/client-amplifybackend/commands/DeleteBackendAPICommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { DeleteBackendAPIRequest, DeleteBackendAPIResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteBackendAPICommand, + serializeAws_restJson1DeleteBackendAPICommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteBackendAPICommandInput = DeleteBackendAPIRequest; +export type DeleteBackendAPICommandOutput = DeleteBackendAPIResponse & __MetadataBearer; + +/** + *

Deletes an existing backend API resource.

+ */ +export class DeleteBackendAPICommand extends $Command< + DeleteBackendAPICommandInput, + DeleteBackendAPICommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteBackendAPICommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "DeleteBackendAPICommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteBackendAPIRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteBackendAPIResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteBackendAPICommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteBackendAPICommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteBackendAPICommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/DeleteBackendAuthCommand.ts 
b/clients/client-amplifybackend/commands/DeleteBackendAuthCommand.ts new file mode 100644 index 000000000000..d975b859f741 --- /dev/null +++ b/clients/client-amplifybackend/commands/DeleteBackendAuthCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { DeleteBackendAuthRequest, DeleteBackendAuthResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteBackendAuthCommand, + serializeAws_restJson1DeleteBackendAuthCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteBackendAuthCommandInput = DeleteBackendAuthRequest; +export type DeleteBackendAuthCommandOutput = DeleteBackendAuthResponse & __MetadataBearer; + +/** + *

Deletes an existing backend authentication resource.

+ */ +export class DeleteBackendAuthCommand extends $Command< + DeleteBackendAuthCommandInput, + DeleteBackendAuthCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteBackendAuthCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "DeleteBackendAuthCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteBackendAuthRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteBackendAuthResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteBackendAuthCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteBackendAuthCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteBackendAuthCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/DeleteBackendCommand.ts 
b/clients/client-amplifybackend/commands/DeleteBackendCommand.ts new file mode 100644 index 000000000000..78dcb1503295 --- /dev/null +++ b/clients/client-amplifybackend/commands/DeleteBackendCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { DeleteBackendRequest, DeleteBackendResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteBackendCommand, + serializeAws_restJson1DeleteBackendCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteBackendCommandInput = DeleteBackendRequest; +export type DeleteBackendCommandOutput = DeleteBackendResponse & __MetadataBearer; + +/** + *

Removes an existing environment from your Amplify project.

+ */ +export class DeleteBackendCommand extends $Command< + DeleteBackendCommandInput, + DeleteBackendCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteBackendCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "DeleteBackendCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteBackendRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteBackendResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteBackendCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteBackendCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteBackendCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/DeleteTokenCommand.ts b/clients/client-amplifybackend/commands/DeleteTokenCommand.ts new file mode 100644 index 
000000000000..e5d9f3f59607 --- /dev/null +++ b/clients/client-amplifybackend/commands/DeleteTokenCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { DeleteTokenRequest, DeleteTokenResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteTokenCommand, + serializeAws_restJson1DeleteTokenCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteTokenCommandInput = DeleteTokenRequest; +export type DeleteTokenCommandOutput = DeleteTokenResponse & __MetadataBearer; + +/** + *

Deletes the challenge token based on the given appId and sessionId.

+ */ +export class DeleteTokenCommand extends $Command< + DeleteTokenCommandInput, + DeleteTokenCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteTokenCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "DeleteTokenCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteTokenRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteTokenResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteTokenCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteTokenCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteTokenCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/GenerateBackendAPIModelsCommand.ts b/clients/client-amplifybackend/commands/GenerateBackendAPIModelsCommand.ts new file mode 100644 
index 000000000000..7213fcac6460 --- /dev/null +++ b/clients/client-amplifybackend/commands/GenerateBackendAPIModelsCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GenerateBackendAPIModelsRequest, GenerateBackendAPIModelsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GenerateBackendAPIModelsCommand, + serializeAws_restJson1GenerateBackendAPIModelsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GenerateBackendAPIModelsCommandInput = GenerateBackendAPIModelsRequest; +export type GenerateBackendAPIModelsCommandOutput = GenerateBackendAPIModelsResponse & __MetadataBearer; + +/** + *

Generates a model schema for an existing backend API resource.

+ */ +export class GenerateBackendAPIModelsCommand extends $Command< + GenerateBackendAPIModelsCommandInput, + GenerateBackendAPIModelsCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GenerateBackendAPIModelsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GenerateBackendAPIModelsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GenerateBackendAPIModelsRequest.filterSensitiveLog, + outputFilterSensitiveLog: GenerateBackendAPIModelsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GenerateBackendAPIModelsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GenerateBackendAPIModelsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GenerateBackendAPIModelsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-amplifybackend/commands/GetBackendAPICommand.ts b/clients/client-amplifybackend/commands/GetBackendAPICommand.ts new file mode 100644 index 000000000000..55031c3a3274 --- /dev/null +++ b/clients/client-amplifybackend/commands/GetBackendAPICommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GetBackendAPIRequest, GetBackendAPIResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetBackendAPICommand, + serializeAws_restJson1GetBackendAPICommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetBackendAPICommandInput = GetBackendAPIRequest; +export type GetBackendAPICommandOutput = GetBackendAPIResponse & __MetadataBearer; + +/** + *

Gets the details for a backend API.

+ */ +export class GetBackendAPICommand extends $Command< + GetBackendAPICommandInput, + GetBackendAPICommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetBackendAPICommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GetBackendAPICommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetBackendAPIRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetBackendAPIResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetBackendAPICommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetBackendAPICommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetBackendAPICommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/GetBackendAPIModelsCommand.ts b/clients/client-amplifybackend/commands/GetBackendAPIModelsCommand.ts new file 
mode 100644 index 000000000000..c701048618c1 --- /dev/null +++ b/clients/client-amplifybackend/commands/GetBackendAPIModelsCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GetBackendAPIModelsRequest, GetBackendAPIModelsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetBackendAPIModelsCommand, + serializeAws_restJson1GetBackendAPIModelsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetBackendAPIModelsCommandInput = GetBackendAPIModelsRequest; +export type GetBackendAPIModelsCommandOutput = GetBackendAPIModelsResponse & __MetadataBearer; + +/** + *

Generates a model schema for an existing backend API resource.

+ */ +export class GetBackendAPIModelsCommand extends $Command< + GetBackendAPIModelsCommandInput, + GetBackendAPIModelsCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetBackendAPIModelsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GetBackendAPIModelsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetBackendAPIModelsRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetBackendAPIModelsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetBackendAPIModelsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetBackendAPIModelsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetBackendAPIModelsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/GetBackendAuthCommand.ts 
b/clients/client-amplifybackend/commands/GetBackendAuthCommand.ts new file mode 100644 index 000000000000..684eb18bba6a --- /dev/null +++ b/clients/client-amplifybackend/commands/GetBackendAuthCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GetBackendAuthRequest, GetBackendAuthResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetBackendAuthCommand, + serializeAws_restJson1GetBackendAuthCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetBackendAuthCommandInput = GetBackendAuthRequest; +export type GetBackendAuthCommandOutput = GetBackendAuthResponse & __MetadataBearer; + +/** + *

Gets the details of a backend authentication resource.

+ */ +export class GetBackendAuthCommand extends $Command< + GetBackendAuthCommandInput, + GetBackendAuthCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetBackendAuthCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GetBackendAuthCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetBackendAuthRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetBackendAuthResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetBackendAuthCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetBackendAuthCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetBackendAuthCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/GetBackendCommand.ts b/clients/client-amplifybackend/commands/GetBackendCommand.ts new file mode 
100644 index 000000000000..14b938ab2113 --- /dev/null +++ b/clients/client-amplifybackend/commands/GetBackendCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GetBackendRequest, GetBackendResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetBackendCommand, + serializeAws_restJson1GetBackendCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetBackendCommandInput = GetBackendRequest; +export type GetBackendCommandOutput = GetBackendResponse & __MetadataBearer; + +/** + *

Provides project level details for your Amplify UI project.

+ */ +export class GetBackendCommand extends $Command< + GetBackendCommandInput, + GetBackendCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetBackendCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GetBackendCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetBackendRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetBackendResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetBackendCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetBackendCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetBackendCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/GetBackendJobCommand.ts b/clients/client-amplifybackend/commands/GetBackendJobCommand.ts new file mode 100644 index 
000000000000..cdf94b437d00 --- /dev/null +++ b/clients/client-amplifybackend/commands/GetBackendJobCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GetBackendJobRequest, GetBackendJobResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetBackendJobCommand, + serializeAws_restJson1GetBackendJobCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetBackendJobCommandInput = GetBackendJobRequest; +export type GetBackendJobCommandOutput = GetBackendJobResponse & __MetadataBearer; + +/** + *

Returns information about a specific job.

+ */ +export class GetBackendJobCommand extends $Command< + GetBackendJobCommandInput, + GetBackendJobCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetBackendJobCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GetBackendJobCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetBackendJobRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetBackendJobResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetBackendJobCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetBackendJobCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetBackendJobCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/GetTokenCommand.ts b/clients/client-amplifybackend/commands/GetTokenCommand.ts new file mode 100644 index 
000000000000..05911cdfa47e --- /dev/null +++ b/clients/client-amplifybackend/commands/GetTokenCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { GetTokenRequest, GetTokenResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetTokenCommand, + serializeAws_restJson1GetTokenCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetTokenCommandInput = GetTokenRequest; +export type GetTokenCommandOutput = GetTokenResponse & __MetadataBearer; + +/** + *

Gets the challenge token based on the given appId and sessionId.

+ */ +export class GetTokenCommand extends $Command< + GetTokenCommandInput, + GetTokenCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetTokenCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "GetTokenCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetTokenRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetTokenResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetTokenCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetTokenCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetTokenCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/ListBackendJobsCommand.ts b/clients/client-amplifybackend/commands/ListBackendJobsCommand.ts new file mode 100644 index 000000000000..81df36ae2949 --- /dev/null 
+++ b/clients/client-amplifybackend/commands/ListBackendJobsCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { ListBackendJobsRequest, ListBackendJobsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListBackendJobsCommand, + serializeAws_restJson1ListBackendJobsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListBackendJobsCommandInput = ListBackendJobsRequest; +export type ListBackendJobsCommandOutput = ListBackendJobsResponse & __MetadataBearer; + +/** + *

Lists the jobs for the backend of an Amplify app.

+ */ +export class ListBackendJobsCommand extends $Command< + ListBackendJobsCommandInput, + ListBackendJobsCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListBackendJobsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "ListBackendJobsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListBackendJobsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListBackendJobsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListBackendJobsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListBackendJobsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListBackendJobsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/RemoveAllBackendsCommand.ts 
b/clients/client-amplifybackend/commands/RemoveAllBackendsCommand.ts new file mode 100644 index 000000000000..8ef08a42135b --- /dev/null +++ b/clients/client-amplifybackend/commands/RemoveAllBackendsCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { RemoveAllBackendsRequest, RemoveAllBackendsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1RemoveAllBackendsCommand, + serializeAws_restJson1RemoveAllBackendsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type RemoveAllBackendsCommandInput = RemoveAllBackendsRequest; +export type RemoveAllBackendsCommandOutput = RemoveAllBackendsResponse & __MetadataBearer; + +/** + *

Removes all backend environments from your Amplify project.

+ */ +export class RemoveAllBackendsCommand extends $Command< + RemoveAllBackendsCommandInput, + RemoveAllBackendsCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: RemoveAllBackendsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "RemoveAllBackendsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: RemoveAllBackendsRequest.filterSensitiveLog, + outputFilterSensitiveLog: RemoveAllBackendsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: RemoveAllBackendsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1RemoveAllBackendsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1RemoveAllBackendsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/RemoveBackendConfigCommand.ts 
b/clients/client-amplifybackend/commands/RemoveBackendConfigCommand.ts new file mode 100644 index 000000000000..b10546f809af --- /dev/null +++ b/clients/client-amplifybackend/commands/RemoveBackendConfigCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { RemoveBackendConfigRequest, RemoveBackendConfigResponse } from "../models/models_0"; +import { + deserializeAws_restJson1RemoveBackendConfigCommand, + serializeAws_restJson1RemoveBackendConfigCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type RemoveBackendConfigCommandInput = RemoveBackendConfigRequest; +export type RemoveBackendConfigCommandOutput = RemoveBackendConfigResponse & __MetadataBearer; + +/** + *

Removes the AWS resources required to access the Amplify Admin UI.

+ */ +export class RemoveBackendConfigCommand extends $Command< + RemoveBackendConfigCommandInput, + RemoveBackendConfigCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: RemoveBackendConfigCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "RemoveBackendConfigCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: RemoveBackendConfigRequest.filterSensitiveLog, + outputFilterSensitiveLog: RemoveBackendConfigResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: RemoveBackendConfigCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1RemoveBackendConfigCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1RemoveBackendConfigCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/UpdateBackendAPICommand.ts 
b/clients/client-amplifybackend/commands/UpdateBackendAPICommand.ts new file mode 100644 index 000000000000..2ad68d8b599f --- /dev/null +++ b/clients/client-amplifybackend/commands/UpdateBackendAPICommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { UpdateBackendAPIRequest, UpdateBackendAPIResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateBackendAPICommand, + serializeAws_restJson1UpdateBackendAPICommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateBackendAPICommandInput = UpdateBackendAPIRequest; +export type UpdateBackendAPICommandOutput = UpdateBackendAPIResponse & __MetadataBearer; + +/** + *

Updates an existing backend API resource.

+ */ +export class UpdateBackendAPICommand extends $Command< + UpdateBackendAPICommandInput, + UpdateBackendAPICommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateBackendAPICommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "UpdateBackendAPICommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateBackendAPIRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateBackendAPIResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateBackendAPICommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateBackendAPICommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateBackendAPICommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/UpdateBackendAuthCommand.ts 
b/clients/client-amplifybackend/commands/UpdateBackendAuthCommand.ts new file mode 100644 index 000000000000..9c0edcb8254e --- /dev/null +++ b/clients/client-amplifybackend/commands/UpdateBackendAuthCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { UpdateBackendAuthRequest, UpdateBackendAuthResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateBackendAuthCommand, + serializeAws_restJson1UpdateBackendAuthCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateBackendAuthCommandInput = UpdateBackendAuthRequest; +export type UpdateBackendAuthCommandOutput = UpdateBackendAuthResponse & __MetadataBearer; + +/** + *

Updates an existing backend authentication resource.

+ */ +export class UpdateBackendAuthCommand extends $Command< + UpdateBackendAuthCommandInput, + UpdateBackendAuthCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateBackendAuthCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "UpdateBackendAuthCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateBackendAuthRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateBackendAuthResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateBackendAuthCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateBackendAuthCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateBackendAuthCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/UpdateBackendConfigCommand.ts 
b/clients/client-amplifybackend/commands/UpdateBackendConfigCommand.ts new file mode 100644 index 000000000000..7e633ab3635a --- /dev/null +++ b/clients/client-amplifybackend/commands/UpdateBackendConfigCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { UpdateBackendConfigRequest, UpdateBackendConfigResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateBackendConfigCommand, + serializeAws_restJson1UpdateBackendConfigCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateBackendConfigCommandInput = UpdateBackendConfigRequest; +export type UpdateBackendConfigCommandOutput = UpdateBackendConfigResponse & __MetadataBearer; + +/** + *

Updates the AWS resources required to access the Amplify Admin UI.

+ */ +export class UpdateBackendConfigCommand extends $Command< + UpdateBackendConfigCommandInput, + UpdateBackendConfigCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateBackendConfigCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "UpdateBackendConfigCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateBackendConfigRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateBackendConfigResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateBackendConfigCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateBackendConfigCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateBackendConfigCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/commands/UpdateBackendJobCommand.ts 
b/clients/client-amplifybackend/commands/UpdateBackendJobCommand.ts new file mode 100644 index 000000000000..99f3e909c417 --- /dev/null +++ b/clients/client-amplifybackend/commands/UpdateBackendJobCommand.ts @@ -0,0 +1,88 @@ +import { AmplifyBackendClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AmplifyBackendClient"; +import { UpdateBackendJobRequest, UpdateBackendJobResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateBackendJobCommand, + serializeAws_restJson1UpdateBackendJobCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateBackendJobCommandInput = UpdateBackendJobRequest; +export type UpdateBackendJobCommandOutput = UpdateBackendJobResponse & __MetadataBearer; + +/** + *

Updates a specific job.

+ */ +export class UpdateBackendJobCommand extends $Command< + UpdateBackendJobCommandInput, + UpdateBackendJobCommandOutput, + AmplifyBackendClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateBackendJobCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AmplifyBackendClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AmplifyBackendClient"; + const commandName = "UpdateBackendJobCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateBackendJobRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateBackendJobResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateBackendJobCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateBackendJobCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateBackendJobCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-amplifybackend/endpoints.ts b/clients/client-amplifybackend/endpoints.ts new file mode 100644 index 
000000000000..afd16e8e49b0 --- /dev/null +++ b/clients/client-amplifybackend/endpoints.ts @@ -0,0 +1,81 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "amplifybackend.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "amplifybackend.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "amplifybackend.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "amplifybackend.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "amplifybackend.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. + // Next, try to match partition endpoints. 
+ default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. + if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-amplifybackend/index.ts b/clients/client-amplifybackend/index.ts new file mode 100644 index 000000000000..d8c74182c9c4 --- /dev/null +++ b/clients/client-amplifybackend/index.ts @@ -0,0 +1,27 @@ +export * from "./AmplifyBackendClient"; +export * from "./AmplifyBackend"; +export * from "./commands/CloneBackendCommand"; +export * from "./commands/CreateBackendCommand"; +export * from "./commands/CreateBackendAPICommand"; +export * from "./commands/CreateBackendAuthCommand"; +export * from "./commands/CreateBackendConfigCommand"; +export * from "./commands/CreateTokenCommand"; +export * from "./commands/DeleteBackendCommand"; +export * from "./commands/DeleteBackendAPICommand"; +export * from "./commands/DeleteBackendAuthCommand"; +export * from "./commands/DeleteTokenCommand"; +export * from "./commands/GenerateBackendAPIModelsCommand"; +export * from "./commands/GetBackendCommand"; +export * from "./commands/GetBackendAPICommand"; +export * from "./commands/GetBackendAPIModelsCommand"; 
+export * from "./commands/GetBackendAuthCommand"; +export * from "./commands/GetBackendJobCommand"; +export * from "./commands/GetTokenCommand"; +export * from "./commands/ListBackendJobsCommand"; +export * from "./commands/RemoveAllBackendsCommand"; +export * from "./commands/RemoveBackendConfigCommand"; +export * from "./commands/UpdateBackendAPICommand"; +export * from "./commands/UpdateBackendAuthCommand"; +export * from "./commands/UpdateBackendConfigCommand"; +export * from "./commands/UpdateBackendJobCommand"; +export * from "./models/index"; diff --git a/clients/client-amplifybackend/models/index.ts b/clients/client-amplifybackend/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null +++ b/clients/client-amplifybackend/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-amplifybackend/models/models_0.ts b/clients/client-amplifybackend/models/models_0.ts new file mode 100644 index 000000000000..6abbc5a0b439 --- /dev/null +++ b/clients/client-amplifybackend/models/models_0.ts @@ -0,0 +1,2304 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; + +export enum AdditionalConstraintsElement { + REQUIRE_DIGIT = "REQUIRE_DIGIT", + REQUIRE_LOWERCASE = "REQUIRE_LOWERCASE", + REQUIRE_SYMBOL = "REQUIRE_SYMBOL", + REQUIRE_UPPERCASE = "REQUIRE_UPPERCASE", +} + +/** + *

An error returned if a request is not formed properly.

+ */ +export interface BadRequestException extends __SmithyException, $MetadataBearer { + name: "BadRequestException"; + $fault: "client"; + /** + *

An error message to inform that the request has failed.

+ */ + Message?: string; +} + +export namespace BadRequestException { + export const filterSensitiveLog = (obj: BadRequestException): any => ({ + ...obj, + }); +} + +/** + *

The request body for CloneBackend.

+ */ +export interface CloneBackendRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The name of the destination backend environment to be created.

+ */ + TargetEnvironmentName: string | undefined; +} + +export namespace CloneBackendRequest { + export const filterSensitiveLog = (obj: CloneBackendRequest): any => ({ + ...obj, + }); +} + +export interface CloneBackendResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace CloneBackendResponse { + export const filterSensitiveLog = (obj: CloneBackendResponse): any => ({ + ...obj, + }); +} + +/** + *

An error returned if there's a temporary issue with the service.

+ */ +export interface GatewayTimeoutException extends __SmithyException, $MetadataBearer { + name: "GatewayTimeoutException"; + $fault: "server"; + /** + *

An error message to inform that the request has failed.

+ */ + Message?: string; +} + +export namespace GatewayTimeoutException { + export const filterSensitiveLog = (obj: GatewayTimeoutException): any => ({ + ...obj, + }); +} + +/** + *

An error returned when a specific resource type is not found.

+ */ +export interface NotFoundException extends __SmithyException, $MetadataBearer { + name: "NotFoundException"; + $fault: "client"; + /** + *

An error message to inform that the request has failed.

+ */ + Message?: string; + + /** + *

The type of resource that wasn't found.

+ */ + ResourceType?: string; +} + +export namespace NotFoundException { + export const filterSensitiveLog = (obj: NotFoundException): any => ({ + ...obj, + }); +} + +/** + *

An error that is returned when a limit of a specific type has been exceeded.

+ */ +export interface TooManyRequestsException extends __SmithyException, $MetadataBearer { + name: "TooManyRequestsException"; + $fault: "client"; + /** + *

The type of limit that was exceeded.

+ */ + LimitType?: string; + + /** + *

An error message to inform that the request has failed.

+ */ + Message?: string; +} + +export namespace TooManyRequestsException { + export const filterSensitiveLog = (obj: TooManyRequestsException): any => ({ + ...obj, + }); +} + +/** + *

Defines the resource configuration for the data model in your Amplify project.

+ */ +export interface ResourceConfig {} + +export namespace ResourceConfig { + export const filterSensitiveLog = (obj: ResourceConfig): any => ({ + ...obj, + }); +} + +/** + *

The request body for CreateBackend.

+ */ +export interface CreateBackendRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the app.

+ */ + AppName: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The resource configuration for the create backend request.

+ */ + ResourceConfig?: ResourceConfig; + + /** + *

The name of the resource.

+ */ + ResourceName?: string; +} + +export namespace CreateBackendRequest { + export const filterSensitiveLog = (obj: CreateBackendRequest): any => ({ + ...obj, + }); +} + +export interface CreateBackendResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace CreateBackendResponse { + export const filterSensitiveLog = (obj: CreateBackendResponse): any => ({ + ...obj, + }); +} + +export enum Mode { + AMAZON_COGNITO_USER_POOLS = "AMAZON_COGNITO_USER_POOLS", + API_KEY = "API_KEY", + AWS_IAM = "AWS_IAM", + OPENID_CONNECT = "OPENID_CONNECT", +} + +/** + *

The authentication settings for accessing provisioned data models in your Amplify project.

+ */ +export interface BackendAPIAppSyncAuthSettings { + /** + *

The Amazon Cognito user pool id, if Amazon Cognito was used as an authentication setting to access your data models.

+ */ + CognitoUserPoolId?: string; + + /** + *

The api key description for API_KEY, if it was used as an authentication mechanism to access your data models.

+ */ + Description?: string; + + /** + *

The api key expiration time for API_KEY, if it was used as an authentication mechanism to access your data models.

+ */ + ExpirationTime?: number; + + /** + *

The expiry time for the OpenID authentication mechanism.

+ */ + OpenIDAuthTTL?: string; + + /** + *

The clientID for openID, if openID was used as an authentication setting to access your data models.

+ */ + OpenIDClientId?: string; + + /** + *

The expiry time for the OpenID authentication mechanism.

+ */ + OpenIDIatTTL?: string; + + /** + *

The openID issuer URL, if openID was used as an authentication setting to access your data models.

+ */ + OpenIDIssueURL?: string; + + /** + *

The open id provider name, if openID was used as an authentication mechanism to access your data models.

+ */ + OpenIDProviderName?: string; +} + +export namespace BackendAPIAppSyncAuthSettings { + export const filterSensitiveLog = (obj: BackendAPIAppSyncAuthSettings): any => ({ + ...obj, + }); +} + +/** + *

Describes the auth types for your configured data models.

+ */ +export interface BackendAPIAuthType { + /** + *

Describes the authentication mode.

+ */ + Mode?: Mode | string; + + /** + *

Describes settings for the authentication mode.

+ */ + Settings?: BackendAPIAppSyncAuthSettings; +} + +export namespace BackendAPIAuthType { + export const filterSensitiveLog = (obj: BackendAPIAuthType): any => ({ + ...obj, + }); +} + +export enum ResolutionStrategy { + AUTOMERGE = "AUTOMERGE", + LAMBDA = "LAMBDA", + NONE = "NONE", + OPTIMISTIC_CONCURRENCY = "OPTIMISTIC_CONCURRENCY", +} + +/** + *

Describes the conflict resolution configuration for your data model configured in your Amplify project.

+ */ +export interface BackendAPIConflictResolution { + /** + *

The strategy for conflict resolution.

+ */ + ResolutionStrategy?: ResolutionStrategy | string; +} + +export namespace BackendAPIConflictResolution { + export const filterSensitiveLog = (obj: BackendAPIConflictResolution): any => ({ + ...obj, + }); +} + +/** + *

The resource config for the data model, configured as a part of the Amplify project.

+ */ +export interface BackendAPIResourceConfig { + /** + *

Additional authentication methods used to interact with your data models.

+ */ + AdditionalAuthTypes?: BackendAPIAuthType[]; + + /** + *

The API name used to interact with the data model, configured as a part of the amplify project.

+ */ + ApiName?: string; + + /** + *

The conflict resolution strategy for your data stored in the data models.

+ */ + ConflictResolution?: BackendAPIConflictResolution; + + /** + *

The default authentication type for interacting with the configured data models in your amplify project.

+ */ + DefaultAuthType?: BackendAPIAuthType; + + /** + *

The service used to provision and interact with the data model.

+ */ + Service?: string; + + /** + *

The definition of the data model in the annotated transform of the graphql schema.

+ */ + TransformSchema?: string; +} + +export namespace BackendAPIResourceConfig { + export const filterSensitiveLog = (obj: BackendAPIResourceConfig): any => ({ + ...obj, + }); +} + +/** + *

The request body for CreateBackendAPI.

+ */ +export interface CreateBackendAPIRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The resource configuration for this request.

+ */ + ResourceConfig: BackendAPIResourceConfig | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace CreateBackendAPIRequest { + export const filterSensitiveLog = (obj: CreateBackendAPIRequest): any => ({ + ...obj, + }); +} + +export interface CreateBackendAPIResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace CreateBackendAPIResponse { + export const filterSensitiveLog = (obj: CreateBackendAPIResponse): any => ({ + ...obj, + }); +} + +export enum AuthResources { + IDENTITY_POOL_AND_USER_POOL = "IDENTITY_POOL_AND_USER_POOL", + USER_POOL_ONLY = "USER_POOL_ONLY", +} + +/** + *

Describes authorization configurations for the auth resources, configures as a part of your Amplify project.

+ */ +export interface CreateBackendAuthIdentityPoolConfig { + /** + *

Name of the identity pool used for authorization.

+ */ + IdentityPoolName: string | undefined; + + /** + *

Set to true or false based on whether you want to enable guest authorization to your Amplify app.

+ */ + UnauthenticatedLogin: boolean | undefined; +} + +export namespace CreateBackendAuthIdentityPoolConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthIdentityPoolConfig): any => ({ + ...obj, + }); +} + +export enum Service { + COGNITO = "COGNITO", +} + +export enum DeliveryMethod { + EMAIL = "EMAIL", + SMS = "SMS", +} + +/** + *

The configuration for the email sent when an app user forgets their password.

+ */ +export interface EmailSettings { + /** + *

The body of the email.

+ */ + EmailMessage?: string; + + /** + *

The subject of the email.

+ */ + EmailSubject?: string; +} + +export namespace EmailSettings { + export const filterSensitiveLog = (obj: EmailSettings): any => ({ + ...obj, + }); +} + +/** + *

SMS settings for authentication.

+ */ +export interface SmsSettings { + /** + *

The body of the SMS message.

+ */ + SmsMessage?: string; +} + +export namespace SmsSettings { + export const filterSensitiveLog = (obj: SmsSettings): any => ({ + ...obj, + }); +} + +/** + *

Describes the forgot password policy for authenticating into the Amplify app.

+ */ +export interface CreateBackendAuthForgotPasswordConfig { + /** + *

Describes which mode to use (either SMS or email) to deliver messages to app users that want to recover their password.

+ */ + DeliveryMethod: DeliveryMethod | string | undefined; + + /** + *

The configuration for the email sent when an app user forgets their password.

+ */ + EmailSettings?: EmailSettings; + + /** + *

The configuration for the SMS message sent when an app user forgets their password.

+ */ + SmsSettings?: SmsSettings; +} + +export namespace CreateBackendAuthForgotPasswordConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthForgotPasswordConfig): any => ({ + ...obj, + }); +} + +export enum MFAMode { + OFF = "OFF", + ON = "ON", + OPTIONAL = "OPTIONAL", +} + +export enum MfaTypesElement { + SMS = "SMS", + TOTP = "TOTP", +} + +/** + *

The settings of your MFA configuration for the backend of your Amplify project.

+ */ +export interface Settings { + /** + *

The supported MFA types

+ */ + MfaTypes?: (MfaTypesElement | string)[]; + + /** + *

The body of the SMS message.

+ */ + SmsMessage?: string; +} + +export namespace Settings { + export const filterSensitiveLog = (obj: Settings): any => ({ + ...obj, + }); +} + +/** + *

Describes whether multi-factor authentication policies should be applied for your Amazon Cognito user pool configured as a part of your Amplify project.

+ */ +export interface CreateBackendAuthMFAConfig { + /** + *

Describes whether MFA should be [ON, OFF, OPTIONAL] for authentication in your Amplify project.

+ */ + MFAMode: MFAMode | string | undefined; + + /** + *

Describes the configuration settings and methods for your Amplify app users to use MFA.

+ */ + Settings?: Settings; +} + +export namespace CreateBackendAuthMFAConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthMFAConfig): any => ({ + ...obj, + }); +} + +export enum OAuthGrantType { + CODE = "CODE", + IMPLICIT = "IMPLICIT", +} + +export enum OAuthScopesElement { + AWS_COGNITO_SIGNIN_USER_ADMIN = "AWS_COGNITO_SIGNIN_USER_ADMIN", + EMAIL = "EMAIL", + OPENID = "OPENID", + PHONE = "PHONE", + PROFILE = "PROFILE", +} + +/** + *

Describes third party social federation configurations for allowing your app users to sign in using OAuth.

+ */ +export interface BackendAuthSocialProviderConfig { + /** + *

Describes the client_id which can be obtained from the third party social federation provider.

+ */ + ClientId?: string; + + /** + *

Describes the client_secret which can be obtained from third party social federation providers.

+ */ + ClientSecret?: string; +} + +export namespace BackendAuthSocialProviderConfig { + export const filterSensitiveLog = (obj: BackendAuthSocialProviderConfig): any => ({ + ...obj, + }); +} + +/** + *

The settings for using the social providers for access to your Amplify app.

+ */ +export interface SocialProviderSettings { + /** + *

Describes third party social federation configurations for allowing your app users to sign in using OAuth.

+ */ + Facebook?: BackendAuthSocialProviderConfig; + + /** + *

Describes third party social federation configurations for allowing your app users to sign in using OAuth.

+ */ + Google?: BackendAuthSocialProviderConfig; + + /** + *

Describes third party social federation configurations for allowing your app users to sign in using OAuth.

+ */ + LoginWithAmazon?: BackendAuthSocialProviderConfig; +} + +export namespace SocialProviderSettings { + export const filterSensitiveLog = (obj: SocialProviderSettings): any => ({ + ...obj, + }); +} + +/** + *

Creates the OAuth configuration for your Amplify project.

+ */ +export interface CreateBackendAuthOAuthConfig { + /** + *

The domain prefix for your Amplify app.

+ */ + DomainPrefix?: string; + + /** + *

The OAuth grant type which you use to allow app users to authenticate from your Amplify app.

+ */ + OAuthGrantType: OAuthGrantType | string | undefined; + + /** + *

List of OAuth-related flows that allow your app users to authenticate from your Amplify app.

+ */ + OAuthScopes: (OAuthScopesElement | string)[] | undefined; + + /** + *

The redirect URI for signing in to your Amplify app.

+ */ + RedirectSignInURIs: string[] | undefined; + + /** + *

Redirect URLs used by OAuth when a user signs out of an Amplify app.

+ */ + RedirectSignOutURIs: string[] | undefined; + + /** + *

The settings for using the social providers for access to your Amplify app.

+ */ + SocialProviderSettings?: SocialProviderSettings; +} + +export namespace CreateBackendAuthOAuthConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthOAuthConfig): any => ({ + ...obj, + }); +} + +/** + *

The password policy configuration for the backend to your Amplify project.

+ */ +export interface CreateBackendAuthPasswordPolicyConfig { + /** + *

Additional constraints for the password used to access the backend of your Amplify project.

+ */ + AdditionalConstraints?: (AdditionalConstraintsElement | string)[]; + + /** + *

The minimum length of password used to access the backend of your Amplify project.

+ */ + MinimumLength: number | undefined; +} + +export namespace CreateBackendAuthPasswordPolicyConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthPasswordPolicyConfig): any => ({ + ...obj, + }); +} + +export enum RequiredSignUpAttributesElement { + ADDRESS = "ADDRESS", + BIRTHDATE = "BIRTHDATE", + EMAIL = "EMAIL", + FAMILY_NAME = "FAMILY_NAME", + GENDER = "GENDER", + GIVEN_NAME = "GIVEN_NAME", + LOCALE = "LOCALE", + MIDDLE_NAME = "MIDDLE_NAME", + NAME = "NAME", + NICKNAME = "NICKNAME", + PHONE_NUMBER = "PHONE_NUMBER", + PICTURE = "PICTURE", + PREFERRED_USERNAME = "PREFERRED_USERNAME", + PROFILE = "PROFILE", + UPDATED_AT = "UPDATED_AT", + WEBSITE = "WEBSITE", + ZONE_INFO = "ZONE_INFO", +} + +export enum SignInMethod { + EMAIL = "EMAIL", + EMAIL_AND_PHONE_NUMBER = "EMAIL_AND_PHONE_NUMBER", + PHONE_NUMBER = "PHONE_NUMBER", + USERNAME = "USERNAME", +} + +/** + *

Describes the Amazon Cognito user pool configuration for the auth resource to be configured for your Amplify project.

+ */ +export interface CreateBackendAuthUserPoolConfig { + /** + *

Describes the forgotten password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

+ */ + ForgotPassword?: CreateBackendAuthForgotPasswordConfig; + + /** + *

Describes whether multi-factor authentication policies should be applied for your Amazon Cognito user pool configured as a part of your Amplify project.

+ */ + Mfa?: CreateBackendAuthMFAConfig; + + /** + *

Describes the OAuth policy and rules for your Amazon Cognito user pool, configured as a part of your Amplify project.

+ */ + OAuth?: CreateBackendAuthOAuthConfig; + + /** + *

Describes the password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

+ */ + PasswordPolicy?: CreateBackendAuthPasswordPolicyConfig; + + /** + *

The required attributes to sign up new users in the user pool.

+ */ + RequiredSignUpAttributes: (RequiredSignUpAttributesElement | string)[] | undefined; + + /** + *

Describes the sign-in methods that your Amplify app users use to log in, using the Amazon Cognito user pool configured as a part of your Amplify project.

+ */ + SignInMethod: SignInMethod | string | undefined; + + /** + *

The Amazon Cognito user pool name.

+ */ + UserPoolName: string | undefined; +} + +export namespace CreateBackendAuthUserPoolConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthUserPoolConfig): any => ({ + ...obj, + }); +} + +/** + *

Defines the resource configuration when creating an auth resource in your Amplify project.

+ */ +export interface CreateBackendAuthResourceConfig { + /** + *

Defines whether you want to configure only authentication or both authentication and authorization settings.

+ */ + AuthResources: AuthResources | string | undefined; + + /** + *

Describes the authorization configuration for the Amazon Cognito identity pool, provisioned as a part of your auth resource in the Amplify project.

+ */ + IdentityPoolConfigs?: CreateBackendAuthIdentityPoolConfig; + + /** + *

Defines the service name to use when configuring an authentication resource in your Amplify project.

+ */ + Service: Service | string | undefined; + + /** + *

Describes authentication configuration for the Amazon Cognito user pool, provisioned as a part of your auth resource in the Amplify project.

+ */ + UserPoolConfigs: CreateBackendAuthUserPoolConfig | undefined; +} + +export namespace CreateBackendAuthResourceConfig { + export const filterSensitiveLog = (obj: CreateBackendAuthResourceConfig): any => ({ + ...obj, + }); +} + +/** + *

The request body for CreateBackendAuth.

+ */ +export interface CreateBackendAuthRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The resource configuration for this request object.

+ */ + ResourceConfig: CreateBackendAuthResourceConfig | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace CreateBackendAuthRequest { + export const filterSensitiveLog = (obj: CreateBackendAuthRequest): any => ({ + ...obj, + }); +} + +export interface CreateBackendAuthResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace CreateBackendAuthResponse { + export const filterSensitiveLog = (obj: CreateBackendAuthResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for CreateBackendConfig.

+ */ +export interface CreateBackendConfigRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The app ID for the backend manager.

+ */ + BackendManagerAppId?: string; +} + +export namespace CreateBackendConfigRequest { + export const filterSensitiveLog = (obj: CreateBackendConfigRequest): any => ({ + ...obj, + }); +} + +export interface CreateBackendConfigResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace CreateBackendConfigResponse { + export const filterSensitiveLog = (obj: CreateBackendConfigResponse): any => ({ + ...obj, + }); +} + +export interface CreateTokenRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; +} + +export namespace CreateTokenRequest { + export const filterSensitiveLog = (obj: CreateTokenRequest): any => ({ + ...obj, + }); +} + +export interface CreateTokenResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

One time challenge code for authenticating into Amplify Admin UI.

+ */ + ChallengeCode?: string; + + /** + *

A unique identifier provided when creating a new challenge token.

+ */ + SessionId?: string; + + /** + *

The expiry time for the one time generated token code.

+ */ + Ttl?: string; +} + +export namespace CreateTokenResponse { + export const filterSensitiveLog = (obj: CreateTokenResponse): any => ({ + ...obj, + }); +} + +export interface DeleteBackendRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; +} + +export namespace DeleteBackendRequest { + export const filterSensitiveLog = (obj: DeleteBackendRequest): any => ({ + ...obj, + }); +} + +export interface DeleteBackendResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace DeleteBackendResponse { + export const filterSensitiveLog = (obj: DeleteBackendResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for DeleteBackendAPI.

+ */ +export interface DeleteBackendAPIRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

Defines the resource configuration for the data model in your Amplify project.

+ */ + ResourceConfig?: BackendAPIResourceConfig; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace DeleteBackendAPIRequest { + export const filterSensitiveLog = (obj: DeleteBackendAPIRequest): any => ({ + ...obj, + }); +} + +export interface DeleteBackendAPIResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace DeleteBackendAPIResponse { + export const filterSensitiveLog = (obj: DeleteBackendAPIResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for DeleteBackendAuth.

+ */ +export interface DeleteBackendAuthRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace DeleteBackendAuthRequest { + export const filterSensitiveLog = (obj: DeleteBackendAuthRequest): any => ({ + ...obj, + }); +} + +export interface DeleteBackendAuthResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace DeleteBackendAuthResponse { + export const filterSensitiveLog = (obj: DeleteBackendAuthResponse): any => ({ + ...obj, + }); +} + +export interface DeleteTokenRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The session Id.

+ */ + SessionId: string | undefined; +} + +export namespace DeleteTokenRequest { + export const filterSensitiveLog = (obj: DeleteTokenRequest): any => ({ + ...obj, + }); +} + +export interface DeleteTokenResponse { + /** + *

Indicates whether the request succeeded or failed.

+ */ + IsSuccess?: boolean; +} + +export namespace DeleteTokenResponse { + export const filterSensitiveLog = (obj: DeleteTokenResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for GenerateBackendAPIModels.

+ */ +export interface GenerateBackendAPIModelsRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace GenerateBackendAPIModelsRequest { + export const filterSensitiveLog = (obj: GenerateBackendAPIModelsRequest): any => ({ + ...obj, + }); +} + +export interface GenerateBackendAPIModelsResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace GenerateBackendAPIModelsResponse { + export const filterSensitiveLog = (obj: GenerateBackendAPIModelsResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for GetBackend.

+ */ +export interface GetBackendRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; +} + +export namespace GetBackendRequest { + export const filterSensitiveLog = (obj: GetBackendRequest): any => ({ + ...obj, + }); +} + +export interface GetBackendResponse { + /** + *

A stringified version of the current configs for your Amplify project.

+ */ + AmplifyMetaConfig?: string; + + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the app.

+ */ + AppName?: string; + + /** + *

A list of backend environments in an array.

+ */ + BackendEnvironmentList?: string[]; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; +} + +export namespace GetBackendResponse { + export const filterSensitiveLog = (obj: GetBackendResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for GetBackendAPI.

+ */ +export interface GetBackendAPIRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

Defines the resource configuration for the data model in your Amplify project.

+ */ + ResourceConfig?: BackendAPIResourceConfig; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace GetBackendAPIRequest { + export const filterSensitiveLog = (obj: GetBackendAPIRequest): any => ({ + ...obj, + }); +} + +export interface GetBackendAPIResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The resource configuration for this response object.

+ */ + ResourceConfig?: BackendAPIResourceConfig; + + /** + *

The name of this resource.

+ */ + ResourceName?: string; +} + +export namespace GetBackendAPIResponse { + export const filterSensitiveLog = (obj: GetBackendAPIResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for GetBackendAPIModels.

+ */ +export interface GetBackendAPIModelsRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace GetBackendAPIModelsRequest { + export const filterSensitiveLog = (obj: GetBackendAPIModelsRequest): any => ({ + ...obj, + }); +} + +export enum Status { + LATEST = "LATEST", + STALE = "STALE", +} + +export interface GetBackendAPIModelsResponse { + /** + *

Stringified JSON of the datastore model.

+ */ + Models?: string; + + /** + *

The current status of the request.

+ */ + Status?: Status | string; +} + +export namespace GetBackendAPIModelsResponse { + export const filterSensitiveLog = (obj: GetBackendAPIModelsResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for GetBackendAuth.

+ */ +export interface GetBackendAuthRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace GetBackendAuthRequest { + export const filterSensitiveLog = (obj: GetBackendAuthRequest): any => ({ + ...obj, + }); +} + +export interface GetBackendAuthResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The resource configuration for authorization requests to the backend of your Amplify project.

+ */ + ResourceConfig?: CreateBackendAuthResourceConfig; + + /** + *

The name of this resource.

+ */ + ResourceName?: string; +} + +export namespace GetBackendAuthResponse { + export const filterSensitiveLog = (obj: GetBackendAuthResponse): any => ({ + ...obj, + }); +} + +export interface GetBackendJobRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The ID for the job.

+ */ + JobId: string | undefined; +} + +export namespace GetBackendJobRequest { + export const filterSensitiveLog = (obj: GetBackendJobRequest): any => ({ + ...obj, + }); +} + +export interface GetBackendJobResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

The time when the job was created.

+ */ + CreateTime?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; + + /** + *

The time when the job was last updated.

+ */ + UpdateTime?: string; +} + +export namespace GetBackendJobResponse { + export const filterSensitiveLog = (obj: GetBackendJobResponse): any => ({ + ...obj, + }); +} + +export interface GetTokenRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The session Id.

+ */ + SessionId: string | undefined; +} + +export namespace GetTokenRequest { + export const filterSensitiveLog = (obj: GetTokenRequest): any => ({ + ...obj, + }); +} + +export interface GetTokenResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The one time challenge code for authenticating into Amplify Admin UI.

+ */ + ChallengeCode?: string; + + /** + *

A unique identifier provided when creating a new challenge token.

+ */ + SessionId?: string; + + /** + *

The expiry time for the one time generated token code.

+ */ + Ttl?: string; +} + +export namespace GetTokenResponse { + export const filterSensitiveLog = (obj: GetTokenResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for ListBackendJobs.

+ */ +export interface ListBackendJobsRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The maximum number of results you want in the response.

+ */ + MaxResults?: number; + + /** + *

The token for the next set of results.

+ */ + NextToken?: string; + + /** + *

Filters the list of response objects to only include those with the specified operation name.

+ */ + Operation?: string; + + /** + *

Filters the list of response objects to only include those with the specified status.

+ */ + Status?: string; +} + +export namespace ListBackendJobsRequest { + export const filterSensitiveLog = (obj: ListBackendJobsRequest): any => ({ + ...obj, + }); +} + +/** + *

The response object for this operation.

+ */ +export interface BackendJobRespObj { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The time when the job was created.

+ */ + CreateTime?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; + + /** + *

The time when the job was last updated.

+ */ + UpdateTime?: string; +} + +export namespace BackendJobRespObj { + export const filterSensitiveLog = (obj: BackendJobRespObj): any => ({ + ...obj, + }); +} + +export interface ListBackendJobsResponse { + /** + *

An array of jobs and their properties.

+ */ + Jobs?: BackendJobRespObj[]; + + /** + *

The token for the next set of results.

+ */ + NextToken?: string; +} + +export namespace ListBackendJobsResponse { + export const filterSensitiveLog = (obj: ListBackendJobsResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for RemoveAllBackends.

+ */ +export interface RemoveAllBackendsRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

Cleans up the Amplify Console app if this value is set to true.

+ */ + CleanAmplifyApp?: boolean; +} + +export namespace RemoveAllBackendsRequest { + export const filterSensitiveLog = (obj: RemoveAllBackendsRequest): any => ({ + ...obj, + }); +} + +export interface RemoveAllBackendsResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace RemoveAllBackendsResponse { + export const filterSensitiveLog = (obj: RemoveAllBackendsResponse): any => ({ + ...obj, + }); +} + +export interface RemoveBackendConfigRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; +} + +export namespace RemoveBackendConfigRequest { + export const filterSensitiveLog = (obj: RemoveBackendConfigRequest): any => ({ + ...obj, + }); +} + +export interface RemoveBackendConfigResponse { + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; +} + +export namespace RemoveBackendConfigResponse { + export const filterSensitiveLog = (obj: RemoveBackendConfigResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for UpdateBackendAPI.

+ */ +export interface UpdateBackendAPIRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

Defines the resource configuration for the data model in your Amplify project.

+ */ + ResourceConfig?: BackendAPIResourceConfig; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace UpdateBackendAPIRequest { + export const filterSensitiveLog = (obj: UpdateBackendAPIRequest): any => ({ + ...obj, + }); +} + +export interface UpdateBackendAPIResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace UpdateBackendAPIResponse { + export const filterSensitiveLog = (obj: UpdateBackendAPIResponse): any => ({ + ...obj, + }); +} + +/** + *

Describes the authorization configuration for the Amazon Cognito identity pool, provisioned as a part of your auth resource in the Amplify project.

+ */ +export interface UpdateBackendAuthIdentityPoolConfig { + /** + *

A boolean value which can be set to allow or disallow guest level authorization into your Amplify app.

+ */ + UnauthenticatedLogin?: boolean; +} + +export namespace UpdateBackendAuthIdentityPoolConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthIdentityPoolConfig): any => ({ + ...obj, + }); +} + +/** + *

Describes the forgot password policy for authenticating into the Amplify app.

+ */ +export interface UpdateBackendAuthForgotPasswordConfig { + /** + *

Describes which mode to use (either SMS or email) to deliver messages to app users that want to recover their password.

+ */ + DeliveryMethod?: DeliveryMethod | string; + + /** + *

The configuration for the email sent when an app user forgets their password.

+ */ + EmailSettings?: EmailSettings; + + /** + *

The configuration for the SMS message sent when an Amplify app user forgets their password.

+ */ + SmsSettings?: SmsSettings; +} + +export namespace UpdateBackendAuthForgotPasswordConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthForgotPasswordConfig): any => ({ + ...obj, + }); +} + +/** + *

Updates the multi-factor authentication (MFA) configuration for the backend of your Amplify project.

+ */ +export interface UpdateBackendAuthMFAConfig { + /** + *

The MFA mode for the backend of your Amplify project.

+ */ + MFAMode?: MFAMode | string; + + /** + *

The settings of your MFA configuration for the backend of your Amplify project.

+ */ + Settings?: Settings; +} + +export namespace UpdateBackendAuthMFAConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthMFAConfig): any => ({ + ...obj, + }); +} + +/** + *

The OAuth configurations for authenticating users into your Amplify app.

+ */ +export interface UpdateBackendAuthOAuthConfig { + /** + *

The Amazon Cognito domain prefix used to create a hosted UI for authentication.

+ */ + DomainPrefix?: string; + + /** + *

The OAuth grant type to allow app users to authenticate from your Amplify app.

+ */ + OAuthGrantType?: OAuthGrantType | string; + + /** + *

The list of OAuth related flows which can allow users to authenticate from your Amplify app.

+ */ + OAuthScopes?: (OAuthScopesElement | string)[]; + + /** + *

Redirect URLs used by OAuth when a user signs in to an Amplify app.

+ */ + RedirectSignInURIs?: string[]; + + /** + *

Redirect URLs used by OAuth when a user signs out of an Amplify app.

+ */ + RedirectSignOutURIs?: string[]; + + /** + *

Describes third party social federation configurations for allowing your users to sign in with OAuth.

+ */ + SocialProviderSettings?: SocialProviderSettings; +} + +export namespace UpdateBackendAuthOAuthConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthOAuthConfig): any => ({ + ...obj, + }); +} + +/** + *

Describes the password policy for your Amazon Cognito user pool configured as a part of your Amplify project.

+ */ +export interface UpdateBackendAuthPasswordPolicyConfig { + /** + *

Describes additional constraints on password requirements to sign in to the auth resource, configured as a part of your Amplify project.

+ */ + AdditionalConstraints?: (AdditionalConstraintsElement | string)[]; + + /** + *

Describes the minimum length of password required to sign in to the auth resource, configured as a part of your Amplify project.

+ */ + MinimumLength?: number; +} + +export namespace UpdateBackendAuthPasswordPolicyConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthPasswordPolicyConfig): any => ({ + ...obj, + }); +} + +/** + *

Describes the Amazon Cognito user pool configuration for the authorization resource to be configured for your Amplify project on an update.

+ */ +export interface UpdateBackendAuthUserPoolConfig { + /** + *

Describes the forgot password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

+ */ + ForgotPassword?: UpdateBackendAuthForgotPasswordConfig; + + /** + *

Describes whether multi-factor authentication policies should be applied for your Amazon Cognito user pool configured as a part of your Amplify project.

+ */ + Mfa?: UpdateBackendAuthMFAConfig; + + /** + *

Describes the OAuth policy and rules for your Amazon Cognito user pool, configured as a part of your Amplify project.

+ */ + OAuth?: UpdateBackendAuthOAuthConfig; + + /** + *

Describes the password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

+ */ + PasswordPolicy?: UpdateBackendAuthPasswordPolicyConfig; +} + +export namespace UpdateBackendAuthUserPoolConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthUserPoolConfig): any => ({ + ...obj, + }); +} + +/** + *

Defines the resource configuration when updating an authentication resource in your Amplify project.

+ */ +export interface UpdateBackendAuthResourceConfig { + /** + *

Defines the auth resources to be configured as a part of your Amplify project.

+ */ + AuthResources: AuthResources | string | undefined; + + /** + *

Describes the authorization configuration for the Amazon Cognito identity pool, provisioned as a part of your auth resource in the Amplify project.

+ */ + IdentityPoolConfigs?: UpdateBackendAuthIdentityPoolConfig; + + /** + *

Defines the service name to use when configuring an authentication resource in your Amplify project.

+ */ + Service: Service | string | undefined; + + /** + *

Describes the authentication configuration for the Amazon Cognito userpool, provisioned as a part of your auth resource in the Amplify project.

+ */ + UserPoolConfigs: UpdateBackendAuthUserPoolConfig | undefined; +} + +export namespace UpdateBackendAuthResourceConfig { + export const filterSensitiveLog = (obj: UpdateBackendAuthResourceConfig): any => ({ + ...obj, + }); +} + +/** + *

The request body for UpdateBackendAuth.

+ */ +export interface UpdateBackendAuthRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The resource configuration for this request object.

+ */ + ResourceConfig: UpdateBackendAuthResourceConfig | undefined; + + /** + *

The name of this resource.

+ */ + ResourceName: string | undefined; +} + +export namespace UpdateBackendAuthRequest { + export const filterSensitiveLog = (obj: UpdateBackendAuthRequest): any => ({ + ...obj, + }); +} + +export interface UpdateBackendAuthResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; +} + +export namespace UpdateBackendAuthResponse { + export const filterSensitiveLog = (obj: UpdateBackendAuthResponse): any => ({ + ...obj, + }); +} + +/** + *

The request object for this operation.

+ */ +export interface LoginAuthConfigReqObj { + /** + *

The Amazon Cognito identity pool ID used for the Amplify Admin UI login authorization.

+ */ + AwsCognitoIdentityPoolId?: string; + + /** + *

The AWS Region for the Amplify Admin login.

+ */ + AwsCognitoRegion?: string; + + /** + *

The Amazon Cognito user pool ID used for Amplify Admin UI login authentication.

+ */ + AwsUserPoolsId?: string; + + /** + *

The web client ID for the Amazon Cognito userpools.

+ */ + AwsUserPoolsWebClientId?: string; +} + +export namespace LoginAuthConfigReqObj { + export const filterSensitiveLog = (obj: LoginAuthConfigReqObj): any => ({ + ...obj, + }); +} + +/** + *

The request body for UpdateBackendConfig.

+ */ +export interface UpdateBackendConfigRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

Describes the Amazon Cognito configuration for Admin UI access.

+ */ + LoginAuthConfig?: LoginAuthConfigReqObj; +} + +export namespace UpdateBackendConfigRequest { + export const filterSensitiveLog = (obj: UpdateBackendConfigRequest): any => ({ + ...obj, + }); +} + +export interface UpdateBackendConfigResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The app ID for the backend manager.

+ */ + BackendManagerAppId?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

Describes the Amazon Cognito configurations for the Admin UI auth resource to login with.

+ */ + LoginAuthConfig?: LoginAuthConfigReqObj; +} + +export namespace UpdateBackendConfigResponse { + export const filterSensitiveLog = (obj: UpdateBackendConfigResponse): any => ({ + ...obj, + }); +} + +/** + *

The request body for UpdateBackendJob.

+ */ +export interface UpdateBackendJobRequest { + /** + *

The app ID.

+ */ + AppId: string | undefined; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName: string | undefined; + + /** + *

The ID for the job.

+ */ + JobId: string | undefined; + + /** + *

Filters the list of response objects to only include those with the specified operation name.

+ */ + Operation?: string; + + /** + *

Filters the list of response objects to only include those with the specified status.

+ */ + Status?: string; +} + +export namespace UpdateBackendJobRequest { + export const filterSensitiveLog = (obj: UpdateBackendJobRequest): any => ({ + ...obj, + }); +} + +export interface UpdateBackendJobResponse { + /** + *

The app ID.

+ */ + AppId?: string; + + /** + *

The name of the backend environment.

+ */ + BackendEnvironmentName?: string; + + /** + *

The time when the job was created.

+ */ + CreateTime?: string; + + /** + *

If the request failed, this is the returned error.

+ */ + Error?: string; + + /** + *

The ID for the job.

+ */ + JobId?: string; + + /** + *

The name of the operation.

+ */ + Operation?: string; + + /** + *

The current status of the request.

+ */ + Status?: string; + + /** + *

The time when the job was last updated.

+ */ + UpdateTime?: string; +} + +export namespace UpdateBackendJobResponse { + export const filterSensitiveLog = (obj: UpdateBackendJobResponse): any => ({ + ...obj, + }); +} diff --git a/clients/client-amplifybackend/package.json b/clients/client-amplifybackend/package.json new file mode 100644 index 000000000000..d97624a2c39e --- /dev/null +++ b/clients/client-amplifybackend/package.json @@ -0,0 +1,83 @@ +{ + "name": "@aws-sdk/client-amplifybackend", + "description": "AWS SDK for JavaScript Amplifybackend Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + "@aws-sdk/middleware-content-length": "1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + "@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": "1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + "@aws-sdk/middleware-signing": 
"1.0.0-rc.7", + "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-amplifybackend", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-amplifybackend" + } +} diff --git a/clients/client-amplifybackend/protocols/Aws_restJson1.ts b/clients/client-amplifybackend/protocols/Aws_restJson1.ts new file mode 100644 index 000000000000..0a9fd55db711 --- /dev/null +++ b/clients/client-amplifybackend/protocols/Aws_restJson1.ts @@ -0,0 +1,4171 @@ +import { CloneBackendCommandInput, CloneBackendCommandOutput } from "../commands/CloneBackendCommand"; +import { CreateBackendAPICommandInput, CreateBackendAPICommandOutput } from "../commands/CreateBackendAPICommand"; +import { CreateBackendAuthCommandInput, 
CreateBackendAuthCommandOutput } from "../commands/CreateBackendAuthCommand"; +import { CreateBackendCommandInput, CreateBackendCommandOutput } from "../commands/CreateBackendCommand"; +import { + CreateBackendConfigCommandInput, + CreateBackendConfigCommandOutput, +} from "../commands/CreateBackendConfigCommand"; +import { CreateTokenCommandInput, CreateTokenCommandOutput } from "../commands/CreateTokenCommand"; +import { DeleteBackendAPICommandInput, DeleteBackendAPICommandOutput } from "../commands/DeleteBackendAPICommand"; +import { DeleteBackendAuthCommandInput, DeleteBackendAuthCommandOutput } from "../commands/DeleteBackendAuthCommand"; +import { DeleteBackendCommandInput, DeleteBackendCommandOutput } from "../commands/DeleteBackendCommand"; +import { DeleteTokenCommandInput, DeleteTokenCommandOutput } from "../commands/DeleteTokenCommand"; +import { + GenerateBackendAPIModelsCommandInput, + GenerateBackendAPIModelsCommandOutput, +} from "../commands/GenerateBackendAPIModelsCommand"; +import { GetBackendAPICommandInput, GetBackendAPICommandOutput } from "../commands/GetBackendAPICommand"; +import { + GetBackendAPIModelsCommandInput, + GetBackendAPIModelsCommandOutput, +} from "../commands/GetBackendAPIModelsCommand"; +import { GetBackendAuthCommandInput, GetBackendAuthCommandOutput } from "../commands/GetBackendAuthCommand"; +import { GetBackendCommandInput, GetBackendCommandOutput } from "../commands/GetBackendCommand"; +import { GetBackendJobCommandInput, GetBackendJobCommandOutput } from "../commands/GetBackendJobCommand"; +import { GetTokenCommandInput, GetTokenCommandOutput } from "../commands/GetTokenCommand"; +import { ListBackendJobsCommandInput, ListBackendJobsCommandOutput } from "../commands/ListBackendJobsCommand"; +import { RemoveAllBackendsCommandInput, RemoveAllBackendsCommandOutput } from "../commands/RemoveAllBackendsCommand"; +import { + RemoveBackendConfigCommandInput, + RemoveBackendConfigCommandOutput, +} from 
"../commands/RemoveBackendConfigCommand"; +import { UpdateBackendAPICommandInput, UpdateBackendAPICommandOutput } from "../commands/UpdateBackendAPICommand"; +import { UpdateBackendAuthCommandInput, UpdateBackendAuthCommandOutput } from "../commands/UpdateBackendAuthCommand"; +import { + UpdateBackendConfigCommandInput, + UpdateBackendConfigCommandOutput, +} from "../commands/UpdateBackendConfigCommand"; +import { UpdateBackendJobCommandInput, UpdateBackendJobCommandOutput } from "../commands/UpdateBackendJobCommand"; +import { + AdditionalConstraintsElement, + BackendAPIAppSyncAuthSettings, + BackendAPIAuthType, + BackendAPIConflictResolution, + BackendAPIResourceConfig, + BackendAuthSocialProviderConfig, + BackendJobRespObj, + BadRequestException, + CreateBackendAuthForgotPasswordConfig, + CreateBackendAuthIdentityPoolConfig, + CreateBackendAuthMFAConfig, + CreateBackendAuthOAuthConfig, + CreateBackendAuthPasswordPolicyConfig, + CreateBackendAuthResourceConfig, + CreateBackendAuthUserPoolConfig, + EmailSettings, + GatewayTimeoutException, + LoginAuthConfigReqObj, + MfaTypesElement, + NotFoundException, + OAuthScopesElement, + RequiredSignUpAttributesElement, + ResourceConfig, + Settings, + SmsSettings, + SocialProviderSettings, + TooManyRequestsException, + UpdateBackendAuthForgotPasswordConfig, + UpdateBackendAuthIdentityPoolConfig, + UpdateBackendAuthMFAConfig, + UpdateBackendAuthOAuthConfig, + UpdateBackendAuthPasswordPolicyConfig, + UpdateBackendAuthResourceConfig, + UpdateBackendAuthUserPoolConfig, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { + SmithyException as __SmithyException, + extendedEncodeURIComponent as __extendedEncodeURIComponent, +} from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export 
const serializeAws_restJson1CloneBackendCommand = async ( + input: CloneBackendCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend/{AppId}/environments/{BackendEnvironmentName}/clone"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + if (input.BackendEnvironmentName !== undefined) { + const labelValue: string = input.BackendEnvironmentName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName."); + } + resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: BackendEnvironmentName."); + } + let body: any; + body = JSON.stringify({ + ...(input.TargetEnvironmentName !== undefined && { targetEnvironmentName: input.TargetEnvironmentName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1CreateBackendCommand = async ( + input: CreateBackendCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend"; + let body: any; + body = JSON.stringify({ + ...(input.AppId !== undefined && { appId: input.AppId }), + ...(input.AppName !== undefined && { appName: input.AppName }), + ...(input.BackendEnvironmentName !== undefined && { backendEnvironmentName: 
input.BackendEnvironmentName }), + ...(input.ResourceConfig !== undefined && { + resourceConfig: serializeAws_restJson1ResourceConfig(input.ResourceConfig, context), + }), + ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1CreateBackendAPICommand = async ( + input: CreateBackendAPICommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend/{AppId}/api"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + let body: any; + body = JSON.stringify({ + ...(input.BackendEnvironmentName !== undefined && { backendEnvironmentName: input.BackendEnvironmentName }), + ...(input.ResourceConfig !== undefined && { + resourceConfig: serializeAws_restJson1BackendAPIResourceConfig(input.ResourceConfig, context), + }), + ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1CreateBackendAuthCommand = async ( + input: CreateBackendAuthCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend/{AppId}/auth"; + if 
(input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + let body: any; + body = JSON.stringify({ + ...(input.BackendEnvironmentName !== undefined && { backendEnvironmentName: input.BackendEnvironmentName }), + ...(input.ResourceConfig !== undefined && { + resourceConfig: serializeAws_restJson1CreateBackendAuthResourceConfig(input.ResourceConfig, context), + }), + ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1CreateBackendConfigCommand = async ( + input: CreateBackendConfigCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend/{AppId}/config"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + let body: any; + body = JSON.stringify({ + ...(input.BackendManagerAppId !== undefined && { backendManagerAppId: input.BackendManagerAppId }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const 
serializeAws_restJson1CreateTokenCommand = async ( + input: CreateTokenCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/backend/{AppId}/challenge"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteBackendCommand = async ( + input: DeleteBackendCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/backend/{AppId}/environments/{BackendEnvironmentName}/remove"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + if (input.BackendEnvironmentName !== undefined) { + const labelValue: string = input.BackendEnvironmentName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName."); + } + resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: BackendEnvironmentName."); + } + let body: any; + const { hostname, protocol = "https", port 
} = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteBackendAPICommand = async ( + input: DeleteBackendAPICommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend/{AppId}/api/{BackendEnvironmentName}/remove"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + if (input.BackendEnvironmentName !== undefined) { + const labelValue: string = input.BackendEnvironmentName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName."); + } + resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: BackendEnvironmentName."); + } + let body: any; + body = JSON.stringify({ + ...(input.ResourceConfig !== undefined && { + resourceConfig: serializeAws_restJson1BackendAPIResourceConfig(input.ResourceConfig, context), + }), + ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteBackendAuthCommand = async ( + input: DeleteBackendAuthCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", 
+ }; + let resolvedPath = "/backend/{AppId}/auth/{BackendEnvironmentName}/remove"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + if (input.BackendEnvironmentName !== undefined) { + const labelValue: string = input.BackendEnvironmentName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName."); + } + resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: BackendEnvironmentName."); + } + let body: any; + body = JSON.stringify({ + ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteTokenCommand = async ( + input: DeleteTokenCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/backend/{AppId}/challenge/{SessionId}/remove"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + if (input.SessionId !== undefined) { + const labelValue: string = input.SessionId; + if (labelValue.length <= 0) { + throw new 
Error("Empty value provided for input HTTP label: SessionId."); + } + resolvedPath = resolvedPath.replace("{SessionId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: SessionId."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1GenerateBackendAPIModelsCommand = async ( + input: GenerateBackendAPIModelsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/backend/{AppId}/api/{BackendEnvironmentName}/generateModels"; + if (input.AppId !== undefined) { + const labelValue: string = input.AppId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: AppId."); + } + resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: AppId."); + } + if (input.BackendEnvironmentName !== undefined) { + const labelValue: string = input.BackendEnvironmentName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName."); + } + resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: BackendEnvironmentName."); + } + let body: any; + body = JSON.stringify({ + ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const 
serializeAws_restJson1GetBackendCommand = async (
  input: GetBackendCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  // POST /backend/{AppId}/details with optional environment name in the body.
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/details";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  const body = JSON.stringify({
    ...(input.BackendEnvironmentName !== undefined && { backendEnvironmentName: input.BackendEnvironmentName }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/api/{BackendEnvironmentName}/details with optional resource config. */
export const serializeAws_restJson1GetBackendAPICommand = async (
  input: GetBackendAPICommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/api/{BackendEnvironmentName}/details";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  requestPath = requestPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  const body = JSON.stringify({
    ...(input.ResourceConfig !== undefined && {
      resourceConfig: serializeAws_restJson1BackendAPIResourceConfig(input.ResourceConfig, context),
    }),
    ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/api/{BackendEnvironmentName}/getModels with optional resource name. */
export const serializeAws_restJson1GetBackendAPIModelsCommand = async (
  input: GetBackendAPIModelsCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/api/{BackendEnvironmentName}/getModels";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  requestPath = requestPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  const body = JSON.stringify({
    ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/auth/{BackendEnvironmentName}/details with optional resource name. */
export const serializeAws_restJson1GetBackendAuthCommand = async (
  input: GetBackendAuthCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/auth/{BackendEnvironmentName}/details";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  requestPath = requestPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  const body = JSON.stringify({
    ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** GET /backend/{AppId}/job/{BackendEnvironmentName}/{JobId} — no payload. */
export const serializeAws_restJson1GetBackendJobCommand = async (
  input: GetBackendJobCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "" };
  let requestPath = "/backend/{AppId}/job/{BackendEnvironmentName}/{JobId}";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  requestPath = requestPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  if (input.JobId === undefined) throw new Error("No value provided for input HTTP label: JobId.");
  if (input.JobId.length <= 0) throw new Error("Empty value provided for input HTTP label: JobId.");
  requestPath = requestPath.replace("{JobId}", __extendedEncodeURIComponent(input.JobId));
  const body = undefined;
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "GET", headers, path: requestPath, body });
};

/** GET /backend/{AppId}/challenge/{SessionId} — no payload. */
export const serializeAws_restJson1GetTokenCommand = async (
  input: GetTokenCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "" };
  let requestPath = "/backend/{AppId}/challenge/{SessionId}";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.SessionId === undefined) throw new Error("No value provided for input HTTP label: SessionId.");
  if (input.SessionId.length <= 0) throw new Error("Empty value provided for input HTTP label: SessionId.");
  requestPath = requestPath.replace("{SessionId}", __extendedEncodeURIComponent(input.SessionId));
  const body = undefined;
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "GET", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/job/{BackendEnvironmentName} — list/filter jobs. */
export const serializeAws_restJson1ListBackendJobsCommand = async (
  input: ListBackendJobsCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  // NOTE: keeps the original `resolvedPath` name — the tail of this function
  // continues past this span and refers to it.
  let resolvedPath = "/backend/{AppId}/job/{BackendEnvironmentName}";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  let body: any;
  body = JSON.stringify({
    ...(input.JobId !== undefined && { jobId: input.JobId }),
    ...(input.MaxResults !== undefined && { maxResults: input.MaxResults }),
    ...(input.NextToken !== undefined && { nextToken: input.NextToken }),
    ...(input.Operation !== undefined && { operation: input.Operation }),
    ...(input.Status !== undefined && { status: input.Status }),
  });
  const { hostname, protocol = "https", port } = await
context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: resolvedPath, body });
};

/** POST /backend/{AppId}/remove — optionally also cleans the Amplify app. */
export const serializeAws_restJson1RemoveAllBackendsCommand = async (
  input: RemoveAllBackendsCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/remove";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  const body = JSON.stringify({
    ...(input.CleanAmplifyApp !== undefined && { cleanAmplifyApp: input.CleanAmplifyApp }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/config/remove — no payload. */
export const serializeAws_restJson1RemoveBackendConfigCommand = async (
  input: RemoveBackendConfigCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "" };
  let requestPath = "/backend/{AppId}/config/remove";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  const body = undefined;
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/api/{BackendEnvironmentName} — update an API resource. */
export const serializeAws_restJson1UpdateBackendAPICommand = async (
  input: UpdateBackendAPICommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/api/{BackendEnvironmentName}";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  requestPath = requestPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  const body = JSON.stringify({
    ...(input.ResourceConfig !== undefined && {
      resourceConfig: serializeAws_restJson1BackendAPIResourceConfig(input.ResourceConfig, context),
    }),
    ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/auth/{BackendEnvironmentName} — update an auth resource. */
export const serializeAws_restJson1UpdateBackendAuthCommand = async (
  input: UpdateBackendAuthCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/auth/{BackendEnvironmentName}";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  requestPath = requestPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  const body = JSON.stringify({
    ...(input.ResourceConfig !== undefined && {
      resourceConfig: serializeAws_restJson1UpdateBackendAuthResourceConfig(input.ResourceConfig, context),
    }),
    ...(input.ResourceName !== undefined && { resourceName: input.ResourceName }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/config/update — update the login auth config. */
export const serializeAws_restJson1UpdateBackendConfigCommand = async (
  input: UpdateBackendConfigCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  let requestPath = "/backend/{AppId}/config/update";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  requestPath = requestPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  const body = JSON.stringify({
    ...(input.LoginAuthConfig !== undefined && {
      loginAuthConfig: serializeAws_restJson1LoginAuthConfigReqObj(input.LoginAuthConfig, context),
    }),
  });
  const { hostname, protocol = "https", port } = await context.endpoint();
  return new __HttpRequest({ protocol, hostname, port, method: "POST", headers, path: requestPath, body });
};

/** POST /backend/{AppId}/job/{BackendEnvironmentName}/{JobId} — update a job. */
export const serializeAws_restJson1UpdateBackendJobCommand = async (
  input: UpdateBackendJobCommandInput,
  context: __SerdeContext
): Promise<__HttpRequest> => {
  const headers: any = { "Content-Type": "application/json" };
  // NOTE: keeps the original `resolvedPath`/`body` names — the tail of this
  // function continues past this span and refers to them.
  let resolvedPath = "/backend/{AppId}/job/{BackendEnvironmentName}/{JobId}";
  if (input.AppId === undefined) throw new Error("No value provided for input HTTP label: AppId.");
  if (input.AppId.length <= 0) throw new Error("Empty value provided for input HTTP label: AppId.");
  resolvedPath = resolvedPath.replace("{AppId}", __extendedEncodeURIComponent(input.AppId));
  if (input.BackendEnvironmentName === undefined)
    throw new Error("No value provided for input HTTP label: BackendEnvironmentName.");
  if (input.BackendEnvironmentName.length <= 0)
    throw new Error("Empty value provided for input HTTP label: BackendEnvironmentName.");
  resolvedPath = resolvedPath.replace("{BackendEnvironmentName}", __extendedEncodeURIComponent(input.BackendEnvironmentName));
  if (input.JobId === undefined) throw new Error("No value provided for input HTTP label: JobId.");
  if (input.JobId.length <= 0) throw new Error("Empty value provided for input HTTP label: JobId.");
  resolvedPath = resolvedPath.replace("{JobId}", __extendedEncodeURIComponent(input.JobId));
  let body: any;
  body = JSON.stringify({
    ...(input.Operation !== undefined && { operation: input.Operation }),
    ...(input.Status !== undefined && { status: input.Status }),
  });
  const { hostname, protocol =
"https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const deserializeAws_restJson1CloneBackendCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CloneBackendCommandError(output, context); + } + const contents: CloneBackendCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CloneBackendCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await 
deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateBackendCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateBackendCommandError(output, context); + } + const contents: CreateBackendCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: 
undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateBackendCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + 
break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateBackendAPICommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateBackendAPICommandError(output, context); + } + const contents: CreateBackendAPICommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + 
contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateBackendAPICommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + 
response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateBackendAuthCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateBackendAuthCommandError(output, context); + } + const contents: CreateBackendAuthCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateBackendAuthCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await 
deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateBackendConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateBackendConfigCommandError(output, context); + } + const contents: CreateBackendConfigCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + JobId: undefined, + Status: undefined, + }; + const 
data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateBackendConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + 
name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateTokenCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateTokenCommandError(output, context); + } + const contents: CreateTokenCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + ChallengeCode: undefined, + SessionId: undefined, + Ttl: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.challengeCode !== undefined && data.challengeCode !== null) { + contents.ChallengeCode = data.challengeCode; + } + if (data.sessionId !== undefined && data.sessionId !== null) { + contents.SessionId = data.sessionId; + } + if (data.ttl !== undefined && data.ttl !== null) { + contents.Ttl = data.ttl; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateTokenCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + 
case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteBackendCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteBackendCommandError(output, context); + } + const contents: DeleteBackendCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, 
+ BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteBackendCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await 
deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteBackendAPICommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteBackendAPICommandError(output, context); + } + const contents: DeleteBackendAPICommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== 
undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteBackendAPICommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + 
$fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteBackendAuthCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteBackendAuthCommandError(output, context); + } + const contents: DeleteBackendAuthCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteBackendAuthCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + 
case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteTokenCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteTokenCommandError(output, context); + } + const contents: DeleteTokenCommandOutput = { + $metadata: deserializeMetadata(output), + IsSuccess: undefined, + 
}; + const data: any = await parseBody(output.body, context); + if (data.isSuccess !== undefined && data.isSuccess !== null) { + contents.IsSuccess = data.isSuccess; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteTokenCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + 
$metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GenerateBackendAPIModelsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GenerateBackendAPIModelsCommandError(output, context); + } + const contents: GenerateBackendAPIModelsCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GenerateBackendAPIModelsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) 
{ + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetBackendCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetBackendCommandError(output, context); + } + const contents: GetBackendCommandOutput = { + $metadata: deserializeMetadata(output), + AmplifyMetaConfig: 
undefined, + AppId: undefined, + AppName: undefined, + BackendEnvironmentList: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.amplifyMetaConfig !== undefined && data.amplifyMetaConfig !== null) { + contents.AmplifyMetaConfig = data.amplifyMetaConfig; + } + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.appName !== undefined && data.appName !== null) { + contents.AppName = data.appName; + } + if (data.backendEnvironmentList !== undefined && data.backendEnvironmentList !== null) { + contents.BackendEnvironmentList = deserializeAws_restJson1ListOf__string(data.backendEnvironmentList, context); + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetBackendCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetBackendAPICommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetBackendAPICommandError(output, context); + } + const contents: GetBackendAPICommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + ResourceConfig: undefined, + ResourceName: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error 
= data.error; + } + if (data.resourceConfig !== undefined && data.resourceConfig !== null) { + contents.ResourceConfig = deserializeAws_restJson1BackendAPIResourceConfig(data.resourceConfig, context); + } + if (data.resourceName !== undefined && data.resourceName !== null) { + contents.ResourceName = data.resourceName; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetBackendAPICommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code 
|| errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetBackendAPIModelsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetBackendAPIModelsCommandError(output, context); + } + const contents: GetBackendAPIModelsCommandOutput = { + $metadata: deserializeMetadata(output), + Models: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.models !== undefined && data.models !== null) { + contents.Models = data.models; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetBackendAPIModelsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await 
deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetBackendAuthCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetBackendAuthCommandError(output, context); + } + const contents: GetBackendAuthCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + ResourceConfig: undefined, + ResourceName: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + 
contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.resourceConfig !== undefined && data.resourceConfig !== null) { + contents.ResourceConfig = deserializeAws_restJson1CreateBackendAuthResourceConfig(data.resourceConfig, context); + } + if (data.resourceName !== undefined && data.resourceName !== null) { + contents.ResourceName = data.resourceName; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetBackendAuthCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + 
$metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetBackendJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetBackendJobCommandError(output, context); + } + const contents: GetBackendJobCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + CreateTime: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + UpdateTime: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.createTime !== undefined && data.createTime !== null) { + contents.CreateTime = data.createTime; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + if (data.updateTime !== undefined && 
data.updateTime !== null) { + contents.UpdateTime = data.updateTime; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetBackendJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || 
response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetTokenCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetTokenCommandError(output, context); + } + const contents: GetTokenCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + ChallengeCode: undefined, + SessionId: undefined, + Ttl: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.challengeCode !== undefined && data.challengeCode !== null) { + contents.ChallengeCode = data.challengeCode; + } + if (data.sessionId !== undefined && data.sessionId !== null) { + contents.SessionId = data.sessionId; + } + if (data.ttl !== undefined && data.ttl !== null) { + contents.Ttl = data.ttl; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetTokenCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await 
deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListBackendJobsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListBackendJobsCommandError(output, context); + } + const contents: ListBackendJobsCommandOutput = { + $metadata: deserializeMetadata(output), + Jobs: undefined, + NextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.jobs !== undefined && data.jobs !== null) { + contents.Jobs = deserializeAws_restJson1ListOfBackendJobRespObj(data.jobs, context); + } + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.NextToken = data.nextToken; + } + return Promise.resolve(contents); +}; + 
+const deserializeAws_restJson1ListBackendJobsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return 
Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1RemoveAllBackendsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1RemoveAllBackendsCommandError(output, context); + } + const contents: RemoveAllBackendsCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1RemoveAllBackendsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await 
deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1RemoveBackendConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1RemoveBackendConfigCommandError(output, context); + } + const contents: RemoveBackendConfigCommandOutput = { + $metadata: deserializeMetadata(output), + Error: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1RemoveBackendConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + 
...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UpdateBackendAPICommand = async ( + output: __HttpResponse, + context: 
__SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateBackendAPICommandError(output, context); + } + const contents: UpdateBackendAPICommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateBackendAPICommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await 
deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UpdateBackendAuthCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateBackendAuthCommandError(output, context); + } + const contents: UpdateBackendAuthCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendEnvironmentName: undefined, + Error: undefined, + JobId: undefined, + Operation: undefined, + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) { + 
contents.BackendEnvironmentName = data.backendEnvironmentName; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.jobId !== undefined && data.jobId !== null) { + contents.JobId = data.jobId; + } + if (data.operation !== undefined && data.operation !== null) { + contents.Operation = data.operation; + } + if (data.status !== undefined && data.status !== null) { + contents.Status = data.status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateBackendAuthCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "BadRequestException": + case "com.amazonaws.amplifybackend#BadRequestException": + response = { + ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "GatewayTimeoutException": + case "com.amazonaws.amplifybackend#GatewayTimeoutException": + response = { + ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NotFoundException": + case "com.amazonaws.amplifybackend#NotFoundException": + response = { + ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "TooManyRequestsException": + case "com.amazonaws.amplifybackend#TooManyRequestsException": + response = { + ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UpdateBackendConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateBackendConfigCommandError(output, context); + } + const contents: UpdateBackendConfigCommandOutput = { + $metadata: deserializeMetadata(output), + AppId: undefined, + BackendManagerAppId: undefined, + Error: undefined, + LoginAuthConfig: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.appId !== undefined && data.appId !== null) { + contents.AppId = data.appId; + } + if (data.backendManagerAppId !== undefined && data.backendManagerAppId !== null) { + contents.BackendManagerAppId = data.backendManagerAppId; + } + if (data.error !== undefined && data.error !== null) { + contents.Error = data.error; + } + if (data.loginAuthConfig !== undefined && data.loginAuthConfig !== null) { + contents.LoginAuthConfig = deserializeAws_restJson1LoginAuthConfigReqObj(data.loginAuthConfig, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateBackendConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + 
// Continuation of the UpdateBackendConfig error deserializer: resolve the
// modeled exception by error code, defaulting to a generic client fault.
let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "BadRequestException":
    case "com.amazonaws.amplifybackend#BadRequestException":
      response = {
        ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "GatewayTimeoutException":
    case "com.amazonaws.amplifybackend#GatewayTimeoutException":
      response = {
        ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "NotFoundException":
    case "com.amazonaws.amplifybackend#NotFoundException":
      response = {
        ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "TooManyRequestsException":
    case "com.amazonaws.amplifybackend#TooManyRequestsException":
      response = {
        ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      // Unknown error shape: surface the raw parsed body as a client fault.
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful UpdateBackendJob response; non-2xx statuses are
// delegated to the paired *CommandError deserializer.
export const deserializeAws_restJson1UpdateBackendJobCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1UpdateBackendJobCommandError(output, context);
  }
  const contents: UpdateBackendJobCommandOutput = {
    $metadata: deserializeMetadata(output),
    AppId: undefined,
    BackendEnvironmentName: undefined,
    CreateTime: undefined,
    Error: undefined,
    JobId: undefined,
    Operation: undefined,
    Status: undefined,
    UpdateTime: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.appId !== undefined && data.appId !== null) {
    contents.AppId = data.appId;
  }
  if (data.backendEnvironmentName !== undefined && data.backendEnvironmentName !== null) {
    contents.BackendEnvironmentName = data.backendEnvironmentName;
  }
  if (data.createTime !== undefined && data.createTime !== null) {
    contents.CreateTime = data.createTime;
  }
  if (data.error !== undefined && data.error !== null) {
    contents.Error = data.error;
  }
  if (data.jobId !== undefined && data.jobId !== null) {
    contents.JobId = data.jobId;
  }
  if (data.operation !== undefined && data.operation !== null) {
    contents.Operation = data.operation;
  }
  if (data.status !== undefined && data.status !== null) {
    contents.Status = data.status;
  }
  if (data.updateTime !== undefined && data.updateTime !== null) {
    contents.UpdateTime = data.updateTime;
  }
  return Promise.resolve(contents);
};

// Error deserializer for UpdateBackendJob (switch continues below).
const deserializeAws_restJson1UpdateBackendJobCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "BadRequestException":
    case "com.amazonaws.amplifybackend#BadRequestException":
      response = {
        ...(await deserializeAws_restJson1BadRequestExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata:
// Remaining cases of the UpdateBackendJob error deserializer.
deserializeMetadata(output),
      };
      break;
    case "GatewayTimeoutException":
    case "com.amazonaws.amplifybackend#GatewayTimeoutException":
      response = {
        ...(await deserializeAws_restJson1GatewayTimeoutExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "NotFoundException":
    case "com.amazonaws.amplifybackend#NotFoundException":
      response = {
        ...(await deserializeAws_restJson1NotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "TooManyRequestsException":
    case "com.amazonaws.amplifybackend#TooManyRequestsException":
      response = {
        ...(await deserializeAws_restJson1TooManyRequestsExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      // Unknown error shape: surface the raw parsed body as a client fault.
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Maps a parsed BadRequestException body onto the modeled exception (client fault).
const deserializeAws_restJson1BadRequestExceptionResponse = async (
  parsedOutput: any,
  context: __SerdeContext
): Promise => {
  const contents: BadRequestException = {
    name: "BadRequestException",
    $fault: "client",
    $metadata: deserializeMetadata(parsedOutput),
    Message: undefined,
  };
  const data: any = parsedOutput.body;
  if (data.message !== undefined && data.message !== null) {
    contents.Message = data.message;
  }
  return contents;
};

// GatewayTimeoutException is modeled as a server fault.
const deserializeAws_restJson1GatewayTimeoutExceptionResponse = async (
  parsedOutput: any,
  context: __SerdeContext
): Promise => {
  const contents: GatewayTimeoutException = {
    name: "GatewayTimeoutException",
    $fault: "server",
    $metadata: deserializeMetadata(parsedOutput),
    Message: undefined,
  };
  const data: any = parsedOutput.body;
  if (data.message !== undefined && data.message !== null) {
    contents.Message = data.message;
  }
  return contents;
};

// NotFoundException additionally carries the missing resource's type.
const deserializeAws_restJson1NotFoundExceptionResponse = async (
  parsedOutput: any,
  context: __SerdeContext
): Promise => {
  const contents: NotFoundException = {
    name: "NotFoundException",
    $fault: "client",
    $metadata: deserializeMetadata(parsedOutput),
    Message: undefined,
    ResourceType: undefined,
  };
  const data: any = parsedOutput.body;
  if (data.message !== undefined && data.message !== null) {
    contents.Message = data.message;
  }
  if (data.resourceType !== undefined && data.resourceType !== null) {
    contents.ResourceType = data.resourceType;
  }
  return contents;
};

// TooManyRequestsException additionally carries the throttled limit type.
const deserializeAws_restJson1TooManyRequestsExceptionResponse = async (
  parsedOutput: any,
  context: __SerdeContext
): Promise => {
  const contents: TooManyRequestsException = {
    name: "TooManyRequestsException",
    $fault: "client",
    $metadata: deserializeMetadata(parsedOutput),
    LimitType: undefined,
    Message: undefined,
  };
  const data: any = parsedOutput.body;
  if (data.limitType !== undefined && data.limitType !== null) {
    contents.LimitType = data.limitType;
  }
  if (data.message !== undefined && data.message !== null) {
    contents.Message = data.message;
  }
  return contents;
};

// Serializer: model (PascalCase) -> wire (camelCase) for AppSync auth settings.
// Each field is emitted only when defined (body continues below).
const serializeAws_restJson1BackendAPIAppSyncAuthSettings = (
  input: BackendAPIAppSyncAuthSettings,
  context: __SerdeContext
): any => {
  return {
    ...(input.CognitoUserPoolId !== undefined && { cognitoUserPoolId: input.CognitoUserPoolId }),
    ...(input.Description !== undefined && { description: input.Description }),
    ...(input.ExpirationTime !== undefined && { expirationTime: input.ExpirationTime }),
    ...(input.OpenIDAuthTTL !==
// Remaining OpenID fields of the AppSync auth settings serializer.
undefined && { openIDAuthTTL: input.OpenIDAuthTTL }),
    ...(input.OpenIDClientId !== undefined && { openIDClientId: input.OpenIDClientId }),
    ...(input.OpenIDIatTTL !== undefined && { openIDIatTTL: input.OpenIDIatTTL }),
    ...(input.OpenIDIssueURL !== undefined && { openIDIssueURL: input.OpenIDIssueURL }),
    ...(input.OpenIDProviderName !== undefined && { openIDProviderName: input.OpenIDProviderName }),
  };
};

// Serializer: auth mode plus its nested AppSync settings.
const serializeAws_restJson1BackendAPIAuthType = (input: BackendAPIAuthType, context: __SerdeContext): any => {
  return {
    ...(input.Mode !== undefined && { mode: input.Mode }),
    ...(input.Settings !== undefined && {
      settings: serializeAws_restJson1BackendAPIAppSyncAuthSettings(input.Settings, context),
    }),
  };
};

// Serializer: DataStore conflict-resolution strategy.
const serializeAws_restJson1BackendAPIConflictResolution = (
  input: BackendAPIConflictResolution,
  context: __SerdeContext
): any => {
  return {
    ...(input.ResolutionStrategy !== undefined && { resolutionStrategy: input.ResolutionStrategy }),
  };
};

// Serializer: full backend API resource configuration.
const serializeAws_restJson1BackendAPIResourceConfig = (
  input: BackendAPIResourceConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.AdditionalAuthTypes !== undefined && {
      additionalAuthTypes: serializeAws_restJson1ListOfBackendAPIAuthType(input.AdditionalAuthTypes, context),
    }),
    ...(input.ApiName !== undefined && { apiName: input.ApiName }),
    ...(input.ConflictResolution !== undefined && {
      conflictResolution: serializeAws_restJson1BackendAPIConflictResolution(input.ConflictResolution, context),
    }),
    ...(input.DefaultAuthType !== undefined && {
      defaultAuthType: serializeAws_restJson1BackendAPIAuthType(input.DefaultAuthType, context),
    }),
    ...(input.Service !== undefined && { service: input.Service }),
    ...(input.TransformSchema !== undefined && { transformSchema: input.TransformSchema }),
  };
};

// Serializer: social sign-in credentials; note snake_case wire keys.
const serializeAws_restJson1BackendAuthSocialProviderConfig = (
  input: BackendAuthSocialProviderConfig,
  context: __SerdeContext
): any
=> {
  return {
    ...(input.ClientId !== undefined && { client_id: input.ClientId }),
    ...(input.ClientSecret !== undefined && { client_secret: input.ClientSecret }),
  };
};

// Serializer: forgot-password delivery (email and/or SMS) for CreateBackendAuth.
const serializeAws_restJson1CreateBackendAuthForgotPasswordConfig = (
  input: CreateBackendAuthForgotPasswordConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.DeliveryMethod !== undefined && { deliveryMethod: input.DeliveryMethod }),
    ...(input.EmailSettings !== undefined && {
      emailSettings: serializeAws_restJson1EmailSettings(input.EmailSettings, context),
    }),
    ...(input.SmsSettings !== undefined && {
      smsSettings: serializeAws_restJson1SmsSettings(input.SmsSettings, context),
    }),
  };
};

// Serializer: Cognito identity pool settings for CreateBackendAuth.
const serializeAws_restJson1CreateBackendAuthIdentityPoolConfig = (
  input: CreateBackendAuthIdentityPoolConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.IdentityPoolName !== undefined && { identityPoolName: input.IdentityPoolName }),
    ...(input.UnauthenticatedLogin !== undefined && { unauthenticatedLogin: input.UnauthenticatedLogin }),
  };
};

// Serializer: MFA mode and settings; MFAMode keeps its casing on the wire.
const serializeAws_restJson1CreateBackendAuthMFAConfig = (
  input: CreateBackendAuthMFAConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.MFAMode !== undefined && { MFAMode: input.MFAMode }),
    ...(input.Settings !== undefined && { settings: serializeAws_restJson1Settings(input.Settings, context) }),
  };
};

// Serializer: OAuth/hosted-UI settings for CreateBackendAuth.
const serializeAws_restJson1CreateBackendAuthOAuthConfig = (
  input: CreateBackendAuthOAuthConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.DomainPrefix !== undefined && { domainPrefix: input.DomainPrefix }),
    ...(input.OAuthGrantType !== undefined && { oAuthGrantType: input.OAuthGrantType }),
    ...(input.OAuthScopes !== undefined && {
      oAuthScopes: serializeAws_restJson1ListOfOAuthScopesElement(input.OAuthScopes, context),
    }),
    ...(input.RedirectSignInURIs !== undefined && {
      redirectSignInURIs:
serializeAws_restJson1ListOf__string(input.RedirectSignInURIs, context),
    }),
    ...(input.RedirectSignOutURIs !== undefined && {
      redirectSignOutURIs: serializeAws_restJson1ListOf__string(input.RedirectSignOutURIs, context),
    }),
    ...(input.SocialProviderSettings !== undefined && {
      socialProviderSettings: serializeAws_restJson1SocialProviderSettings(input.SocialProviderSettings, context),
    }),
  };
};

// Serializer: password policy (extra constraints + minimum length).
const serializeAws_restJson1CreateBackendAuthPasswordPolicyConfig = (
  input: CreateBackendAuthPasswordPolicyConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.AdditionalConstraints !== undefined && {
      additionalConstraints: serializeAws_restJson1ListOfAdditionalConstraintsElement(
        input.AdditionalConstraints,
        context
      ),
    }),
    ...(input.MinimumLength !== undefined && { minimumLength: input.MinimumLength }),
  };
};

// Serializer: top-level auth resource config for CreateBackendAuth.
const serializeAws_restJson1CreateBackendAuthResourceConfig = (
  input: CreateBackendAuthResourceConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.AuthResources !== undefined && { authResources: input.AuthResources }),
    ...(input.IdentityPoolConfigs !== undefined && {
      identityPoolConfigs: serializeAws_restJson1CreateBackendAuthIdentityPoolConfig(
        input.IdentityPoolConfigs,
        context
      ),
    }),
    ...(input.Service !== undefined && { service: input.Service }),
    ...(input.UserPoolConfigs !== undefined && {
      userPoolConfigs: serializeAws_restJson1CreateBackendAuthUserPoolConfig(input.UserPoolConfigs, context),
    }),
  };
};

// Serializer: Cognito user pool settings for CreateBackendAuth (continues below).
const serializeAws_restJson1CreateBackendAuthUserPoolConfig = (
  input: CreateBackendAuthUserPoolConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.ForgotPassword !== undefined && {
      forgotPassword: serializeAws_restJson1CreateBackendAuthForgotPasswordConfig(input.ForgotPassword, context),
    }),
    ...(input.Mfa !== undefined && { mfa: serializeAws_restJson1CreateBackendAuthMFAConfig(input.Mfa, context) }),
    ...(input.OAuth !== undefined && {
// Remaining fields of the CreateBackendAuth user pool serializer.
oAuth: serializeAws_restJson1CreateBackendAuthOAuthConfig(input.OAuth, context),
    }),
    ...(input.PasswordPolicy !== undefined && {
      passwordPolicy: serializeAws_restJson1CreateBackendAuthPasswordPolicyConfig(input.PasswordPolicy, context),
    }),
    ...(input.RequiredSignUpAttributes !== undefined && {
      requiredSignUpAttributes: serializeAws_restJson1ListOfRequiredSignUpAttributesElement(
        input.RequiredSignUpAttributes,
        context
      ),
    }),
    ...(input.SignInMethod !== undefined && { signInMethod: input.SignInMethod }),
    ...(input.UserPoolName !== undefined && { userPoolName: input.UserPoolName }),
  };
};

// Serializer: verification email message/subject.
const serializeAws_restJson1EmailSettings = (input: EmailSettings, context: __SerdeContext): any => {
  return {
    ...(input.EmailMessage !== undefined && { emailMessage: input.EmailMessage }),
    ...(input.EmailSubject !== undefined && { emailSubject: input.EmailSubject }),
  };
};

// List serializers: strings and enum-like members pass through element-wise.
const serializeAws_restJson1ListOf__string = (input: string[], context: __SerdeContext): any => {
  return input.map((entry) => entry);
};

const serializeAws_restJson1ListOfAdditionalConstraintsElement = (
  input: (AdditionalConstraintsElement | string)[],
  context: __SerdeContext
): any => {
  return input.map((entry) => entry);
};

const serializeAws_restJson1ListOfBackendAPIAuthType = (input: BackendAPIAuthType[], context: __SerdeContext): any => {
  return input.map((entry) => serializeAws_restJson1BackendAPIAuthType(entry, context));
};

const serializeAws_restJson1ListOfMfaTypesElement = (
  input: (MfaTypesElement | string)[],
  context: __SerdeContext
): any => {
  return input.map((entry) => entry);
};

const serializeAws_restJson1ListOfOAuthScopesElement = (
  input: (OAuthScopesElement | string)[],
  context: __SerdeContext
): any => {
  return input.map((entry) => entry);
};

const serializeAws_restJson1ListOfRequiredSignUpAttributesElement = (
  input: (RequiredSignUpAttributesElement | string)[],
  context: __SerdeContext
): any => {
  return input.map((entry) => entry);
};

// Serializer: Amplify login auth config; note snake_case wire keys.
const serializeAws_restJson1LoginAuthConfigReqObj = (input: LoginAuthConfigReqObj, context: __SerdeContext): any => {
  return {
    ...(input.AwsCognitoIdentityPoolId !== undefined && {
      aws_cognito_identity_pool_id: input.AwsCognitoIdentityPoolId,
    }),
    ...(input.AwsCognitoRegion !== undefined && { aws_cognito_region: input.AwsCognitoRegion }),
    ...(input.AwsUserPoolsId !== undefined && { aws_user_pools_id: input.AwsUserPoolsId }),
    ...(input.AwsUserPoolsWebClientId !== undefined && { aws_user_pools_web_client_id: input.AwsUserPoolsWebClientId }),
  };
};

// ResourceConfig has no members; serializes to an empty object.
const serializeAws_restJson1ResourceConfig = (input: ResourceConfig, context: __SerdeContext): any => {
  return {};
};

// Serializer: MFA settings (types + SMS message).
const serializeAws_restJson1Settings = (input: Settings, context: __SerdeContext): any => {
  return {
    ...(input.MfaTypes !== undefined && {
      mfaTypes: serializeAws_restJson1ListOfMfaTypesElement(input.MfaTypes, context),
    }),
    ...(input.SmsMessage !== undefined && { smsMessage: input.SmsMessage }),
  };
};

const serializeAws_restJson1SmsSettings = (input: SmsSettings, context: __SerdeContext): any => {
  return {
    ...(input.SmsMessage !== undefined && { smsMessage: input.SmsMessage }),
  };
};

// Serializer: per-provider social sign-in credentials; provider keys stay PascalCase.
const serializeAws_restJson1SocialProviderSettings = (input: SocialProviderSettings, context: __SerdeContext): any => {
  return {
    ...(input.Facebook !== undefined && {
      Facebook: serializeAws_restJson1BackendAuthSocialProviderConfig(input.Facebook, context),
    }),
    ...(input.Google !== undefined && {
      Google: serializeAws_restJson1BackendAuthSocialProviderConfig(input.Google, context),
    }),
    ...(input.LoginWithAmazon !== undefined && {
      LoginWithAmazon: serializeAws_restJson1BackendAuthSocialProviderConfig(input.LoginWithAmazon, context),
    }),
  };
};

// Update* serializers mirror the Create* ones above for UpdateBackendAuth.
const serializeAws_restJson1UpdateBackendAuthForgotPasswordConfig = (
  input: UpdateBackendAuthForgotPasswordConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.DeliveryMethod !== undefined && { deliveryMethod: input.DeliveryMethod }),
    ...(input.EmailSettings !== undefined && {
      emailSettings: serializeAws_restJson1EmailSettings(input.EmailSettings, context),
    }),
    ...(input.SmsSettings !== undefined && {
      smsSettings: serializeAws_restJson1SmsSettings(input.SmsSettings, context),
    }),
  };
};

const serializeAws_restJson1UpdateBackendAuthIdentityPoolConfig = (
  input: UpdateBackendAuthIdentityPoolConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.UnauthenticatedLogin !== undefined && { unauthenticatedLogin: input.UnauthenticatedLogin }),
  };
};

const serializeAws_restJson1UpdateBackendAuthMFAConfig = (
  input: UpdateBackendAuthMFAConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.MFAMode !== undefined && { MFAMode: input.MFAMode }),
    ...(input.Settings !== undefined && { settings: serializeAws_restJson1Settings(input.Settings, context) }),
  };
};

const serializeAws_restJson1UpdateBackendAuthOAuthConfig = (
  input: UpdateBackendAuthOAuthConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.DomainPrefix !== undefined && { domainPrefix: input.DomainPrefix }),
    ...(input.OAuthGrantType !== undefined && { oAuthGrantType: input.OAuthGrantType }),
    ...(input.OAuthScopes !== undefined && {
      oAuthScopes: serializeAws_restJson1ListOfOAuthScopesElement(input.OAuthScopes, context),
    }),
    ...(input.RedirectSignInURIs !== undefined && {
      redirectSignInURIs: serializeAws_restJson1ListOf__string(input.RedirectSignInURIs, context),
    }),
    ...(input.RedirectSignOutURIs !== undefined && {
      redirectSignOutURIs: serializeAws_restJson1ListOf__string(input.RedirectSignOutURIs, context),
    }),
    ...(input.SocialProviderSettings !== undefined && {
      socialProviderSettings: serializeAws_restJson1SocialProviderSettings(input.SocialProviderSettings, context),
    }),
  };
};

const serializeAws_restJson1UpdateBackendAuthPasswordPolicyConfig = (
  input:
UpdateBackendAuthPasswordPolicyConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.AdditionalConstraints !== undefined && {
      additionalConstraints: serializeAws_restJson1ListOfAdditionalConstraintsElement(
        input.AdditionalConstraints,
        context
      ),
    }),
    ...(input.MinimumLength !== undefined && { minimumLength: input.MinimumLength }),
  };
};

const serializeAws_restJson1UpdateBackendAuthResourceConfig = (
  input: UpdateBackendAuthResourceConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.AuthResources !== undefined && { authResources: input.AuthResources }),
    ...(input.IdentityPoolConfigs !== undefined && {
      identityPoolConfigs: serializeAws_restJson1UpdateBackendAuthIdentityPoolConfig(
        input.IdentityPoolConfigs,
        context
      ),
    }),
    ...(input.Service !== undefined && { service: input.Service }),
    ...(input.UserPoolConfigs !== undefined && {
      userPoolConfigs: serializeAws_restJson1UpdateBackendAuthUserPoolConfig(input.UserPoolConfigs, context),
    }),
  };
};

const serializeAws_restJson1UpdateBackendAuthUserPoolConfig = (
  input: UpdateBackendAuthUserPoolConfig,
  context: __SerdeContext
): any => {
  return {
    ...(input.ForgotPassword !== undefined && {
      forgotPassword: serializeAws_restJson1UpdateBackendAuthForgotPasswordConfig(input.ForgotPassword, context),
    }),
    ...(input.Mfa !== undefined && { mfa: serializeAws_restJson1UpdateBackendAuthMFAConfig(input.Mfa, context) }),
    ...(input.OAuth !== undefined && {
      oAuth: serializeAws_restJson1UpdateBackendAuthOAuthConfig(input.OAuth, context),
    }),
    ...(input.PasswordPolicy !== undefined && {
      passwordPolicy: serializeAws_restJson1UpdateBackendAuthPasswordPolicyConfig(input.PasswordPolicy, context),
    }),
  };
};

// Deserializer: wire (camelCase) -> model (PascalCase); absent/null fields
// become undefined (body continues below).
const deserializeAws_restJson1BackendAPIAppSyncAuthSettings = (
  output: any,
  context: __SerdeContext
): BackendAPIAppSyncAuthSettings => {
  return {
    CognitoUserPoolId:
      output.cognitoUserPoolId !== undefined &&
// Remaining fields of the AppSync auth settings deserializer.
output.cognitoUserPoolId !== null
        ? output.cognitoUserPoolId
        : undefined,
    Description: output.description !== undefined && output.description !== null ? output.description : undefined,
    ExpirationTime:
      output.expirationTime !== undefined && output.expirationTime !== null ? output.expirationTime : undefined,
    OpenIDAuthTTL:
      output.openIDAuthTTL !== undefined && output.openIDAuthTTL !== null ? output.openIDAuthTTL : undefined,
    OpenIDClientId:
      output.openIDClientId !== undefined && output.openIDClientId !== null ? output.openIDClientId : undefined,
    OpenIDIatTTL: output.openIDIatTTL !== undefined && output.openIDIatTTL !== null ? output.openIDIatTTL : undefined,
    OpenIDIssueURL:
      output.openIDIssueURL !== undefined && output.openIDIssueURL !== null ? output.openIDIssueURL : undefined,
    OpenIDProviderName:
      output.openIDProviderName !== undefined && output.openIDProviderName !== null
        ? output.openIDProviderName
        : undefined,
  } as any;
};

// Deserializer: auth mode plus nested AppSync settings.
const deserializeAws_restJson1BackendAPIAuthType = (output: any, context: __SerdeContext): BackendAPIAuthType => {
  return {
    Mode: output.mode !== undefined && output.mode !== null ? output.mode : undefined,
    Settings:
      output.settings !== undefined && output.settings !== null
        ? deserializeAws_restJson1BackendAPIAppSyncAuthSettings(output.settings, context)
        : undefined,
  } as any;
};

const deserializeAws_restJson1BackendAPIConflictResolution = (
  output: any,
  context: __SerdeContext
): BackendAPIConflictResolution => {
  return {
    ResolutionStrategy:
      output.resolutionStrategy !== undefined && output.resolutionStrategy !== null
        ? output.resolutionStrategy
        : undefined,
  } as any;
};

// Deserializer: full backend API resource configuration.
const deserializeAws_restJson1BackendAPIResourceConfig = (
  output: any,
  context: __SerdeContext
): BackendAPIResourceConfig => {
  return {
    AdditionalAuthTypes:
      output.additionalAuthTypes !== undefined && output.additionalAuthTypes !== null
        ?
deserializeAws_restJson1ListOfBackendAPIAuthType(output.additionalAuthTypes, context)
        : undefined,
    ApiName: output.apiName !== undefined && output.apiName !== null ? output.apiName : undefined,
    ConflictResolution:
      output.conflictResolution !== undefined && output.conflictResolution !== null
        ? deserializeAws_restJson1BackendAPIConflictResolution(output.conflictResolution, context)
        : undefined,
    DefaultAuthType:
      output.defaultAuthType !== undefined && output.defaultAuthType !== null
        ? deserializeAws_restJson1BackendAPIAuthType(output.defaultAuthType, context)
        : undefined,
    Service: output.service !== undefined && output.service !== null ? output.service : undefined,
    TransformSchema:
      output.transformSchema !== undefined && output.transformSchema !== null ? output.transformSchema : undefined,
  } as any;
};

// Deserializer: social sign-in credentials; note snake_case wire keys.
const deserializeAws_restJson1BackendAuthSocialProviderConfig = (
  output: any,
  context: __SerdeContext
): BackendAuthSocialProviderConfig => {
  return {
    ClientId: output.client_id !== undefined && output.client_id !== null ? output.client_id : undefined,
    ClientSecret:
      output.client_secret !== undefined && output.client_secret !== null ? output.client_secret : undefined,
  } as any;
};

// Deserializer: a single backend job record (used by job list responses).
const deserializeAws_restJson1BackendJobRespObj = (output: any, context: __SerdeContext): BackendJobRespObj => {
  return {
    AppId: output.appId !== undefined && output.appId !== null ? output.appId : undefined,
    BackendEnvironmentName:
      output.backendEnvironmentName !== undefined && output.backendEnvironmentName !== null
        ? output.backendEnvironmentName
        : undefined,
    CreateTime: output.createTime !== undefined && output.createTime !== null ? output.createTime : undefined,
    Error: output.error !== undefined && output.error !== null ? output.error : undefined,
    JobId: output.jobId !== undefined && output.jobId !== null ? output.jobId : undefined,
    Operation: output.operation !== undefined && output.operation !== null ?
output.operation : undefined,
    Status: output.status !== undefined && output.status !== null ? output.status : undefined,
    UpdateTime: output.updateTime !== undefined && output.updateTime !== null ? output.updateTime : undefined,
  } as any;
};

// CreateBackendAuth* deserializers mirror the serializers above.
const deserializeAws_restJson1CreateBackendAuthForgotPasswordConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthForgotPasswordConfig => {
  return {
    DeliveryMethod:
      output.deliveryMethod !== undefined && output.deliveryMethod !== null ? output.deliveryMethod : undefined,
    EmailSettings:
      output.emailSettings !== undefined && output.emailSettings !== null
        ? deserializeAws_restJson1EmailSettings(output.emailSettings, context)
        : undefined,
    SmsSettings:
      output.smsSettings !== undefined && output.smsSettings !== null
        ? deserializeAws_restJson1SmsSettings(output.smsSettings, context)
        : undefined,
  } as any;
};

const deserializeAws_restJson1CreateBackendAuthIdentityPoolConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthIdentityPoolConfig => {
  return {
    IdentityPoolName:
      output.identityPoolName !== undefined && output.identityPoolName !== null ? output.identityPoolName : undefined,
    UnauthenticatedLogin:
      output.unauthenticatedLogin !== undefined && output.unauthenticatedLogin !== null
        ? output.unauthenticatedLogin
        : undefined,
  } as any;
};

const deserializeAws_restJson1CreateBackendAuthMFAConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthMFAConfig => {
  return {
    MFAMode: output.MFAMode !== undefined && output.MFAMode !== null ? output.MFAMode : undefined,
    Settings:
      output.settings !== undefined && output.settings !== null
        ?
deserializeAws_restJson1Settings(output.settings, context)
        : undefined,
  } as any;
};

const deserializeAws_restJson1CreateBackendAuthOAuthConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthOAuthConfig => {
  return {
    DomainPrefix: output.domainPrefix !== undefined && output.domainPrefix !== null ? output.domainPrefix : undefined,
    OAuthGrantType:
      output.oAuthGrantType !== undefined && output.oAuthGrantType !== null ? output.oAuthGrantType : undefined,
    OAuthScopes:
      output.oAuthScopes !== undefined && output.oAuthScopes !== null
        ? deserializeAws_restJson1ListOfOAuthScopesElement(output.oAuthScopes, context)
        : undefined,
    RedirectSignInURIs:
      output.redirectSignInURIs !== undefined && output.redirectSignInURIs !== null
        ? deserializeAws_restJson1ListOf__string(output.redirectSignInURIs, context)
        : undefined,
    RedirectSignOutURIs:
      output.redirectSignOutURIs !== undefined && output.redirectSignOutURIs !== null
        ? deserializeAws_restJson1ListOf__string(output.redirectSignOutURIs, context)
        : undefined,
    SocialProviderSettings:
      output.socialProviderSettings !== undefined && output.socialProviderSettings !== null
        ? deserializeAws_restJson1SocialProviderSettings(output.socialProviderSettings, context)
        : undefined,
  } as any;
};

const deserializeAws_restJson1CreateBackendAuthPasswordPolicyConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthPasswordPolicyConfig => {
  return {
    AdditionalConstraints:
      output.additionalConstraints !== undefined && output.additionalConstraints !== null
        ? deserializeAws_restJson1ListOfAdditionalConstraintsElement(output.additionalConstraints, context)
        : undefined,
    MinimumLength:
      output.minimumLength !== undefined && output.minimumLength !== null ?
output.minimumLength : undefined,
  } as any;
};

const deserializeAws_restJson1CreateBackendAuthResourceConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthResourceConfig => {
  return {
    AuthResources:
      output.authResources !== undefined && output.authResources !== null ? output.authResources : undefined,
    IdentityPoolConfigs:
      output.identityPoolConfigs !== undefined && output.identityPoolConfigs !== null
        ? deserializeAws_restJson1CreateBackendAuthIdentityPoolConfig(output.identityPoolConfigs, context)
        : undefined,
    Service: output.service !== undefined && output.service !== null ? output.service : undefined,
    UserPoolConfigs:
      output.userPoolConfigs !== undefined && output.userPoolConfigs !== null
        ? deserializeAws_restJson1CreateBackendAuthUserPoolConfig(output.userPoolConfigs, context)
        : undefined,
  } as any;
};

// Deserializer: Cognito user pool settings (body continues below).
const deserializeAws_restJson1CreateBackendAuthUserPoolConfig = (
  output: any,
  context: __SerdeContext
): CreateBackendAuthUserPoolConfig => {
  return {
    ForgotPassword:
      output.forgotPassword !== undefined && output.forgotPassword !== null
        ? deserializeAws_restJson1CreateBackendAuthForgotPasswordConfig(output.forgotPassword, context)
        : undefined,
    Mfa:
      output.mfa !== undefined && output.mfa !== null
        ? deserializeAws_restJson1CreateBackendAuthMFAConfig(output.mfa, context)
        : undefined,
    OAuth:
      output.oAuth !== undefined && output.oAuth !== null
        ? deserializeAws_restJson1CreateBackendAuthOAuthConfig(output.oAuth, context)
        : undefined,
    PasswordPolicy:
      output.passwordPolicy !== undefined && output.passwordPolicy !== null
        ? deserializeAws_restJson1CreateBackendAuthPasswordPolicyConfig(output.passwordPolicy, context)
        : undefined,
    RequiredSignUpAttributes:
      output.requiredSignUpAttributes !== undefined && output.requiredSignUpAttributes !== null
        ?
deserializeAws_restJson1ListOfRequiredSignUpAttributesElement(output.requiredSignUpAttributes, context) + : undefined, + SignInMethod: output.signInMethod !== undefined && output.signInMethod !== null ? output.signInMethod : undefined, + UserPoolName: output.userPoolName !== undefined && output.userPoolName !== null ? output.userPoolName : undefined, + } as any; +}; + +const deserializeAws_restJson1EmailSettings = (output: any, context: __SerdeContext): EmailSettings => { + return { + EmailMessage: output.emailMessage !== undefined && output.emailMessage !== null ? output.emailMessage : undefined, + EmailSubject: output.emailSubject !== undefined && output.emailSubject !== null ? output.emailSubject : undefined, + } as any; +}; + +const deserializeAws_restJson1ListOf__string = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1ListOfAdditionalConstraintsElement = ( + output: any, + context: __SerdeContext +): (AdditionalConstraintsElement | string)[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1ListOfBackendAPIAuthType = ( + output: any, + context: __SerdeContext +): BackendAPIAuthType[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1BackendAPIAuthType(entry, context)); +}; + +const deserializeAws_restJson1ListOfBackendJobRespObj = (output: any, context: __SerdeContext): BackendJobRespObj[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1BackendJobRespObj(entry, context)); +}; + +const deserializeAws_restJson1ListOfMfaTypesElement = ( + output: any, + context: __SerdeContext +): (MfaTypesElement | string)[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1ListOfOAuthScopesElement = ( + output: any, + context: __SerdeContext +): (OAuthScopesElement | string)[] => { + return (output || []).map((entry: any) => entry); +}; + +const 
deserializeAws_restJson1ListOfRequiredSignUpAttributesElement = ( + output: any, + context: __SerdeContext +): (RequiredSignUpAttributesElement | string)[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1LoginAuthConfigReqObj = (output: any, context: __SerdeContext): LoginAuthConfigReqObj => { + return { + AwsCognitoIdentityPoolId: + output.aws_cognito_identity_pool_id !== undefined && output.aws_cognito_identity_pool_id !== null + ? output.aws_cognito_identity_pool_id + : undefined, + AwsCognitoRegion: + output.aws_cognito_region !== undefined && output.aws_cognito_region !== null + ? output.aws_cognito_region + : undefined, + AwsUserPoolsId: + output.aws_user_pools_id !== undefined && output.aws_user_pools_id !== null + ? output.aws_user_pools_id + : undefined, + AwsUserPoolsWebClientId: + output.aws_user_pools_web_client_id !== undefined && output.aws_user_pools_web_client_id !== null + ? output.aws_user_pools_web_client_id + : undefined, + } as any; +}; + +const deserializeAws_restJson1Settings = (output: any, context: __SerdeContext): Settings => { + return { + MfaTypes: + output.mfaTypes !== undefined && output.mfaTypes !== null + ? deserializeAws_restJson1ListOfMfaTypesElement(output.mfaTypes, context) + : undefined, + SmsMessage: output.smsMessage !== undefined && output.smsMessage !== null ? output.smsMessage : undefined, + } as any; +}; + +const deserializeAws_restJson1SmsSettings = (output: any, context: __SerdeContext): SmsSettings => { + return { + SmsMessage: output.smsMessage !== undefined && output.smsMessage !== null ? output.smsMessage : undefined, + } as any; +}; + +const deserializeAws_restJson1SocialProviderSettings = ( + output: any, + context: __SerdeContext +): SocialProviderSettings => { + return { + Facebook: + output.Facebook !== undefined && output.Facebook !== null + ? 
deserializeAws_restJson1BackendAuthSocialProviderConfig(output.Facebook, context) + : undefined, + Google: + output.Google !== undefined && output.Google !== null + ? deserializeAws_restJson1BackendAuthSocialProviderConfig(output.Google, context) + : undefined, + LoginWithAmazon: + output.LoginWithAmazon !== undefined && output.LoginWithAmazon !== null + ? deserializeAws_restJson1BackendAuthSocialProviderConfig(output.LoginWithAmazon, context) + : undefined, + } as any; +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. +const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise<Uint8Array> => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}; + +// Encode Uint8Array data into string with utf-8. +const collectBodyString = (streamBody: any, context: __SerdeContext): Promise<string> => + collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); + +const isSerializableHeaderValue = (value: any): boolean => + value !== undefined && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); + +const parseBody = (streamBody: any, context: __SerdeContext): any => + collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; + }); + +/** + * Load an error code for the aws.rest-json-1.1 protocol.
+ */ +const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => { + const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + + const sanitizeErrorCode = (rawValue: string): string => { + let cleanValue = rawValue; + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + + return ""; +}; diff --git a/clients/client-amplifybackend/runtimeConfig.browser.ts b/clients/client-amplifybackend/runtimeConfig.browser.ts new file mode 100644 index 000000000000..1ad28c41c63a --- /dev/null +++ b/clients/client-amplifybackend/runtimeConfig.browser.ts @@ -0,0 +1,34 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler"; +import { invalidAsyncFunction } from "@aws-sdk/invalid-dependency"; +import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry"; +import { parseUrl } from "@aws-sdk/url-parser-browser"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-browser"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser"; +import { ClientDefaults } from "./AmplifyBackendClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "browser", + 
base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider: invalidAsyncFunction("Credential is missing") as any, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: DEFAULT_MAX_ATTEMPTS, + region: invalidAsyncFunction("Region is missing") as any, + requestHandler: new FetchHttpHandler(), + sha256: Sha256, + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-amplifybackend/runtimeConfig.native.ts b/clients/client-amplifybackend/runtimeConfig.native.ts new file mode 100644 index 000000000000..56a11bbb199d --- /dev/null +++ b/clients/client-amplifybackend/runtimeConfig.native.ts @@ -0,0 +1,17 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-js"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { ClientDefaults } from "./AmplifyBackendClient"; +import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser"; + +/** + * @internal + */ +export const ClientDefaultValues: Required<ClientDefaults> = { + ...BrowserDefaults, + runtime: "react-native", + defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`, + sha256: Sha256, + urlParser: parseUrl, +}; diff --git a/clients/client-amplifybackend/runtimeConfig.shared.ts b/clients/client-amplifybackend/runtimeConfig.shared.ts new file mode 100644 index 000000000000..4557b401d32a --- /dev/null +++ b/clients/client-amplifybackend/runtimeConfig.shared.ts @@ -0,0 +1,13 @@ +import { defaultRegionInfoProvider } from "./endpoints"; +import { Logger as __Logger } from "@aws-sdk/types"; + +/** + * @internal + */ +export const ClientSharedValues = { + apiVersion: "2020-08-11", + disableHostPrefix: false, + logger: {} as __Logger, + regionInfoProvider: defaultRegionInfoProvider, + signingName: "amplifybackend", +}; diff --git a/clients/client-amplifybackend/runtimeConfig.ts
b/clients/client-amplifybackend/runtimeConfig.ts new file mode 100644 index 000000000000..8b2a934698ef --- /dev/null +++ b/clients/client-amplifybackend/runtimeConfig.ts @@ -0,0 +1,36 @@ +import packageInfo from "./package.json"; + +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { Hash } from "@aws-sdk/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider"; +import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-node"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-node"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node"; +import { ClientDefaults } from "./AmplifyBackendClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "node", + base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: new NodeHttpHandler(), + sha256: Hash.bind(null, "sha256"), + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-amplifybackend/tsconfig.es.json b/clients/client-amplifybackend/tsconfig.es.json new file mode 100644 index 000000000000..30df5d2e6986 --- /dev/null +++ 
b/clients/client-amplifybackend/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-amplifybackend/tsconfig.json b/clients/client-amplifybackend/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-amplifybackend/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": "./README.md", + "mode": "file", + "out": "./docs", + "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-appintegrations/.gitignore b/clients/client-appintegrations/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-appintegrations/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-appintegrations/.npmignore b/clients/client-appintegrations/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-appintegrations/.npmignore @@ 
-0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-appintegrations/AppIntegrations.ts b/clients/client-appintegrations/AppIntegrations.ts new file mode 100644 index 000000000000..63ec5edb5cff --- /dev/null +++ b/clients/client-appintegrations/AppIntegrations.ts @@ -0,0 +1,346 @@ +import { AppIntegrationsClient } from "./AppIntegrationsClient"; +import { + CreateEventIntegrationCommand, + CreateEventIntegrationCommandInput, + CreateEventIntegrationCommandOutput, +} from "./commands/CreateEventIntegrationCommand"; +import { + DeleteEventIntegrationCommand, + DeleteEventIntegrationCommandInput, + DeleteEventIntegrationCommandOutput, +} from "./commands/DeleteEventIntegrationCommand"; +import { + GetEventIntegrationCommand, + GetEventIntegrationCommandInput, + GetEventIntegrationCommandOutput, +} from "./commands/GetEventIntegrationCommand"; +import { + ListEventIntegrationAssociationsCommand, + ListEventIntegrationAssociationsCommandInput, + ListEventIntegrationAssociationsCommandOutput, +} from "./commands/ListEventIntegrationAssociationsCommand"; +import { + ListEventIntegrationsCommand, + ListEventIntegrationsCommandInput, + ListEventIntegrationsCommandOutput, +} from "./commands/ListEventIntegrationsCommand"; +import { + ListTagsForResourceCommand, + ListTagsForResourceCommandInput, + ListTagsForResourceCommandOutput, +} from "./commands/ListTagsForResourceCommand"; +import { TagResourceCommand, TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { + UntagResourceCommand, + UntagResourceCommandInput, + UntagResourceCommandOutput, +} from "./commands/UntagResourceCommand"; +import { + UpdateEventIntegrationCommand, + UpdateEventIntegrationCommandInput, + UpdateEventIntegrationCommandOutput, +} from "./commands/UpdateEventIntegrationCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ * + *

The Amazon AppIntegrations service enables you to configure and reuse connections to external applications.

+ *

For information about how you can use external applications with Amazon Connect, see Set up pre-built integrations in the Amazon Connect Administrator Guide.

+ */ +export class AppIntegrations extends AppIntegrationsClient { + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Creates an EventIntegration, given a specified name, description, and a reference to an + * Amazon Eventbridge bus in your account and a partner event source that will push events to that bus. No + * objects are created in your account, only metadata that is persisted on the EventIntegration + * control plane.

+ */ + public createEventIntegration( + args: CreateEventIntegrationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createEventIntegration( + args: CreateEventIntegrationCommandInput, + cb: (err: any, data?: CreateEventIntegrationCommandOutput) => void + ): void; + public createEventIntegration( + args: CreateEventIntegrationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateEventIntegrationCommandOutput) => void + ): void; + public createEventIntegration( + args: CreateEventIntegrationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateEventIntegrationCommandOutput) => void), + cb?: (err: any, data?: CreateEventIntegrationCommandOutput) => void + ): Promise | void { + const command = new CreateEventIntegrationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Deletes the specified existing event integration. If the event integration is associated + * with clients, the request is rejected.

+ */ + public deleteEventIntegration( + args: DeleteEventIntegrationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteEventIntegration( + args: DeleteEventIntegrationCommandInput, + cb: (err: any, data?: DeleteEventIntegrationCommandOutput) => void + ): void; + public deleteEventIntegration( + args: DeleteEventIntegrationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteEventIntegrationCommandOutput) => void + ): void; + public deleteEventIntegration( + args: DeleteEventIntegrationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteEventIntegrationCommandOutput) => void), + cb?: (err: any, data?: DeleteEventIntegrationCommandOutput) => void + ): Promise | void { + const command = new DeleteEventIntegrationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Return information about the event integration.

+ */ + public getEventIntegration( + args: GetEventIntegrationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getEventIntegration( + args: GetEventIntegrationCommandInput, + cb: (err: any, data?: GetEventIntegrationCommandOutput) => void + ): void; + public getEventIntegration( + args: GetEventIntegrationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetEventIntegrationCommandOutput) => void + ): void; + public getEventIntegration( + args: GetEventIntegrationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetEventIntegrationCommandOutput) => void), + cb?: (err: any, data?: GetEventIntegrationCommandOutput) => void + ): Promise | void { + const command = new GetEventIntegrationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Returns a paginated list of event integration associations in the account.

+ */ + public listEventIntegrationAssociations( + args: ListEventIntegrationAssociationsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listEventIntegrationAssociations( + args: ListEventIntegrationAssociationsCommandInput, + cb: (err: any, data?: ListEventIntegrationAssociationsCommandOutput) => void + ): void; + public listEventIntegrationAssociations( + args: ListEventIntegrationAssociationsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListEventIntegrationAssociationsCommandOutput) => void + ): void; + public listEventIntegrationAssociations( + args: ListEventIntegrationAssociationsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListEventIntegrationAssociationsCommandOutput) => void), + cb?: (err: any, data?: ListEventIntegrationAssociationsCommandOutput) => void + ): Promise | void { + const command = new ListEventIntegrationAssociationsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Returns a paginated list of event integrations in the account.

+ */ + public listEventIntegrations( + args: ListEventIntegrationsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listEventIntegrations( + args: ListEventIntegrationsCommandInput, + cb: (err: any, data?: ListEventIntegrationsCommandOutput) => void + ): void; + public listEventIntegrations( + args: ListEventIntegrationsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListEventIntegrationsCommandOutput) => void + ): void; + public listEventIntegrations( + args: ListEventIntegrationsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListEventIntegrationsCommandOutput) => void), + cb?: (err: any, data?: ListEventIntegrationsCommandOutput) => void + ): Promise | void { + const command = new ListEventIntegrationsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Lists the tags for the specified resource.

+ */ + public listTagsForResource( + args: ListTagsForResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listTagsForResource( + args: ListTagsForResourceCommandInput, + cb: (err: any, data?: ListTagsForResourceCommandOutput) => void + ): void; + public listTagsForResource( + args: ListTagsForResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTagsForResourceCommandOutput) => void + ): void; + public listTagsForResource( + args: ListTagsForResourceCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListTagsForResourceCommandOutput) => void), + cb?: (err: any, data?: ListTagsForResourceCommandOutput) => void + ): Promise | void { + const command = new ListTagsForResourceCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Adds the specified tags to the specified resource.

+ */ + public tagResource(args: TagResourceCommandInput, options?: __HttpHandlerOptions): Promise; + public tagResource(args: TagResourceCommandInput, cb: (err: any, data?: TagResourceCommandOutput) => void): void; + public tagResource( + args: TagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: TagResourceCommandOutput) => void + ): void; + public tagResource( + args: TagResourceCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: TagResourceCommandOutput) => void), + cb?: (err: any, data?: TagResourceCommandOutput) => void + ): Promise | void { + const command = new TagResourceCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Removes the specified tags from the specified resource.

+ */ + public untagResource( + args: UntagResourceCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public untagResource( + args: UntagResourceCommandInput, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + public untagResource( + args: UntagResourceCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UntagResourceCommandOutput) => void + ): void; + public untagResource( + args: UntagResourceCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UntagResourceCommandOutput) => void), + cb?: (err: any, data?: UntagResourceCommandOutput) => void + ): Promise | void { + const command = new UntagResourceCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Updates the description of an event integration.

+ */ + public updateEventIntegration( + args: UpdateEventIntegrationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateEventIntegration( + args: UpdateEventIntegrationCommandInput, + cb: (err: any, data?: UpdateEventIntegrationCommandOutput) => void + ): void; + public updateEventIntegration( + args: UpdateEventIntegrationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateEventIntegrationCommandOutput) => void + ): void; + public updateEventIntegration( + args: UpdateEventIntegrationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateEventIntegrationCommandOutput) => void), + cb?: (err: any, data?: UpdateEventIntegrationCommandOutput) => void + ): Promise | void { + const command = new UpdateEventIntegrationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-appintegrations/AppIntegrationsClient.ts b/clients/client-appintegrations/AppIntegrationsClient.ts new file mode 100644 index 000000000000..03ad12c508a0 --- /dev/null +++ b/clients/client-appintegrations/AppIntegrationsClient.ts @@ -0,0 +1,252 @@ +import { + CreateEventIntegrationCommandInput, + CreateEventIntegrationCommandOutput, +} from "./commands/CreateEventIntegrationCommand"; +import { + DeleteEventIntegrationCommandInput, + DeleteEventIntegrationCommandOutput, +} from "./commands/DeleteEventIntegrationCommand"; +import { + GetEventIntegrationCommandInput, + GetEventIntegrationCommandOutput, +} from "./commands/GetEventIntegrationCommand"; +import { + ListEventIntegrationAssociationsCommandInput, + ListEventIntegrationAssociationsCommandOutput, +} from "./commands/ListEventIntegrationAssociationsCommand"; 
+import { + ListEventIntegrationsCommandInput, + ListEventIntegrationsCommandOutput, +} from "./commands/ListEventIntegrationsCommand"; +import { + ListTagsForResourceCommandInput, + ListTagsForResourceCommandOutput, +} from "./commands/ListTagsForResourceCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { + UpdateEventIntegrationCommandInput, + UpdateEventIntegrationCommandOutput, +} from "./commands/UpdateEventIntegrationCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + 
HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = + | CreateEventIntegrationCommandInput + | DeleteEventIntegrationCommandInput + | GetEventIntegrationCommandInput + | ListEventIntegrationAssociationsCommandInput + | ListEventIntegrationsCommandInput + | ListTagsForResourceCommandInput + | TagResourceCommandInput + | UntagResourceCommandInput + | UpdateEventIntegrationCommandInput; + +export type ServiceOutputTypes = + | CreateEventIntegrationCommandOutput + | DeleteEventIntegrationCommandOutput + | GetEventIntegrationCommandOutput + | ListEventIntegrationAssociationsCommandOutput + | ListEventIntegrationsCommandOutput + | ListTagsForResourceCommandOutput + | TagResourceCommandOutput + | UntagResourceCommandOutput + | UpdateEventIntegrationCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. + */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. + */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. 
+ */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dynamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider<string>; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider<number>; + + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region.
+ */ + regionInfoProvider?: RegionInfoProvider; +} + +export type AppIntegrationsClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type AppIntegrationsClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required<ClientDefaults> & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + *

<p>The Amazon AppIntegrations APIs are in preview release and are subject to change.</p>

+ * + *

<p>The Amazon AppIntegrations service enables you to configure and reuse connections to external applications.</p>

+ *

<p>For information about how you can use external applications with Amazon Connect, see Set up pre-built integrations in the Amazon Connect Administrator Guide.</p>

+ */ +export class AppIntegrationsClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + AppIntegrationsClientResolvedConfig +> { + readonly config: AppIntegrationsClientResolvedConfig; + + constructor(configuration: AppIntegrationsClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-appintegrations/LICENSE b/clients/client-appintegrations/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-appintegrations/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-appintegrations/README.md b/clients/client-appintegrations/README.md new file mode 100644 index 000000000000..dd1d65f24024 --- /dev/null +++ b/clients/client-appintegrations/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-appintegrations + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-appintegrations/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-appintegrations) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-appintegrations.svg)](https://www.npmjs.com/package/@aws-sdk/client-appintegrations) + +For SDK usage, please step to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-appintegrations/commands/CreateEventIntegrationCommand.ts b/clients/client-appintegrations/commands/CreateEventIntegrationCommand.ts new file mode 100644 index 000000000000..753b4ce15497 --- /dev/null +++ b/clients/client-appintegrations/commands/CreateEventIntegrationCommand.ts @@ -0,0 +1,92 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { CreateEventIntegrationRequest, CreateEventIntegrationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateEventIntegrationCommand, + serializeAws_restJson1CreateEventIntegrationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateEventIntegrationCommandInput = CreateEventIntegrationRequest; +export type CreateEventIntegrationCommandOutput = CreateEventIntegrationResponse & __MetadataBearer; + +/** + *

<p>The Amazon AppIntegrations APIs are in preview release and are subject to change.</p>

+ *

<p>Creates an EventIntegration, given a specified name, description, and a reference to an + * Amazon Eventbridge bus in your account and a partner event source that will push events to that bus. No + * objects are created in the your account, only metadata that is persisted on the EventIntegration + * control plane.</p>

+ */ +export class CreateEventIntegrationCommand extends $Command< + CreateEventIntegrationCommandInput, + CreateEventIntegrationCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateEventIntegrationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "CreateEventIntegrationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateEventIntegrationRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateEventIntegrationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateEventIntegrationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateEventIntegrationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateEventIntegrationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-appintegrations/commands/DeleteEventIntegrationCommand.ts b/clients/client-appintegrations/commands/DeleteEventIntegrationCommand.ts new file mode 100644 index 000000000000..2c370eeb9ea1 --- /dev/null +++ b/clients/client-appintegrations/commands/DeleteEventIntegrationCommand.ts @@ -0,0 +1,90 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { DeleteEventIntegrationRequest, DeleteEventIntegrationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteEventIntegrationCommand, + serializeAws_restJson1DeleteEventIntegrationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteEventIntegrationCommandInput = DeleteEventIntegrationRequest; +export type DeleteEventIntegrationCommandOutput = DeleteEventIntegrationResponse & __MetadataBearer; + +/** + *

<p>The Amazon AppIntegrations APIs are in preview release and are subject to change.</p>

+ *

<p>Deletes the specified existing event integration. If the event integration is associated + * with clients, the request is rejected.</p>

+ */ +export class DeleteEventIntegrationCommand extends $Command< + DeleteEventIntegrationCommandInput, + DeleteEventIntegrationCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteEventIntegrationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "DeleteEventIntegrationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteEventIntegrationRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteEventIntegrationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteEventIntegrationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteEventIntegrationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteEventIntegrationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-appintegrations/commands/GetEventIntegrationCommand.ts b/clients/client-appintegrations/commands/GetEventIntegrationCommand.ts new file mode 100644 index 000000000000..be210be4b497 --- /dev/null +++ b/clients/client-appintegrations/commands/GetEventIntegrationCommand.ts @@ -0,0 +1,89 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { GetEventIntegrationRequest, GetEventIntegrationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetEventIntegrationCommand, + serializeAws_restJson1GetEventIntegrationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetEventIntegrationCommandInput = GetEventIntegrationRequest; +export type GetEventIntegrationCommandOutput = GetEventIntegrationResponse & __MetadataBearer; + +/** + *

<p>The Amazon AppIntegrations APIs are in preview release and are subject to change.</p>

+ *

<p>Return information about the event integration.</p>

+ */ +export class GetEventIntegrationCommand extends $Command< + GetEventIntegrationCommandInput, + GetEventIntegrationCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetEventIntegrationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "GetEventIntegrationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetEventIntegrationRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetEventIntegrationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetEventIntegrationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetEventIntegrationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetEventIntegrationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-appintegrations/commands/ListEventIntegrationAssociationsCommand.ts 
b/clients/client-appintegrations/commands/ListEventIntegrationAssociationsCommand.ts new file mode 100644 index 000000000000..9b37e1327e57 --- /dev/null +++ b/clients/client-appintegrations/commands/ListEventIntegrationAssociationsCommand.ts @@ -0,0 +1,95 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { ListEventIntegrationAssociationsRequest, ListEventIntegrationAssociationsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListEventIntegrationAssociationsCommand, + serializeAws_restJson1ListEventIntegrationAssociationsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListEventIntegrationAssociationsCommandInput = ListEventIntegrationAssociationsRequest; +export type ListEventIntegrationAssociationsCommandOutput = ListEventIntegrationAssociationsResponse & __MetadataBearer; + +/** + *

<p>The Amazon AppIntegrations APIs are in preview release and are subject to change.</p>

+ *

<p>Returns a paginated list of event integration associations in the account.</p>

+ */ +export class ListEventIntegrationAssociationsCommand extends $Command< + ListEventIntegrationAssociationsCommandInput, + ListEventIntegrationAssociationsCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListEventIntegrationAssociationsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "ListEventIntegrationAssociationsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListEventIntegrationAssociationsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListEventIntegrationAssociationsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: ListEventIntegrationAssociationsCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_restJson1ListEventIntegrationAssociationsCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1ListEventIntegrationAssociationsCommand(output, context); + } + + // Start section: 
command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-appintegrations/commands/ListEventIntegrationsCommand.ts b/clients/client-appintegrations/commands/ListEventIntegrationsCommand.ts new file mode 100644 index 000000000000..857d9ad0a92f --- /dev/null +++ b/clients/client-appintegrations/commands/ListEventIntegrationsCommand.ts @@ -0,0 +1,89 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { ListEventIntegrationsRequest, ListEventIntegrationsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListEventIntegrationsCommand, + serializeAws_restJson1ListEventIntegrationsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListEventIntegrationsCommandInput = ListEventIntegrationsRequest; +export type ListEventIntegrationsCommandOutput = ListEventIntegrationsResponse & __MetadataBearer; + +/** + *

<p>The Amazon AppIntegrations APIs are in preview release and are subject to change.</p>

+ *

<p>Returns a paginated list of event integrations in the account.</p>

+ */ +export class ListEventIntegrationsCommand extends $Command< + ListEventIntegrationsCommandInput, + ListEventIntegrationsCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListEventIntegrationsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "ListEventIntegrationsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListEventIntegrationsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListEventIntegrationsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListEventIntegrationsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListEventIntegrationsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListEventIntegrationsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-appintegrations/commands/ListTagsForResourceCommand.ts b/clients/client-appintegrations/commands/ListTagsForResourceCommand.ts new file mode 100644 index 000000000000..88d38bec15da --- /dev/null +++ b/clients/client-appintegrations/commands/ListTagsForResourceCommand.ts @@ -0,0 +1,89 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { ListTagsForResourceRequest, ListTagsForResourceResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListTagsForResourceCommand, + serializeAws_restJson1ListTagsForResourceCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListTagsForResourceCommandInput = ListTagsForResourceRequest; +export type ListTagsForResourceCommandOutput = ListTagsForResourceResponse & __MetadataBearer; + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Lists the tags for the specified resource.

+ */ +export class ListTagsForResourceCommand extends $Command< + ListTagsForResourceCommandInput, + ListTagsForResourceCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListTagsForResourceCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "ListTagsForResourceCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListTagsForResourceRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListTagsForResourceResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListTagsForResourceCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListTagsForResourceCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListTagsForResourceCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-appintegrations/commands/TagResourceCommand.ts 
b/clients/client-appintegrations/commands/TagResourceCommand.ts new file mode 100644 index 000000000000..941fae040c90 --- /dev/null +++ b/clients/client-appintegrations/commands/TagResourceCommand.ts @@ -0,0 +1,89 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { TagResourceRequest, TagResourceResponse } from "../models/models_0"; +import { + deserializeAws_restJson1TagResourceCommand, + serializeAws_restJson1TagResourceCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type TagResourceCommandInput = TagResourceRequest; +export type TagResourceCommandOutput = TagResourceResponse & __MetadataBearer; + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Adds the specified tags to the specified resource.

+ */ +export class TagResourceCommand extends $Command< + TagResourceCommandInput, + TagResourceCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: TagResourceCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "TagResourceCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: TagResourceRequest.filterSensitiveLog, + outputFilterSensitiveLog: TagResourceResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: TagResourceCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1TagResourceCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1TagResourceCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-appintegrations/commands/UntagResourceCommand.ts b/clients/client-appintegrations/commands/UntagResourceCommand.ts new file mode 100644 index 
000000000000..c1f54d1ffbbd --- /dev/null +++ b/clients/client-appintegrations/commands/UntagResourceCommand.ts @@ -0,0 +1,89 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { UntagResourceRequest, UntagResourceResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UntagResourceCommand, + serializeAws_restJson1UntagResourceCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UntagResourceCommandInput = UntagResourceRequest; +export type UntagResourceCommandOutput = UntagResourceResponse & __MetadataBearer; + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Removes the specified tags from the specified resource.

+ */ +export class UntagResourceCommand extends $Command< + UntagResourceCommandInput, + UntagResourceCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UntagResourceCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "UntagResourceCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UntagResourceRequest.filterSensitiveLog, + outputFilterSensitiveLog: UntagResourceResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UntagResourceCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UntagResourceCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UntagResourceCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-appintegrations/commands/UpdateEventIntegrationCommand.ts b/clients/client-appintegrations/commands/UpdateEventIntegrationCommand.ts 
new file mode 100644 index 000000000000..50bad5f1ffc3 --- /dev/null +++ b/clients/client-appintegrations/commands/UpdateEventIntegrationCommand.ts @@ -0,0 +1,89 @@ +import { AppIntegrationsClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../AppIntegrationsClient"; +import { UpdateEventIntegrationRequest, UpdateEventIntegrationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateEventIntegrationCommand, + serializeAws_restJson1UpdateEventIntegrationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateEventIntegrationCommandInput = UpdateEventIntegrationRequest; +export type UpdateEventIntegrationCommandOutput = UpdateEventIntegrationResponse & __MetadataBearer; + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

Updates the description of an event integration.

+ */ +export class UpdateEventIntegrationCommand extends $Command< + UpdateEventIntegrationCommandInput, + UpdateEventIntegrationCommandOutput, + AppIntegrationsClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateEventIntegrationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: AppIntegrationsClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "AppIntegrationsClient"; + const commandName = "UpdateEventIntegrationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateEventIntegrationRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateEventIntegrationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateEventIntegrationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateEventIntegrationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateEventIntegrationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-appintegrations/endpoints.ts 
b/clients/client-appintegrations/endpoints.ts new file mode 100644 index 000000000000..60033aa33b5f --- /dev/null +++ b/clients/client-appintegrations/endpoints.ts @@ -0,0 +1,81 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "app-integrations.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "app-integrations.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "app-integrations.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "app-integrations.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "app-integrations.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. + // Next, try to match partition endpoints. 
+ default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. + if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-appintegrations/index.ts b/clients/client-appintegrations/index.ts new file mode 100644 index 000000000000..96603420f8cd --- /dev/null +++ b/clients/client-appintegrations/index.ts @@ -0,0 +1,12 @@ +export * from "./AppIntegrationsClient"; +export * from "./AppIntegrations"; +export * from "./commands/CreateEventIntegrationCommand"; +export * from "./commands/DeleteEventIntegrationCommand"; +export * from "./commands/GetEventIntegrationCommand"; +export * from "./commands/ListEventIntegrationAssociationsCommand"; +export * from "./commands/ListEventIntegrationsCommand"; +export * from "./commands/ListTagsForResourceCommand"; +export * from "./commands/TagResourceCommand"; +export * from "./commands/UntagResourceCommand"; +export * from "./commands/UpdateEventIntegrationCommand"; +export * from "./models/index"; diff --git a/clients/client-appintegrations/models/index.ts b/clients/client-appintegrations/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null 
+++ b/clients/client-appintegrations/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-appintegrations/models/models_0.ts b/clients/client-appintegrations/models/models_0.ts new file mode 100644 index 000000000000..424aa1b5ce78 --- /dev/null +++ b/clients/client-appintegrations/models/models_0.ts @@ -0,0 +1,515 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; + +/** + *

You do not have sufficient access to perform this action.

+ */ +export interface AccessDeniedException extends __SmithyException, $MetadataBearer { + name: "AccessDeniedException"; + $fault: "client"; + Message?: string; +} + +export namespace AccessDeniedException { + export const filterSensitiveLog = (obj: AccessDeniedException): any => ({ + ...obj, + }); +} + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

The event filter.

+ */ +export interface EventFilter { + /** + *

The source of the events.

+ */ + Source: string | undefined; +} + +export namespace EventFilter { + export const filterSensitiveLog = (obj: EventFilter): any => ({ + ...obj, + }); +} + +export interface CreateEventIntegrationRequest { + /** + *

The name of the event integration.

+ */ + Name: string | undefined; + + /** + *

The description of the event integration.

+ */ + Description?: string; + + /** + *

The event filter.

+ */ + EventFilter: EventFilter | undefined; + + /** + *

The EventBridge bus.

+ */ + EventBridgeBus: string | undefined; + + /** + *

A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

+ */ + ClientToken?: string; + + /** + *

One or more tags.

+ */ + Tags?: { [key: string]: string }; +} + +export namespace CreateEventIntegrationRequest { + export const filterSensitiveLog = (obj: CreateEventIntegrationRequest): any => ({ + ...obj, + }); +} + +export interface CreateEventIntegrationResponse { + /** + *

The Amazon Resource Name (ARN) of the event integration.

+ */ + EventIntegrationArn?: string; +} + +export namespace CreateEventIntegrationResponse { + export const filterSensitiveLog = (obj: CreateEventIntegrationResponse): any => ({ + ...obj, + }); +} + +/** + *

A resource with the specified name already exists.

+ */ +export interface DuplicateResourceException extends __SmithyException, $MetadataBearer { + name: "DuplicateResourceException"; + $fault: "client"; + Message?: string; +} + +export namespace DuplicateResourceException { + export const filterSensitiveLog = (obj: DuplicateResourceException): any => ({ + ...obj, + }); +} + +/** + *

Request processing failed due to an error or failure with the service.

+ */ +export interface InternalServiceError extends __SmithyException, $MetadataBearer { + name: "InternalServiceError"; + $fault: "server"; + Message?: string; +} + +export namespace InternalServiceError { + export const filterSensitiveLog = (obj: InternalServiceError): any => ({ + ...obj, + }); +} + +/** + *

The request is not valid.

+ */ +export interface InvalidRequestException extends __SmithyException, $MetadataBearer { + name: "InvalidRequestException"; + $fault: "client"; + Message?: string; +} + +export namespace InvalidRequestException { + export const filterSensitiveLog = (obj: InvalidRequestException): any => ({ + ...obj, + }); +} + +/** + *

The allowed quota for the resource has been exceeded.

+ */ +export interface ResourceQuotaExceededException extends __SmithyException, $MetadataBearer { + name: "ResourceQuotaExceededException"; + $fault: "client"; + Message?: string; +} + +export namespace ResourceQuotaExceededException { + export const filterSensitiveLog = (obj: ResourceQuotaExceededException): any => ({ + ...obj, + }); +} + +/** + *

The throttling limit has been exceeded.

+ */ +export interface ThrottlingException extends __SmithyException, $MetadataBearer { + name: "ThrottlingException"; + $fault: "client"; + Message?: string; +} + +export namespace ThrottlingException { + export const filterSensitiveLog = (obj: ThrottlingException): any => ({ + ...obj, + }); +} + +export interface DeleteEventIntegrationRequest { + /** + *

The name of the event integration.

+ */ + Name: string | undefined; +} + +export namespace DeleteEventIntegrationRequest { + export const filterSensitiveLog = (obj: DeleteEventIntegrationRequest): any => ({ + ...obj, + }); +} + +export interface DeleteEventIntegrationResponse {} + +export namespace DeleteEventIntegrationResponse { + export const filterSensitiveLog = (obj: DeleteEventIntegrationResponse): any => ({ + ...obj, + }); +} + +/** + *

The specified resource was not found.

+ */ +export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { + name: "ResourceNotFoundException"; + $fault: "client"; + Message?: string; +} + +export namespace ResourceNotFoundException { + export const filterSensitiveLog = (obj: ResourceNotFoundException): any => ({ + ...obj, + }); +} + +export interface GetEventIntegrationRequest { + /** + *

The name of the event integration.

+ */ + Name: string | undefined; +} + +export namespace GetEventIntegrationRequest { + export const filterSensitiveLog = (obj: GetEventIntegrationRequest): any => ({ + ...obj, + }); +} + +export interface GetEventIntegrationResponse { + /** + *

The name of the event integration.

+ */ + Name?: string; + + /** + *

The description of the event integration.

+ */ + Description?: string; + + /** + *

The Amazon Resource Name (ARN) for the event integration.

+ */ + EventIntegrationArn?: string; + + /** + *

The EventBridge bus.

+ */ + EventBridgeBus?: string; + + /** + *

The event filter.

+ */ + EventFilter?: EventFilter; + + /** + *

One or more tags.

+ */ + Tags?: { [key: string]: string }; +} + +export namespace GetEventIntegrationResponse { + export const filterSensitiveLog = (obj: GetEventIntegrationResponse): any => ({ + ...obj, + }); +} + +export interface ListEventIntegrationAssociationsRequest { + /** + *

The name of the event integration.

+ */ + EventIntegrationName: string | undefined; + + /** + *

The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

+ */ + NextToken?: string; + + /** + *

The maximum number of results to return per page.

+ */ + MaxResults?: number; +} + +export namespace ListEventIntegrationAssociationsRequest { + export const filterSensitiveLog = (obj: ListEventIntegrationAssociationsRequest): any => ({ + ...obj, + }); +} + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

The event integration association.

+ */ +export interface EventIntegrationAssociation { + /** + *

The Amazon Resource Name (ARN) for the event integration association.

+ */ + EventIntegrationAssociationArn?: string; + + /** + *

The identifier for the event integration association.

+ */ + EventIntegrationAssociationId?: string; + + /** + *

The name of the event integration.

+ */ + EventIntegrationName?: string; + + /** + *

The identifier for the client that is associated with the event integration.

+ */ + ClientId?: string; + + /** + *

The name of the EventBridge rule.

+ */ + EventBridgeRuleName?: string; + + /** + *

The metadata associated with the client.

+ */ + ClientAssociationMetadata?: { [key: string]: string }; +} + +export namespace EventIntegrationAssociation { + export const filterSensitiveLog = (obj: EventIntegrationAssociation): any => ({ + ...obj, + }); +} + +export interface ListEventIntegrationAssociationsResponse { + /** + *

The event integration associations.

+ */ + EventIntegrationAssociations?: EventIntegrationAssociation[]; + + /** + *

If there are additional results, this is the token for the next set of results.

+ */ + NextToken?: string; +} + +export namespace ListEventIntegrationAssociationsResponse { + export const filterSensitiveLog = (obj: ListEventIntegrationAssociationsResponse): any => ({ + ...obj, + }); +} + +export interface ListEventIntegrationsRequest { + /** + *

The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

+ */ + NextToken?: string; + + /** + *

The maximum number of results to return per page.

+ */ + MaxResults?: number; +} + +export namespace ListEventIntegrationsRequest { + export const filterSensitiveLog = (obj: ListEventIntegrationsRequest): any => ({ + ...obj, + }); +} + +/** + *

The Amazon AppIntegrations APIs are in preview release and are subject to change.

+ *

The event integration.

+ */ +export interface EventIntegration { + /** + *

The Amazon Resource Name (ARN) of the event integration.

+ */ + EventIntegrationArn?: string; + + /** + *

The name of the event integration.

+ */ + Name?: string; + + /** + *

The event integration description.

+ */ + Description?: string; + + /** + *

The event integration filter.

+ */ + EventFilter?: EventFilter; + + /** + *

The Amazon EventBridge bus for the event integration.

+ */ + EventBridgeBus?: string; + + /** + *

The tags.

+ */ + Tags?: { [key: string]: string }; +} + +export namespace EventIntegration { + export const filterSensitiveLog = (obj: EventIntegration): any => ({ + ...obj, + }); +} + +export interface ListEventIntegrationsResponse { + /** + *

The event integrations.

+ */ + EventIntegrations?: EventIntegration[]; + + /** + *

If there are additional results, this is the token for the next set of results.

+ */ + NextToken?: string; +} + +export namespace ListEventIntegrationsResponse { + export const filterSensitiveLog = (obj: ListEventIntegrationsResponse): any => ({ + ...obj, + }); +} + +export interface ListTagsForResourceRequest { + /** + *

The Amazon Resource Name (ARN) of the resource.

+ */ + resourceArn: string | undefined; +} + +export namespace ListTagsForResourceRequest { + export const filterSensitiveLog = (obj: ListTagsForResourceRequest): any => ({ + ...obj, + }); +} + +export interface ListTagsForResourceResponse { + /** + *

Information about the tags.

+ */ + tags?: { [key: string]: string }; +} + +export namespace ListTagsForResourceResponse { + export const filterSensitiveLog = (obj: ListTagsForResourceResponse): any => ({ + ...obj, + }); +} + +export interface TagResourceRequest { + /** + *

The Amazon Resource Name (ARN) of the resource.

+ */ + resourceArn: string | undefined; + + /** + *

One or more tags.

+ */ + tags: { [key: string]: string } | undefined; +} + +export namespace TagResourceRequest { + export const filterSensitiveLog = (obj: TagResourceRequest): any => ({ + ...obj, + }); +} + +export interface TagResourceResponse {} + +export namespace TagResourceResponse { + export const filterSensitiveLog = (obj: TagResourceResponse): any => ({ + ...obj, + }); +} + +export interface UntagResourceRequest { + /** + *

The Amazon Resource Name (ARN) of the resource.

+ */ + resourceArn: string | undefined; + + /** + *

The tag keys.

+ */ + tagKeys: string[] | undefined; +} + +export namespace UntagResourceRequest { + export const filterSensitiveLog = (obj: UntagResourceRequest): any => ({ + ...obj, + }); +} + +export interface UntagResourceResponse {} + +export namespace UntagResourceResponse { + export const filterSensitiveLog = (obj: UntagResourceResponse): any => ({ + ...obj, + }); +} + +export interface UpdateEventIntegrationRequest { + /** + *

The name of the event integration.

+ */ + Name: string | undefined; + + /** + *

The description of the event integration.

+ */ + Description?: string; +} + +export namespace UpdateEventIntegrationRequest { + export const filterSensitiveLog = (obj: UpdateEventIntegrationRequest): any => ({ + ...obj, + }); +} + +export interface UpdateEventIntegrationResponse {} + +export namespace UpdateEventIntegrationResponse { + export const filterSensitiveLog = (obj: UpdateEventIntegrationResponse): any => ({ + ...obj, + }); +} diff --git a/clients/client-appintegrations/package.json b/clients/client-appintegrations/package.json new file mode 100644 index 000000000000..47af5c55d92b --- /dev/null +++ b/clients/client-appintegrations/package.json @@ -0,0 +1,85 @@ +{ + "name": "@aws-sdk/client-appintegrations", + "description": "AWS SDK for JavaScript Appintegrations Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + "@aws-sdk/middleware-content-length": 
"1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + "@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": "1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + "@aws-sdk/middleware-signing": "1.0.0-rc.7", + "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0", + "uuid": "^3.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "@types/uuid": "^3.0.0", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-appintegrations", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-appintegrations" + } +} diff --git a/clients/client-appintegrations/protocols/Aws_restJson1.ts b/clients/client-appintegrations/protocols/Aws_restJson1.ts new file mode 100644 index 000000000000..407a556437e3 --- /dev/null +++ b/clients/client-appintegrations/protocols/Aws_restJson1.ts @@ -0,0 
+1,1400 @@ +import { + CreateEventIntegrationCommandInput, + CreateEventIntegrationCommandOutput, +} from "../commands/CreateEventIntegrationCommand"; +import { + DeleteEventIntegrationCommandInput, + DeleteEventIntegrationCommandOutput, +} from "../commands/DeleteEventIntegrationCommand"; +import { + GetEventIntegrationCommandInput, + GetEventIntegrationCommandOutput, +} from "../commands/GetEventIntegrationCommand"; +import { + ListEventIntegrationAssociationsCommandInput, + ListEventIntegrationAssociationsCommandOutput, +} from "../commands/ListEventIntegrationAssociationsCommand"; +import { + ListEventIntegrationsCommandInput, + ListEventIntegrationsCommandOutput, +} from "../commands/ListEventIntegrationsCommand"; +import { + ListTagsForResourceCommandInput, + ListTagsForResourceCommandOutput, +} from "../commands/ListTagsForResourceCommand"; +import { TagResourceCommandInput, TagResourceCommandOutput } from "../commands/TagResourceCommand"; +import { UntagResourceCommandInput, UntagResourceCommandOutput } from "../commands/UntagResourceCommand"; +import { + UpdateEventIntegrationCommandInput, + UpdateEventIntegrationCommandOutput, +} from "../commands/UpdateEventIntegrationCommand"; +import { + AccessDeniedException, + DuplicateResourceException, + EventFilter, + EventIntegration, + EventIntegrationAssociation, + InternalServiceError, + InvalidRequestException, + ResourceNotFoundException, + ResourceQuotaExceededException, + ThrottlingException, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { + SmithyException as __SmithyException, + extendedEncodeURIComponent as __extendedEncodeURIComponent, +} from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; +import { v4 as generateIdempotencyToken } from "uuid"; + +export 
const serializeAws_restJson1CreateEventIntegrationCommand = async ( + input: CreateEventIntegrationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/eventIntegrations"; + let body: any; + body = JSON.stringify({ + ClientToken: input.ClientToken ?? generateIdempotencyToken(), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.EventBridgeBus !== undefined && { EventBridgeBus: input.EventBridgeBus }), + ...(input.EventFilter !== undefined && { + EventFilter: serializeAws_restJson1EventFilter(input.EventFilter, context), + }), + ...(input.Name !== undefined && { Name: input.Name }), + ...(input.Tags !== undefined && { Tags: serializeAws_restJson1TagMap(input.Tags, context) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteEventIntegrationCommand = async ( + input: DeleteEventIntegrationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/eventIntegrations/{Name}"; + if (input.Name !== undefined) { + const labelValue: string = input.Name; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: Name."); + } + resolvedPath = resolvedPath.replace("{Name}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: Name."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1GetEventIntegrationCommand = async ( + input: 
GetEventIntegrationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/eventIntegrations/{Name}"; + if (input.Name !== undefined) { + const labelValue: string = input.Name; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: Name."); + } + resolvedPath = resolvedPath.replace("{Name}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: Name."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1ListEventIntegrationAssociationsCommand = async ( + input: ListEventIntegrationAssociationsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/eventIntegrations/{EventIntegrationName}/associations"; + if (input.EventIntegrationName !== undefined) { + const labelValue: string = input.EventIntegrationName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: EventIntegrationName."); + } + resolvedPath = resolvedPath.replace("{EventIntegrationName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: EventIntegrationName."); + } + const query: any = { + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const 
serializeAws_restJson1ListEventIntegrationsCommand = async ( + input: ListEventIntegrationsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/eventIntegrations"; + const query: any = { + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1ListTagsForResourceCommand = async ( + input: ListTagsForResourceCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/tags/{resourceArn}"; + if (input.resourceArn !== undefined) { + const labelValue: string = input.resourceArn; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: resourceArn."); + } + resolvedPath = resolvedPath.replace("{resourceArn}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: resourceArn."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1TagResourceCommand = async ( + input: TagResourceCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/tags/{resourceArn}"; + if (input.resourceArn !== undefined) { + const labelValue: string = input.resourceArn; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input 
HTTP label: resourceArn."); + } + resolvedPath = resolvedPath.replace("{resourceArn}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: resourceArn."); + } + let body: any; + body = JSON.stringify({ + ...(input.tags !== undefined && { tags: serializeAws_restJson1TagMap(input.tags, context) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1UntagResourceCommand = async ( + input: UntagResourceCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/tags/{resourceArn}"; + if (input.resourceArn !== undefined) { + const labelValue: string = input.resourceArn; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: resourceArn."); + } + resolvedPath = resolvedPath.replace("{resourceArn}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: resourceArn."); + } + const query: any = { + ...(input.tagKeys !== undefined && { tagKeys: (input.tagKeys || []).map((_entry) => _entry) }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1UpdateEventIntegrationCommand = async ( + input: UpdateEventIntegrationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/eventIntegrations/{Name}"; + if (input.Name !== undefined) { + const labelValue: string = input.Name; + if (labelValue.length <= 0) { + throw new 
Error("Empty value provided for input HTTP label: Name."); + } + resolvedPath = resolvedPath.replace("{Name}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: Name."); + } + let body: any; + body = JSON.stringify({ + ...(input.Description !== undefined && { Description: input.Description }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "PATCH", + headers, + path: resolvedPath, + body, + }); +}; + +export const deserializeAws_restJson1CreateEventIntegrationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateEventIntegrationCommandError(output, context); + } + const contents: CreateEventIntegrationCommandOutput = { + $metadata: deserializeMetadata(output), + EventIntegrationArn: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.EventIntegrationArn !== undefined && data.EventIntegrationArn !== null) { + contents.EventIntegrationArn = data.EventIntegrationArn; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateEventIntegrationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.appintegrations#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case 
"DuplicateResourceException": + case "com.amazonaws.appintegrations#DuplicateResourceException": + response = { + ...(await deserializeAws_restJson1DuplicateResourceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceQuotaExceededException": + case "com.amazonaws.appintegrations#ResourceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ResourceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteEventIntegrationCommand = async 
( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteEventIntegrationCommandError(output, context); + } + const contents: DeleteEventIntegrationCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteEventIntegrationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.appintegrations#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case 
"ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetEventIntegrationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetEventIntegrationCommandError(output, context); + } + const contents: GetEventIntegrationCommandOutput = { + $metadata: deserializeMetadata(output), + Description: undefined, + EventBridgeBus: undefined, + EventFilter: undefined, + EventIntegrationArn: undefined, + Name: undefined, + Tags: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.Description !== undefined && data.Description !== null) { + contents.Description = data.Description; + } + if (data.EventBridgeBus !== undefined && data.EventBridgeBus !== null) { + contents.EventBridgeBus = data.EventBridgeBus; + } + if (data.EventFilter !== undefined && data.EventFilter !== null) { + contents.EventFilter = deserializeAws_restJson1EventFilter(data.EventFilter, context); + } + if (data.EventIntegrationArn !== undefined && data.EventIntegrationArn !== null) { + contents.EventIntegrationArn = data.EventIntegrationArn; + } + if (data.Name !== undefined && data.Name !== null) { + 
contents.Name = data.Name; + } + if (data.Tags !== undefined && data.Tags !== null) { + contents.Tags = deserializeAws_restJson1TagMap(data.Tags, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetEventIntegrationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.appintegrations#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListEventIntegrationAssociationsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListEventIntegrationAssociationsCommandError(output, context); + } + const contents: ListEventIntegrationAssociationsCommandOutput = { + $metadata: deserializeMetadata(output), + EventIntegrationAssociations: undefined, + NextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.EventIntegrationAssociations !== undefined && data.EventIntegrationAssociations !== null) { + contents.EventIntegrationAssociations = deserializeAws_restJson1EventIntegrationAssociationsList( + data.EventIntegrationAssociations, + context + ); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListEventIntegrationAssociationsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case 
"AccessDeniedException": + case "com.amazonaws.appintegrations#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListEventIntegrationsCommand = async ( + output: __HttpResponse, + 
context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListEventIntegrationsCommandError(output, context); + } + const contents: ListEventIntegrationsCommandOutput = { + $metadata: deserializeMetadata(output), + EventIntegrations: undefined, + NextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.EventIntegrations !== undefined && data.EventIntegrations !== null) { + contents.EventIntegrations = deserializeAws_restJson1EventIntegrationsList(data.EventIntegrations, context); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListEventIntegrationsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.appintegrations#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListTagsForResourceCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListTagsForResourceCommandError(output, context); + } + const contents: ListTagsForResourceCommandOutput = { + $metadata: deserializeMetadata(output), + tags: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.tags !== undefined && data.tags !== null) { + contents.tags = deserializeAws_restJson1TagMap(data.tags, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListTagsForResourceCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceError": + case 
"com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1TagResourceCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1TagResourceCommandError(output, context); + } + const contents: TagResourceCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, 
context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1TagResourceCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message 
= message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UntagResourceCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UntagResourceCommandError(output, context); + } + const contents: UntagResourceCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UntagResourceCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await 
deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UpdateEventIntegrationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateEventIntegrationCommandError(output, context); + } + const contents: UpdateEventIntegrationCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateEventIntegrationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.appintegrations#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceError": + case "com.amazonaws.appintegrations#InternalServiceError": + response = { + ...(await 
deserializeAws_restJson1InternalServiceErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.appintegrations#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.appintegrations#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.appintegrations#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +const deserializeAws_restJson1AccessDeniedExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: AccessDeniedException = { + name: "AccessDeniedException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const 
deserializeAws_restJson1DuplicateResourceExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: DuplicateResourceException = { + name: "DuplicateResourceException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1InternalServiceErrorResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InternalServiceError = { + name: "InternalServiceError", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1InvalidRequestExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InvalidRequestException = { + name: "InvalidRequestException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ResourceNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ResourceNotFoundException = { + name: "ResourceNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ResourceQuotaExceededExceptionResponse = async ( + 
parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ResourceQuotaExceededException = { + name: "ResourceQuotaExceededException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ThrottlingExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ThrottlingException = { + name: "ThrottlingException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const serializeAws_restJson1EventFilter = (input: EventFilter, context: __SerdeContext): any => { + return { + ...(input.Source !== undefined && { Source: input.Source }), + }; +}; + +const serializeAws_restJson1TagMap = (input: { [key: string]: string }, context: __SerdeContext): any => { + return Object.entries(input).reduce( + (acc: { [key: string]: string }, [key, value]: [string, any]) => ({ + ...acc, + [key]: value, + }), + {} + ); +}; + +const deserializeAws_restJson1ClientAssociationMetadata = ( + output: any, + context: __SerdeContext +): { [key: string]: string } => { + return Object.entries(output).reduce( + (acc: { [key: string]: string }, [key, value]: [string, any]) => ({ + ...acc, + [key]: value, + }), + {} + ); +}; + +const deserializeAws_restJson1EventFilter = (output: any, context: __SerdeContext): EventFilter => { + return { + Source: output.Source !== undefined && output.Source !== null ? 
output.Source : undefined, + } as any; +}; + +const deserializeAws_restJson1EventIntegration = (output: any, context: __SerdeContext): EventIntegration => { + return { + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + EventBridgeBus: + output.EventBridgeBus !== undefined && output.EventBridgeBus !== null ? output.EventBridgeBus : undefined, + EventFilter: + output.EventFilter !== undefined && output.EventFilter !== null + ? deserializeAws_restJson1EventFilter(output.EventFilter, context) + : undefined, + EventIntegrationArn: + output.EventIntegrationArn !== undefined && output.EventIntegrationArn !== null + ? output.EventIntegrationArn + : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? deserializeAws_restJson1TagMap(output.Tags, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1EventIntegrationAssociation = ( + output: any, + context: __SerdeContext +): EventIntegrationAssociation => { + return { + ClientAssociationMetadata: + output.ClientAssociationMetadata !== undefined && output.ClientAssociationMetadata !== null + ? deserializeAws_restJson1ClientAssociationMetadata(output.ClientAssociationMetadata, context) + : undefined, + ClientId: output.ClientId !== undefined && output.ClientId !== null ? output.ClientId : undefined, + EventBridgeRuleName: + output.EventBridgeRuleName !== undefined && output.EventBridgeRuleName !== null + ? output.EventBridgeRuleName + : undefined, + EventIntegrationAssociationArn: + output.EventIntegrationAssociationArn !== undefined && output.EventIntegrationAssociationArn !== null + ? output.EventIntegrationAssociationArn + : undefined, + EventIntegrationAssociationId: + output.EventIntegrationAssociationId !== undefined && output.EventIntegrationAssociationId !== null + ? 
output.EventIntegrationAssociationId + : undefined, + EventIntegrationName: + output.EventIntegrationName !== undefined && output.EventIntegrationName !== null + ? output.EventIntegrationName + : undefined, + } as any; +}; + +const deserializeAws_restJson1EventIntegrationAssociationsList = ( + output: any, + context: __SerdeContext +): EventIntegrationAssociation[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1EventIntegrationAssociation(entry, context)); +}; + +const deserializeAws_restJson1EventIntegrationsList = (output: any, context: __SerdeContext): EventIntegration[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1EventIntegration(entry, context)); +}; + +const deserializeAws_restJson1TagMap = (output: any, context: __SerdeContext): { [key: string]: string } => { + return Object.entries(output).reduce( + (acc: { [key: string]: string }, [key, value]: [string, any]) => ({ + ...acc, + [key]: value, + }), + {} + ); +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. +const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}; + +// Encode Uint8Array data into string with utf-8. 
+const collectBodyString = (streamBody: any, context: __SerdeContext): Promise => + collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); + +const isSerializableHeaderValue = (value: any): boolean => + value !== undefined && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); + +const parseBody = (streamBody: any, context: __SerdeContext): any => + collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; + }); + +/** + * Load an error code for the aws.rest-json-1.1 protocol. + */ +const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => { + const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + + const sanitizeErrorCode = (rawValue: string): string => { + let cleanValue = rawValue; + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + + return ""; +}; diff --git a/clients/client-appintegrations/runtimeConfig.browser.ts b/clients/client-appintegrations/runtimeConfig.browser.ts new file mode 100644 index 000000000000..f8911bb4e730 --- /dev/null +++ b/clients/client-appintegrations/runtimeConfig.browser.ts @@ -0,0 +1,34 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler"; +import { 
invalidAsyncFunction } from "@aws-sdk/invalid-dependency";
+import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry";
+import { parseUrl } from "@aws-sdk/url-parser-browser";
+import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser";
+import { calculateBodyLength } from "@aws-sdk/util-body-length-browser";
+import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser";
+import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser";
+import { ClientDefaults } from "./AppIntegrationsClient";
+import { ClientSharedValues } from "./runtimeConfig.shared";
+
+/**
+ * @internal
+ */
+export const ClientDefaultValues: Required<ClientDefaults> = {
+  ...ClientSharedValues,
+  runtime: "browser",
+  base64Decoder: fromBase64,
+  base64Encoder: toBase64,
+  bodyLengthChecker: calculateBodyLength,
+  credentialDefaultProvider: invalidAsyncFunction("Credential is missing") as any,
+  defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version),
+  maxAttempts: DEFAULT_MAX_ATTEMPTS,
+  region: invalidAsyncFunction("Region is missing") as any,
+  requestHandler: new FetchHttpHandler(),
+  sha256: Sha256,
+  streamCollector,
+  urlParser: parseUrl,
+  utf8Decoder: fromUtf8,
+  utf8Encoder: toUtf8,
+};
diff --git a/clients/client-appintegrations/runtimeConfig.native.ts b/clients/client-appintegrations/runtimeConfig.native.ts
new file mode 100644
index 000000000000..9dddd1281d4e
--- /dev/null
+++ b/clients/client-appintegrations/runtimeConfig.native.ts
@@ -0,0 +1,17 @@
+import packageInfo from "./package.json";
+
+import { Sha256 } from "@aws-crypto/sha256-js";
+import { parseUrl } from "@aws-sdk/url-parser-node";
+import { ClientDefaults } from "./AppIntegrationsClient";
+import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser";
+
+/**
+ * @internal
+ */
+export const ClientDefaultValues: Required<ClientDefaults> = {
+  ...BrowserDefaults,
+  runtime: "react-native",
+  defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`,
+  sha256:
Sha256, + urlParser: parseUrl, +}; diff --git a/clients/client-appintegrations/runtimeConfig.shared.ts b/clients/client-appintegrations/runtimeConfig.shared.ts new file mode 100644 index 000000000000..ac0aeac465bf --- /dev/null +++ b/clients/client-appintegrations/runtimeConfig.shared.ts @@ -0,0 +1,13 @@ +import { defaultRegionInfoProvider } from "./endpoints"; +import { Logger as __Logger } from "@aws-sdk/types"; + +/** + * @internal + */ +export const ClientSharedValues = { + apiVersion: "2020-07-29", + disableHostPrefix: false, + logger: {} as __Logger, + regionInfoProvider: defaultRegionInfoProvider, + signingName: "app-integrations", +}; diff --git a/clients/client-appintegrations/runtimeConfig.ts b/clients/client-appintegrations/runtimeConfig.ts new file mode 100644 index 000000000000..facdeb033815 --- /dev/null +++ b/clients/client-appintegrations/runtimeConfig.ts @@ -0,0 +1,36 @@ +import packageInfo from "./package.json"; + +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { Hash } from "@aws-sdk/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider"; +import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-node"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-node"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node"; +import { ClientDefaults } from "./AppIntegrationsClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "node", + 
base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: new NodeHttpHandler(), + sha256: Hash.bind(null, "sha256"), + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-appintegrations/tsconfig.es.json b/clients/client-appintegrations/tsconfig.es.json new file mode 100644 index 000000000000..30df5d2e6986 --- /dev/null +++ b/clients/client-appintegrations/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-appintegrations/tsconfig.json b/clients/client-appintegrations/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-appintegrations/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": 
"./README.md", + "mode": "file", + "out": "./docs", + "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-connect-contact-lens/.gitignore b/clients/client-connect-contact-lens/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-connect-contact-lens/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-connect-contact-lens/.npmignore b/clients/client-connect-contact-lens/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-connect-contact-lens/.npmignore @@ -0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-connect-contact-lens/ConnectContactLens.ts b/clients/client-connect-contact-lens/ConnectContactLens.ts new file mode 100644 index 000000000000..d9c85852eb4c --- /dev/null +++ b/clients/client-connect-contact-lens/ConnectContactLens.ts @@ -0,0 +1,50 @@ +import { ConnectContactLensClient } from "./ConnectContactLensClient"; +import { + ListRealtimeContactAnalysisSegmentsCommand, + ListRealtimeContactAnalysisSegmentsCommandInput, + ListRealtimeContactAnalysisSegmentsCommandOutput, +} from "./commands/ListRealtimeContactAnalysisSegmentsCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + *

Contact Lens for Amazon Connect enables you to analyze conversations between customer and agents, + * by using speech transcription, natural language processing, and intelligent search + * capabilities. It performs sentiment analysis, detects issues, and enables you to automatically + * categorize contacts.

+ *

Contact Lens for Amazon Connect provides both real-time and post-call analytics of customer-agent + * conversations. For more information, see Analyze conversations using + * Contact Lens in the Amazon Connect Administrator Guide.

+ */ +export class ConnectContactLens extends ConnectContactLensClient { + /** + *

Provides a list of analysis segments for a real-time analysis session.

+ */ + public listRealtimeContactAnalysisSegments( + args: ListRealtimeContactAnalysisSegmentsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listRealtimeContactAnalysisSegments( + args: ListRealtimeContactAnalysisSegmentsCommandInput, + cb: (err: any, data?: ListRealtimeContactAnalysisSegmentsCommandOutput) => void + ): void; + public listRealtimeContactAnalysisSegments( + args: ListRealtimeContactAnalysisSegmentsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListRealtimeContactAnalysisSegmentsCommandOutput) => void + ): void; + public listRealtimeContactAnalysisSegments( + args: ListRealtimeContactAnalysisSegmentsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListRealtimeContactAnalysisSegmentsCommandOutput) => void), + cb?: (err: any, data?: ListRealtimeContactAnalysisSegmentsCommandOutput) => void + ): Promise | void { + const command = new ListRealtimeContactAnalysisSegmentsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-connect-contact-lens/ConnectContactLensClient.ts b/clients/client-connect-contact-lens/ConnectContactLensClient.ts new file mode 100644 index 000000000000..0254a045bedd --- /dev/null +++ b/clients/client-connect-contact-lens/ConnectContactLensClient.ts @@ -0,0 +1,211 @@ +import { + ListRealtimeContactAnalysisSegmentsCommandInput, + ListRealtimeContactAnalysisSegmentsCommandOutput, +} from "./commands/ListRealtimeContactAnalysisSegmentsCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + 
resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = ListRealtimeContactAnalysisSegmentsCommandInput; + +export type ServiceOutputTypes = ListRealtimeContactAnalysisSegmentsCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. 
+ */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. + */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. + */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. + */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dyanamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region. 
+ */ + regionInfoProvider?: RegionInfoProvider; +} + +export type ConnectContactLensClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type ConnectContactLensClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + *

Contact Lens for Amazon Connect enables you to analyze conversations between customer and agents, + * by using speech transcription, natural language processing, and intelligent search + * capabilities. It performs sentiment analysis, detects issues, and enables you to automatically + * categorize contacts.

+ *

Contact Lens for Amazon Connect provides both real-time and post-call analytics of customer-agent + * conversations. For more information, see Analyze conversations using + * Contact Lens in the Amazon Connect Administrator Guide.

+ */ +export class ConnectContactLensClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + ConnectContactLensClientResolvedConfig +> { + readonly config: ConnectContactLensClientResolvedConfig; + + constructor(configuration: ConnectContactLensClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-connect-contact-lens/LICENSE b/clients/client-connect-contact-lens/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-connect-contact-lens/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-connect-contact-lens/README.md b/clients/client-connect-contact-lens/README.md new file mode 100644 index 000000000000..71fc4101f654 --- /dev/null +++ b/clients/client-connect-contact-lens/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-connect-contact-lens + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-connect-contact-lens/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-connect-contact-lens) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-connect-contact-lens.svg)](https://www.npmjs.com/package/@aws-sdk/client-connect-contact-lens) + +For SDK usage, please step to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-connect-contact-lens/commands/ListRealtimeContactAnalysisSegmentsCommand.ts b/clients/client-connect-contact-lens/commands/ListRealtimeContactAnalysisSegmentsCommand.ts new file mode 100644 index 000000000000..83c5870bf494 --- /dev/null +++ b/clients/client-connect-contact-lens/commands/ListRealtimeContactAnalysisSegmentsCommand.ts @@ -0,0 +1,102 @@ +import { + ConnectContactLensClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../ConnectContactLensClient"; +import { + ListRealtimeContactAnalysisSegmentsRequest, + ListRealtimeContactAnalysisSegmentsResponse, +} from "../models/models_0"; +import { + deserializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommand, + serializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListRealtimeContactAnalysisSegmentsCommandInput = ListRealtimeContactAnalysisSegmentsRequest; +export type ListRealtimeContactAnalysisSegmentsCommandOutput = ListRealtimeContactAnalysisSegmentsResponse & + __MetadataBearer; + +/** + *

Provides a list of analysis segments for a real-time analysis session.

+ */ +export class ListRealtimeContactAnalysisSegmentsCommand extends $Command< + ListRealtimeContactAnalysisSegmentsCommandInput, + ListRealtimeContactAnalysisSegmentsCommandOutput, + ConnectContactLensClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListRealtimeContactAnalysisSegmentsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectContactLensClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectContactLensClient"; + const commandName = "ListRealtimeContactAnalysisSegmentsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListRealtimeContactAnalysisSegmentsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListRealtimeContactAnalysisSegmentsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: ListRealtimeContactAnalysisSegmentsCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommand(output, context); 
+ } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-connect-contact-lens/endpoints.ts b/clients/client-connect-contact-lens/endpoints.ts new file mode 100644 index 000000000000..5c3a02886cb0 --- /dev/null +++ b/clients/client-connect-contact-lens/endpoints.ts @@ -0,0 +1,111 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "connect.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "connect.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "connect.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "connect.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "connect.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. 
+ case "ap-northeast-1": + regionInfo = { + hostname: "connect.ap-northeast-1.amazonaws.com", + partition: "aws", + }; + break; + case "ap-southeast-2": + regionInfo = { + hostname: "connect.ap-southeast-2.amazonaws.com", + partition: "aws", + }; + break; + case "eu-central-1": + regionInfo = { + hostname: "connect.eu-central-1.amazonaws.com", + partition: "aws", + }; + break; + case "us-east-1": + regionInfo = { + hostname: "connect.us-east-1.amazonaws.com", + partition: "aws", + }; + break; + case "us-west-2": + regionInfo = { + hostname: "connect.us-west-2.amazonaws.com", + partition: "aws", + }; + break; + // Next, try to match partition endpoints. + default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. 
+ if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-connect-contact-lens/index.ts b/clients/client-connect-contact-lens/index.ts new file mode 100644 index 000000000000..afb1f98c053b --- /dev/null +++ b/clients/client-connect-contact-lens/index.ts @@ -0,0 +1,6 @@ +export * from "./ConnectContactLensClient"; +export * from "./ConnectContactLens"; +export * from "./commands/ListRealtimeContactAnalysisSegmentsCommand"; +export * from "./pagination/ListRealtimeContactAnalysisSegmentsPaginator"; +export * from "./pagination/Interfaces"; +export * from "./models/index"; diff --git a/clients/client-connect-contact-lens/models/index.ts b/clients/client-connect-contact-lens/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null +++ b/clients/client-connect-contact-lens/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-connect-contact-lens/models/models_0.ts b/clients/client-connect-contact-lens/models/models_0.ts new file mode 100644 index 000000000000..04e3961e5ac6 --- /dev/null +++ b/clients/client-connect-contact-lens/models/models_0.ts @@ -0,0 +1,310 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; + +/** + *

You do not have sufficient access to perform this action.

+ */ +export interface AccessDeniedException extends __SmithyException, $MetadataBearer { + name: "AccessDeniedException"; + $fault: "client"; + Message: string | undefined; +} + +export namespace AccessDeniedException { + export const filterSensitiveLog = (obj: AccessDeniedException): any => ({ + ...obj, + }); +} + +/** + *

Request processing failed due to an error or failure with the service.

+ */ +export interface InternalServiceException extends __SmithyException, $MetadataBearer { + name: "InternalServiceException"; + $fault: "server"; + Message?: string; +} + +export namespace InternalServiceException { + export const filterSensitiveLog = (obj: InternalServiceException): any => ({ + ...obj, + }); +} + +/** + *

The request is not valid.

+ */ +export interface InvalidRequestException extends __SmithyException, $MetadataBearer { + name: "InvalidRequestException"; + $fault: "client"; + Message?: string; +} + +export namespace InvalidRequestException { + export const filterSensitiveLog = (obj: InvalidRequestException): any => ({ + ...obj, + }); +} + +export interface ListRealtimeContactAnalysisSegmentsRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The identifier of the contact.

+ */ + ContactId: string | undefined; + + /** + *

The maximum number of results to return per page.

+ */ + MaxResults?: number; + + /** + *

The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

+ */ + NextToken?: string; +} + +export namespace ListRealtimeContactAnalysisSegmentsRequest { + export const filterSensitiveLog = (obj: ListRealtimeContactAnalysisSegmentsRequest): any => ({ + ...obj, + }); +} + +/** + *

The section of the contact audio where that category rule was detected.

+ */ +export interface PointOfInterest { + /** + *

The beginning offset in milliseconds where the category rule was detected.

+ */ + BeginOffsetMillis: number | undefined; + + /** + *

The ending offset in milliseconds where the category rule was detected.

+ */ + EndOffsetMillis: number | undefined; +} + +export namespace PointOfInterest { + export const filterSensitiveLog = (obj: PointOfInterest): any => ({ + ...obj, + }); +} + +/** + *

Provides information about the category rule that was matched.

+ */ +export interface CategoryDetails { + /** + *

The section of audio where the category rule was detected.

+ */ + PointsOfInterest: PointOfInterest[] | undefined; +} + +export namespace CategoryDetails { + export const filterSensitiveLog = (obj: CategoryDetails): any => ({ + ...obj, + }); +} + +/** + *

Provides the category rules that are used to automatically categorize contacts based on + * uttered keywords and phrases.

+ */ +export interface Categories { + /** + *

The category rules that have been matched in the analyzed segment.

+ */ + MatchedCategories: string[] | undefined; + + /** + *

The category rule that was matched and when it occurred in the transcript.

+ */ + MatchedDetails: { [key: string]: CategoryDetails } | undefined; +} + +export namespace Categories { + export const filterSensitiveLog = (obj: Categories): any => ({ + ...obj, + }); +} + +/** + *

For characters that were detected as issues, where they occur in the transcript.

+ */ +export interface CharacterOffsets { + /** + *

The beginning of the issue.

+ */ + BeginOffsetChar: number | undefined; + + /** + *

The end of the issue.

+ */ + EndOffsetChar: number | undefined; +} + +export namespace CharacterOffsets { + export const filterSensitiveLog = (obj: CharacterOffsets): any => ({ + ...obj, + }); +} + +/** + *

Potential issues that are detected based on an artificial intelligence analysis of each + * turn in the conversation.

+ */ +export interface IssueDetected { + /** + *

The offset for when the issue was detected in the segment.

+ */ + CharacterOffsets: CharacterOffsets | undefined; +} + +export namespace IssueDetected { + export const filterSensitiveLog = (obj: IssueDetected): any => ({ + ...obj, + }); +} + +export enum SentimentValue { + NEGATIVE = "NEGATIVE", + NEUTRAL = "NEUTRAL", + POSITIVE = "POSITIVE", +} + +/** + *

A list of messages in the session.

+ */ +export interface Transcript { + /** + *

The identifier of the transcript.

+ */ + Id: string | undefined; + + /** + *

The identifier of the participant.

+ */ + ParticipantId: string | undefined; + + /** + *

The role of the participant. For example, is it a customer, agent, or system.

+ */ + ParticipantRole: string | undefined; + + /** + *

The content of the transcript.

+ */ + Content: string | undefined; + + /** + *

The beginning offset in the contact for this transcript.

+ */ + BeginOffsetMillis: number | undefined; + + /** + *

The end offset in the contact for this transcript.

+ */ + EndOffsetMillis: number | undefined; + + /** + *

The sentiment detected for this piece of transcript.

+ */ + Sentiment: SentimentValue | string | undefined; + + /** + *

List of positions where issues were detected on the transcript.

+ */ + IssuesDetected?: IssueDetected[]; +} + +export namespace Transcript { + export const filterSensitiveLog = (obj: Transcript): any => ({ + ...obj, + }); +} + +/** + *

An analyzed segment for a real-time analysis session.

+ */ +export interface RealtimeContactAnalysisSegment { + /** + *

The analyzed transcript.

+ */ + Transcript?: Transcript; + + /** + *

The matched category rules.

+ */ + Categories?: Categories; +} + +export namespace RealtimeContactAnalysisSegment { + export const filterSensitiveLog = (obj: RealtimeContactAnalysisSegment): any => ({ + ...obj, + }); +} + +export interface ListRealtimeContactAnalysisSegmentsResponse { + /** + *

An analyzed transcript or category.

+ */ + Segments: RealtimeContactAnalysisSegment[] | undefined; + + /** + *

If there are additional results, this is the token for the next set of results. If the response includes nextToken, there are two possible scenarios:

+ *
    + *
  • + *

    There are more segments so another call is required to get them.

    + *
  • + *
  • + *

    There are no more segments at this time, but more may be available later (real-time + * analysis is in progress) so the client should call the operation again to get new + * segments.

    + *
  • + *
+ *

If the response does not include nextToken, the analysis is complete (successfully or failed) and there are no more segments to retrieve.

+ */ + NextToken?: string; +} + +export namespace ListRealtimeContactAnalysisSegmentsResponse { + export const filterSensitiveLog = (obj: ListRealtimeContactAnalysisSegmentsResponse): any => ({ + ...obj, + }); +} + +/** + *

The specified resource was not found.

+ */ +export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { + name: "ResourceNotFoundException"; + $fault: "client"; + Message?: string; +} + +export namespace ResourceNotFoundException { + export const filterSensitiveLog = (obj: ResourceNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

The throttling limit has been exceeded.

+ */ +export interface ThrottlingException extends __SmithyException, $MetadataBearer { + name: "ThrottlingException"; + $fault: "client"; + Message: string | undefined; +} + +export namespace ThrottlingException { + export const filterSensitiveLog = (obj: ThrottlingException): any => ({ + ...obj, + }); +} diff --git a/clients/client-connect-contact-lens/package.json b/clients/client-connect-contact-lens/package.json new file mode 100644 index 000000000000..61d4249eb2b8 --- /dev/null +++ b/clients/client-connect-contact-lens/package.json @@ -0,0 +1,83 @@ +{ + "name": "@aws-sdk/client-connect-contact-lens", + "description": "AWS SDK for JavaScript Connect Contact Lens Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + "@aws-sdk/middleware-content-length": "1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + 
"@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": "1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + "@aws-sdk/middleware-signing": "1.0.0-rc.7", + "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-connect-contact-lens", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-connect-contact-lens" + } +} diff --git a/clients/client-connect-contact-lens/pagination/Interfaces.ts b/clients/client-connect-contact-lens/pagination/Interfaces.ts new file mode 100644 index 000000000000..d300a961c6aa --- /dev/null +++ b/clients/client-connect-contact-lens/pagination/Interfaces.ts @@ -0,0 +1,7 @@ +import { ConnectContactLens } from "../ConnectContactLens"; +import { 
ConnectContactLensClient } from "../ConnectContactLensClient"; +import { PaginationConfiguration } from "@aws-sdk/types"; + +export interface ConnectContactLensPaginationConfiguration extends PaginationConfiguration { + client: ConnectContactLens | ConnectContactLensClient; +} diff --git a/clients/client-connect-contact-lens/pagination/ListRealtimeContactAnalysisSegmentsPaginator.ts b/clients/client-connect-contact-lens/pagination/ListRealtimeContactAnalysisSegmentsPaginator.ts new file mode 100644 index 000000000000..5bc4b718f83b --- /dev/null +++ b/clients/client-connect-contact-lens/pagination/ListRealtimeContactAnalysisSegmentsPaginator.ts @@ -0,0 +1,57 @@ +import { ConnectContactLens } from "../ConnectContactLens"; +import { ConnectContactLensClient } from "../ConnectContactLensClient"; +import { + ListRealtimeContactAnalysisSegmentsCommand, + ListRealtimeContactAnalysisSegmentsCommandInput, + ListRealtimeContactAnalysisSegmentsCommandOutput, +} from "../commands/ListRealtimeContactAnalysisSegmentsCommand"; +import { ConnectContactLensPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ConnectContactLensClient, + input: ListRealtimeContactAnalysisSegmentsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListRealtimeContactAnalysisSegmentsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: ConnectContactLens, + input: ListRealtimeContactAnalysisSegmentsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listRealtimeContactAnalysisSegments(input, ...args); +}; +export async function* paginateListRealtimeContactAnalysisSegments( + config: ConnectContactLensPaginationConfiguration, + input: ListRealtimeContactAnalysisSegmentsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = 
config.startingToken || undefined; + let hasNext = true; + let page: ListRealtimeContactAnalysisSegmentsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof ConnectContactLens) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ConnectContactLensClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected ConnectContactLens | ConnectContactLensClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-connect-contact-lens/protocols/Aws_restJson1.ts b/clients/client-connect-contact-lens/protocols/Aws_restJson1.ts new file mode 100644 index 000000000000..4218c5f1a95b --- /dev/null +++ b/clients/client-connect-contact-lens/protocols/Aws_restJson1.ts @@ -0,0 +1,415 @@ +import { + ListRealtimeContactAnalysisSegmentsCommandInput, + ListRealtimeContactAnalysisSegmentsCommandOutput, +} from "../commands/ListRealtimeContactAnalysisSegmentsCommand"; +import { + AccessDeniedException, + Categories, + CategoryDetails, + CharacterOffsets, + InternalServiceException, + InvalidRequestException, + IssueDetected, + PointOfInterest, + RealtimeContactAnalysisSegment, + ResourceNotFoundException, + ThrottlingException, + Transcript, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export const serializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommand = async ( + input: ListRealtimeContactAnalysisSegmentsCommandInput, + 
context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/realtime-contact-analysis/analysis-segments"; + let body: any; + body = JSON.stringify({ + ...(input.ContactId !== undefined && { ContactId: input.ContactId }), + ...(input.InstanceId !== undefined && { InstanceId: input.InstanceId }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const deserializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommandError(output, context); + } + const contents: ListRealtimeContactAnalysisSegmentsCommandOutput = { + $metadata: deserializeMetadata(output), + NextToken: undefined, + Segments: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + if (data.Segments !== undefined && data.Segments !== null) { + contents.Segments = deserializeAws_restJson1RealtimeContactAnalysisSegments(data.Segments, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListRealtimeContactAnalysisSegmentsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode 
= loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.connectcontactlens#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceException": + case "com.amazonaws.connectcontactlens#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connectcontactlens#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connectcontactlens#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connectcontactlens#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; 
+ +const deserializeAws_restJson1AccessDeniedExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: AccessDeniedException = { + name: "AccessDeniedException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1InternalServiceExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InternalServiceException = { + name: "InternalServiceException", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1InvalidRequestExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InvalidRequestException = { + name: "InvalidRequestException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ResourceNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ResourceNotFoundException = { + name: "ResourceNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ThrottlingExceptionResponse = async ( + parsedOutput: 
any, + context: __SerdeContext +): Promise => { + const contents: ThrottlingException = { + name: "ThrottlingException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1Categories = (output: any, context: __SerdeContext): Categories => { + return { + MatchedCategories: + output.MatchedCategories !== undefined && output.MatchedCategories !== null + ? deserializeAws_restJson1MatchedCategories(output.MatchedCategories, context) + : undefined, + MatchedDetails: + output.MatchedDetails !== undefined && output.MatchedDetails !== null + ? deserializeAws_restJson1MatchedDetails(output.MatchedDetails, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1CategoryDetails = (output: any, context: __SerdeContext): CategoryDetails => { + return { + PointsOfInterest: + output.PointsOfInterest !== undefined && output.PointsOfInterest !== null + ? deserializeAws_restJson1PointsOfInterest(output.PointsOfInterest, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1CharacterOffsets = (output: any, context: __SerdeContext): CharacterOffsets => { + return { + BeginOffsetChar: + output.BeginOffsetChar !== undefined && output.BeginOffsetChar !== null ? output.BeginOffsetChar : undefined, + EndOffsetChar: + output.EndOffsetChar !== undefined && output.EndOffsetChar !== null ? output.EndOffsetChar : undefined, + } as any; +}; + +const deserializeAws_restJson1IssueDetected = (output: any, context: __SerdeContext): IssueDetected => { + return { + CharacterOffsets: + output.CharacterOffsets !== undefined && output.CharacterOffsets !== null + ? 
deserializeAws_restJson1CharacterOffsets(output.CharacterOffsets, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1IssuesDetected = (output: any, context: __SerdeContext): IssueDetected[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1IssueDetected(entry, context)); +}; + +const deserializeAws_restJson1MatchedCategories = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1MatchedDetails = ( + output: any, + context: __SerdeContext +): { [key: string]: CategoryDetails } => { + return Object.entries(output).reduce( + (acc: { [key: string]: CategoryDetails }, [key, value]: [string, any]) => ({ + ...acc, + [key]: deserializeAws_restJson1CategoryDetails(value, context), + }), + {} + ); +}; + +const deserializeAws_restJson1PointOfInterest = (output: any, context: __SerdeContext): PointOfInterest => { + return { + BeginOffsetMillis: + output.BeginOffsetMillis !== undefined && output.BeginOffsetMillis !== null + ? output.BeginOffsetMillis + : undefined, + EndOffsetMillis: + output.EndOffsetMillis !== undefined && output.EndOffsetMillis !== null ? output.EndOffsetMillis : undefined, + } as any; +}; + +const deserializeAws_restJson1PointsOfInterest = (output: any, context: __SerdeContext): PointOfInterest[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1PointOfInterest(entry, context)); +}; + +const deserializeAws_restJson1RealtimeContactAnalysisSegment = ( + output: any, + context: __SerdeContext +): RealtimeContactAnalysisSegment => { + return { + Categories: + output.Categories !== undefined && output.Categories !== null + ? deserializeAws_restJson1Categories(output.Categories, context) + : undefined, + Transcript: + output.Transcript !== undefined && output.Transcript !== null + ? 
deserializeAws_restJson1Transcript(output.Transcript, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1RealtimeContactAnalysisSegments = ( + output: any, + context: __SerdeContext +): RealtimeContactAnalysisSegment[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1RealtimeContactAnalysisSegment(entry, context)); +}; + +const deserializeAws_restJson1Transcript = (output: any, context: __SerdeContext): Transcript => { + return { + BeginOffsetMillis: + output.BeginOffsetMillis !== undefined && output.BeginOffsetMillis !== null + ? output.BeginOffsetMillis + : undefined, + Content: output.Content !== undefined && output.Content !== null ? output.Content : undefined, + EndOffsetMillis: + output.EndOffsetMillis !== undefined && output.EndOffsetMillis !== null ? output.EndOffsetMillis : undefined, + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + IssuesDetected: + output.IssuesDetected !== undefined && output.IssuesDetected !== null + ? deserializeAws_restJson1IssuesDetected(output.IssuesDetected, context) + : undefined, + ParticipantId: + output.ParticipantId !== undefined && output.ParticipantId !== null ? output.ParticipantId : undefined, + ParticipantRole: + output.ParticipantRole !== undefined && output.ParticipantRole !== null ? output.ParticipantRole : undefined, + Sentiment: output.Sentiment !== undefined && output.Sentiment !== null ? output.Sentiment : undefined, + } as any; +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. 
// Collects a low-level response body stream into a Uint8Array.
// NOTE(review): the stripped generics (`Promise<Uint8Array>`, `Promise<string>`,
// `Required<ClientDefaults>`) lost to a tag-removing pass are restored below.
const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise<Uint8Array> => {
  if (streamBody instanceof Uint8Array) {
    return Promise.resolve(streamBody);
  }
  return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array());
};

// Encode Uint8Array data into string with utf-8.
const collectBodyString = (streamBody: any, context: __SerdeContext): Promise<string> =>
  collectBody(streamBody, context).then((body) => context.utf8Encoder(body));

// A header value is serializable when defined, non-empty, and any
// length/size it exposes is non-zero.
const isSerializableHeaderValue = (value: any): boolean =>
  value !== undefined &&
  value !== "" &&
  (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) &&
  (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0);

// Parses a JSON body; an empty payload yields an empty object.
const parseBody = (streamBody: any, context: __SerdeContext): any =>
  collectBodyString(streamBody, context).then((encoded) => {
    if (encoded.length) {
      return JSON.parse(encoded);
    }
    return {};
  });

/**
 * Load an error code for the aws.rest-json-1.1 protocol.
 * Precedence: x-amzn-errortype header, then body "code", then body "__type";
 * values are sanitized by dropping a ":"-suffix and a "#"-prefixed namespace.
 */
const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => {
  const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase());

  const sanitizeErrorCode = (rawValue: string): string => {
    let cleanValue = rawValue;
    if (cleanValue.indexOf(":") >= 0) {
      cleanValue = cleanValue.split(":")[0];
    }
    if (cleanValue.indexOf("#") >= 0) {
      cleanValue = cleanValue.split("#")[1];
    }
    return cleanValue;
  };

  const headerKey = findKey(output.headers, "x-amzn-errortype");
  if (headerKey !== undefined) {
    return sanitizeErrorCode(output.headers[headerKey]);
  }

  if (data.code !== undefined) {
    return sanitizeErrorCode(data.code);
  }

  if (data["__type"] !== undefined) {
    return sanitizeErrorCode(data["__type"]);
  }

  return "";
};
diff --git a/clients/client-connect-contact-lens/runtimeConfig.browser.ts b/clients/client-connect-contact-lens/runtimeConfig.browser.ts
new file mode 100644
index 000000000000..55336fef5a8e
--- /dev/null
+++ b/clients/client-connect-contact-lens/runtimeConfig.browser.ts
@@ -0,0 +1,34 @@
import packageInfo from "./package.json";

import { Sha256 } from "@aws-crypto/sha256-browser";
import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler";
import { invalidAsyncFunction } from "@aws-sdk/invalid-dependency";
import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry";
import { parseUrl } from "@aws-sdk/url-parser-browser";
import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser";
import { calculateBodyLength } from "@aws-sdk/util-body-length-browser";
import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser";
import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser";
import { ClientDefaults } from "./ConnectContactLensClient";
import { ClientSharedValues } from "./runtimeConfig.shared";

/**
 * @internal
 */
export const ClientDefaultValues: Required<ClientDefaults> = {
  ...ClientSharedValues,
  runtime: "browser",
  base64Decoder: fromBase64,
  base64Encoder: toBase64,
  bodyLengthChecker: calculateBodyLength,
  // Fixed error-message typo ("Credentialis missing" -> "Credential is missing").
  credentialDefaultProvider: invalidAsyncFunction("Credential is missing") as any,
  defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version),
  maxAttempts: DEFAULT_MAX_ATTEMPTS,
  region: invalidAsyncFunction("Region is missing") as any,
  requestHandler: new FetchHttpHandler(),
  sha256: Sha256,
  streamCollector,
  urlParser: parseUrl,
  utf8Decoder: fromUtf8,
  utf8Encoder: toUtf8,
};
diff --git a/clients/client-connect-contact-lens/runtimeConfig.native.ts b/clients/client-connect-contact-lens/runtimeConfig.native.ts
new file mode 100644
index 000000000000..0bfb0e7b05cd
--- /dev/null
+++ b/clients/client-connect-contact-lens/runtimeConfig.native.ts
@@ -0,0 +1,17 @@
import packageInfo from "./package.json";

import { Sha256 } from "@aws-crypto/sha256-js";
import { parseUrl } from "@aws-sdk/url-parser-node";
import { ClientDefaults } from "./ConnectContactLensClient";
import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser";

/**
 * @internal
 */
export const ClientDefaultValues: Required<ClientDefaults> = {
  ...BrowserDefaults,
  runtime: "react-native",
  defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`,
  sha256: Sha256,
  urlParser: parseUrl,
};
diff --git a/clients/client-connect-contact-lens/runtimeConfig.shared.ts b/clients/client-connect-contact-lens/runtimeConfig.shared.ts
new file mode 100644
index 000000000000..98bf3ed1c4a8
--- /dev/null
+++ b/clients/client-connect-contact-lens/runtimeConfig.shared.ts
@@ -0,0 +1,13 @@
import { defaultRegionInfoProvider } from "./endpoints";
import { Logger as __Logger } from "@aws-sdk/types";

/**
 * @internal
 */
export const ClientSharedValues = {
  apiVersion: "2020-08-21",
  disableHostPrefix: false,
  logger: {} as __Logger,
  regionInfoProvider: defaultRegionInfoProvider,
  signingName: "connect",
};
diff --git a/clients/client-connect-contact-lens/runtimeConfig.ts b/clients/client-connect-contact-lens/runtimeConfig.ts
new file mode 100644
index 000000000000..7e47a19f4ce0
--- /dev/null
+++ b/clients/client-connect-contact-lens/runtimeConfig.ts
@@ -0,0 +1,36 @@
import packageInfo from "./package.json";

import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver";
import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node";
import { Hash } from "@aws-sdk/hash-node";
import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry";
import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider";
import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler";
import { parseUrl } from "@aws-sdk/url-parser-node";
import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node";
import { calculateBodyLength } from "@aws-sdk/util-body-length-node";
import { defaultUserAgent } from "@aws-sdk/util-user-agent-node";
import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node";
import { ClientDefaults } from "./ConnectContactLensClient";
import { ClientSharedValues } from "./runtimeConfig.shared";

/**
 * @internal
 */
export const ClientDefaultValues: Required<ClientDefaults> = {
  ...ClientSharedValues,
  runtime: "node",
  base64Decoder: fromBase64,
  base64Encoder: toBase64,
  bodyLengthChecker: calculateBodyLength,
  credentialDefaultProvider,
  defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version),
  maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS),
  region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS),
  requestHandler: new NodeHttpHandler(),
  sha256: Hash.bind(null, "sha256"),
  streamCollector,
  urlParser: parseUrl,
  utf8Decoder: fromUtf8,
  utf8Encoder: toUtf8,
};
diff --git a/clients/client-connect-contact-lens/tsconfig.es.json b/clients/client-connect-contact-lens/tsconfig.es.json
new file mode 100644 index 000000000000..30df5d2e6986 --- /dev/null +++ b/clients/client-connect-contact-lens/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-connect-contact-lens/tsconfig.json b/clients/client-connect-contact-lens/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-connect-contact-lens/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": "./README.md", + "mode": "file", + "out": "./docs", + "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-connect/Connect.ts b/clients/client-connect/Connect.ts index ec99b7eff4d5..d75b3bb25024 100644 --- a/clients/client-connect/Connect.ts +++ b/clients/client-connect/Connect.ts @@ -39,11 +39,21 @@ import { CreateInstanceCommandInput, CreateInstanceCommandOutput, } from "./commands/CreateInstanceCommand"; +import { + CreateIntegrationAssociationCommand, + CreateIntegrationAssociationCommandInput, + CreateIntegrationAssociationCommandOutput, +} from 
"./commands/CreateIntegrationAssociationCommand"; import { CreateRoutingProfileCommand, CreateRoutingProfileCommandInput, CreateRoutingProfileCommandOutput, } from "./commands/CreateRoutingProfileCommand"; +import { + CreateUseCaseCommand, + CreateUseCaseCommandInput, + CreateUseCaseCommandOutput, +} from "./commands/CreateUseCaseCommand"; import { CreateUserCommand, CreateUserCommandInput, CreateUserCommandOutput } from "./commands/CreateUserCommand"; import { CreateUserHierarchyGroupCommand, @@ -55,6 +65,16 @@ import { DeleteInstanceCommandInput, DeleteInstanceCommandOutput, } from "./commands/DeleteInstanceCommand"; +import { + DeleteIntegrationAssociationCommand, + DeleteIntegrationAssociationCommandInput, + DeleteIntegrationAssociationCommandOutput, +} from "./commands/DeleteIntegrationAssociationCommand"; +import { + DeleteUseCaseCommand, + DeleteUseCaseCommandInput, + DeleteUseCaseCommandOutput, +} from "./commands/DeleteUseCaseCommand"; import { DeleteUserCommand, DeleteUserCommandInput, DeleteUserCommandOutput } from "./commands/DeleteUserCommand"; import { DeleteUserHierarchyGroupCommand, @@ -181,6 +201,11 @@ import { ListInstancesCommandInput, ListInstancesCommandOutput, } from "./commands/ListInstancesCommand"; +import { + ListIntegrationAssociationsCommand, + ListIntegrationAssociationsCommandInput, + ListIntegrationAssociationsCommandOutput, +} from "./commands/ListIntegrationAssociationsCommand"; import { ListLambdaFunctionsCommand, ListLambdaFunctionsCommandInput, @@ -219,6 +244,11 @@ import { ListTagsForResourceCommandInput, ListTagsForResourceCommandOutput, } from "./commands/ListTagsForResourceCommand"; +import { + ListUseCasesCommand, + ListUseCasesCommandInput, + ListUseCasesCommandOutput, +} from "./commands/ListUseCasesCommand"; import { ListUserHierarchyGroupsCommand, ListUserHierarchyGroupsCommandInput, @@ -245,6 +275,11 @@ import { StartOutboundVoiceContactCommandInput, StartOutboundVoiceContactCommandOutput, } from 
"./commands/StartOutboundVoiceContactCommand"; +import { + StartTaskContactCommand, + StartTaskContactCommandInput, + StartTaskContactCommandOutput, +} from "./commands/StartTaskContactCommand"; import { StopContactCommand, StopContactCommandInput, StopContactCommandOutput } from "./commands/StopContactCommand"; import { StopContactRecordingCommand, @@ -357,8 +392,7 @@ import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; * endpoints, see Amazon Connect * Endpoints.

* - *

Working with contact flows? Check out the Amazon Connect - * Flow language.

+ *

Working with contact flows? Check out the Amazon Connect Flow language.

*
*/ export class Connect extends ConnectClient { @@ -396,12 +430,11 @@ export class Connect extends ConnectClient { /** *

Associates a storage resource type for the first time. You can only associate one type of - * storage configuration in a single call. This means, for example, that you can't define an - * instance with multiple S3 buckets for storing chat transcripts.

- * + * storage configuration in a single call. This means, for example, that you can't define an + * instance with multiple S3 buckets for storing chat transcripts.

*

This API does not create a resource that doesn't exist. It only associates it to the - * instance. Ensure that the resource being specified in the storage configuration, like an Amazon - * S3 bucket, exists when being used for association.

+ * instance. Ensure that the resource being specified in the storage configuration, like an Amazon + * S3 bucket, exists when being used for association.

*/ public associateInstanceStorageConfig( args: AssociateInstanceStorageConfigCommandInput, @@ -595,7 +628,8 @@ export class Connect extends ConnectClient { } /** - *

Initiates an Amazon Connect instance with all the supported channels enabled. It does not attach any + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Initiates an Amazon Connect instance with all the supported channels enabled. It does not attach any * storage (such as Amazon S3, or Kinesis) or allow for any configurations on features such as * Contact Lens for Amazon Connect.

*/ @@ -628,6 +662,39 @@ export class Connect extends ConnectClient { } } + /** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Create an AppIntegration association with an Amazon Connect instance.

+ */ + public createIntegrationAssociation( + args: CreateIntegrationAssociationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createIntegrationAssociation( + args: CreateIntegrationAssociationCommandInput, + cb: (err: any, data?: CreateIntegrationAssociationCommandOutput) => void + ): void; + public createIntegrationAssociation( + args: CreateIntegrationAssociationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateIntegrationAssociationCommandOutput) => void + ): void; + public createIntegrationAssociation( + args: CreateIntegrationAssociationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateIntegrationAssociationCommandOutput) => void), + cb?: (err: any, data?: CreateIntegrationAssociationCommandOutput) => void + ): Promise | void { + const command = new CreateIntegrationAssociationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Creates a new routing profile.

*/ @@ -660,6 +727,39 @@ export class Connect extends ConnectClient { } } + /** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Creates a use case for an AppIntegration association.

+ */ + public createUseCase( + args: CreateUseCaseCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createUseCase( + args: CreateUseCaseCommandInput, + cb: (err: any, data?: CreateUseCaseCommandOutput) => void + ): void; + public createUseCase( + args: CreateUseCaseCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateUseCaseCommandOutput) => void + ): void; + public createUseCase( + args: CreateUseCaseCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateUseCaseCommandOutput) => void), + cb?: (err: any, data?: CreateUseCaseCommandOutput) => void + ): Promise | void { + const command = new CreateUseCaseCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Creates a user account for the specified Amazon Connect instance.

*

For information about how to create user accounts using the Amazon Connect console, see Add Users in @@ -721,7 +821,8 @@ export class Connect extends ConnectClient { } /** - *

Deletes the Amazon Connect instance.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Deletes the Amazon Connect instance.

*/ public deleteInstance( args: DeleteInstanceCommandInput, @@ -752,12 +853,78 @@ export class Connect extends ConnectClient { } } + /** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Deletes an AppIntegration association from an Amazon Connect instance. The association must not have + * any use cases associated with it.

+ */ + public deleteIntegrationAssociation( + args: DeleteIntegrationAssociationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteIntegrationAssociation( + args: DeleteIntegrationAssociationCommandInput, + cb: (err: any, data?: DeleteIntegrationAssociationCommandOutput) => void + ): void; + public deleteIntegrationAssociation( + args: DeleteIntegrationAssociationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteIntegrationAssociationCommandOutput) => void + ): void; + public deleteIntegrationAssociation( + args: DeleteIntegrationAssociationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteIntegrationAssociationCommandOutput) => void), + cb?: (err: any, data?: DeleteIntegrationAssociationCommandOutput) => void + ): Promise | void { + const command = new DeleteIntegrationAssociationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Deletes a use case from an AppIntegration association.

+ */ + public deleteUseCase( + args: DeleteUseCaseCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteUseCase( + args: DeleteUseCaseCommandInput, + cb: (err: any, data?: DeleteUseCaseCommandOutput) => void + ): void; + public deleteUseCase( + args: DeleteUseCaseCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteUseCaseCommandOutput) => void + ): void; + public deleteUseCase( + args: DeleteUseCaseCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteUseCaseCommandOutput) => void), + cb?: (err: any, data?: DeleteUseCaseCommandOutput) => void + ): Promise | void { + const command = new DeleteUseCaseCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Deletes a user account from the specified Amazon Connect instance.

*

For information about what happens to a user's data when their account is deleted, see * Delete Users from - * Your Amazon Connect Instance in the Amazon Connect Administrator - * Guide.

+ * Your Amazon Connect Instance in the Amazon Connect Administrator Guide.

*/ public deleteUser(args: DeleteUserCommandInput, options?: __HttpHandlerOptions): Promise; public deleteUser(args: DeleteUserCommandInput, cb: (err: any, data?: DeleteUserCommandOutput) => void): void; @@ -783,7 +950,8 @@ export class Connect extends ConnectClient { } /** - *

Deletes an existing user hierarchy group. It must not be associated with any agents or have any active child groups.

+ *

Deletes an existing user hierarchy group. It must not be associated with any agents or have + * any active child groups.

*/ public deleteUserHierarchyGroup( args: DeleteUserHierarchyGroupCommandInput, @@ -849,12 +1017,11 @@ export class Connect extends ConnectClient { } /** - *

Returns the current state of the specified instance identifier. It tracks the instance while it is - * being created and returns an error status if applicable.

- *

If an instance is not created - * successfully, the instance status reason field returns details relevant to the reason. The instance - * in a failed state is returned only for 24 hours after - * the CreateInstance API was invoked.

+ *

Returns the current state of the specified instance identifier. It tracks the instance while + * it is being created and returns an error status if applicable.

+ *

If an instance is not created successfully, the instance status reason field returns details + * relevant to the reason. The instance in a failed state is returned only for 24 hours after the + * CreateInstance API was invoked.

*/ public describeInstance( args: DescribeInstanceCommandInput, @@ -918,7 +1085,8 @@ export class Connect extends ConnectClient { } /** - *

Retrieves the current storage configurations for the specified resource type, association ID, and instance ID.

+ *

Retrieves the current storage configurations for the specified resource type, association + * ID, and instance ID.

*/ public describeInstanceStorageConfig( args: DescribeInstanceStorageConfigCommandInput, @@ -1109,7 +1277,8 @@ export class Connect extends ConnectClient { } /** - *

Removes the storage type configurations for the specified resource type and association ID.

+ *

Removes the storage type configurations for the specified resource type and association + * ID.

*/ public disassociateInstanceStorageConfig( args: DisassociateInstanceStorageConfigCommandInput, @@ -1504,7 +1673,8 @@ export class Connect extends ConnectClient { } /** - *

Returns a paginated list of all attribute types for the given instance.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Returns a paginated list of all attribute types for the given instance.

*/ public listInstanceAttributes( args: ListInstanceAttributesCommandInput, @@ -1536,7 +1706,8 @@ export class Connect extends ConnectClient { } /** - *

Return a list of instances which are in active state, creation-in-progress state, and failed + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Return a list of instances which are in active state, creation-in-progress state, and failed * state. Instances that aren't successfully created (they are in a failed state) are returned only * for 24 hours after the CreateInstance API was invoked.

*/ @@ -1570,7 +1741,9 @@ export class Connect extends ConnectClient { } /** - *

Returns a paginated list of storage configs for the identified instance and resource + *

This API is in preview release for Amazon Connect and is subject to change.

+ * + *

Returns a paginated list of storage configs for the identified instance and resource * type.

*/ public listInstanceStorageConfigs( @@ -1603,7 +1776,42 @@ export class Connect extends ConnectClient { } /** - *

Returns a paginated list of all the Lambda functions that show up in the drop-down options in the relevant contact flow blocks.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Provides summary information about the AppIntegration associations for the specified Amazon Connect + * instance.

+ */ + public listIntegrationAssociations( + args: ListIntegrationAssociationsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listIntegrationAssociations( + args: ListIntegrationAssociationsCommandInput, + cb: (err: any, data?: ListIntegrationAssociationsCommandOutput) => void + ): void; + public listIntegrationAssociations( + args: ListIntegrationAssociationsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListIntegrationAssociationsCommandOutput) => void + ): void; + public listIntegrationAssociations( + args: ListIntegrationAssociationsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListIntegrationAssociationsCommandOutput) => void), + cb?: (err: any, data?: ListIntegrationAssociationsCommandOutput) => void + ): Promise | void { + const command = new ListIntegrationAssociationsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Returns a paginated list of all the Lambda functions that show up in the drop-down options + * in the relevant contact flow blocks.

*/ public listLambdaFunctions( args: ListLambdaFunctionsCommandInput, @@ -1635,7 +1843,8 @@ export class Connect extends ConnectClient { } /** - *

Returns a paginated list of all the Amazon Lex bots currently associated with the instance.

+ *

Returns a paginated list of all the Amazon Lex bots currently associated with the + * instance.

*/ public listLexBots(args: ListLexBotsCommandInput, options?: __HttpHandlerOptions): Promise; public listLexBots(args: ListLexBotsCommandInput, cb: (err: any, data?: ListLexBotsCommandOutput) => void): void; @@ -1695,8 +1904,7 @@ export class Connect extends ConnectClient { } /** - *

Provides information about the prompts for the specified Amazon Connect - * instance.

+ *

Provides information about the prompts for the specified Amazon Connect instance.

*/ public listPrompts(args: ListPromptsCommandInput, options?: __HttpHandlerOptions): Promise; public listPrompts(args: ListPromptsCommandInput, cb: (err: any, data?: ListPromptsCommandOutput) => void): void; @@ -1917,6 +2125,36 @@ export class Connect extends ConnectClient { } } + /** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

List the use cases.

+ */ + public listUseCases( + args: ListUseCasesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listUseCases(args: ListUseCasesCommandInput, cb: (err: any, data?: ListUseCasesCommandOutput) => void): void; + public listUseCases( + args: ListUseCasesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListUseCasesCommandOutput) => void + ): void; + public listUseCases( + args: ListUseCasesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListUseCasesCommandOutput) => void), + cb?: (err: any, data?: ListUseCasesCommandOutput) => void + ): Promise | void { + const command = new ListUseCasesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Provides summary information about the hierarchy groups for the specified Amazon Connect * instance.

@@ -2073,7 +2311,7 @@ export class Connect extends ConnectClient { * started and you want to suspend and resume it, such as when collecting sensitive information (for * example, a credit card number), use SuspendContactRecording and ResumeContactRecording.

*

You can use this API to override the recording behavior configured in the Set recording - * behavior block.

+ * behavior block.

*

Only voice recordings are supported at this time.

*/ public startContactRecording( @@ -2118,7 +2356,7 @@ export class Connect extends ConnectClient { * *

UK numbers with a 447 prefix are not allowed by default. Before you can dial these UK * mobile numbers, you must submit a service quota increase request. For more information, see - * Amazon Connect Service Quotas in the Amazon Connect Administrator Guide.

+ * Amazon Connect Service Quotas in the Amazon Connect Administrator Guide.

*
*/ public startOutboundVoiceContact( @@ -2150,6 +2388,38 @@ export class Connect extends ConnectClient { } } + /** + *

Initiates a contact flow to start a new task.

+ */ + public startTaskContact( + args: StartTaskContactCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public startTaskContact( + args: StartTaskContactCommandInput, + cb: (err: any, data?: StartTaskContactCommandOutput) => void + ): void; + public startTaskContact( + args: StartTaskContactCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: StartTaskContactCommandOutput) => void + ): void; + public startTaskContact( + args: StartTaskContactCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: StartTaskContactCommandOutput) => void), + cb?: (err: any, data?: StartTaskContactCommandOutput) => void + ): Promise | void { + const command = new StartTaskContactCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Ends the specified contact.

*/ @@ -2428,7 +2698,8 @@ export class Connect extends ConnectClient { } /** - *

Updates the value for the specified attribute type.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Updates the value for the specified attribute type.

*/ public updateInstanceAttribute( args: UpdateInstanceAttributeCommandInput, @@ -2460,7 +2731,8 @@ export class Connect extends ConnectClient { } /** - *

Updates an existing configuration for a resource type. This API is idempotent.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Updates an existing configuration for a resource type. This API is idempotent.

*/ public updateInstanceStorageConfig( args: UpdateInstanceStorageConfigCommandInput, @@ -2492,7 +2764,8 @@ export class Connect extends ConnectClient { } /** - *

Updates the channels that agents can handle in the Contact Control Panel (CCP) for a routing profile.

+ *

Updates the channels that agents can handle in the Contact Control Panel (CCP) for a routing + * profile.

*/ public updateRoutingProfileConcurrency( args: UpdateRoutingProfileConcurrencyCommandInput, @@ -2558,7 +2831,9 @@ export class Connect extends ConnectClient { } /** - *

Updates the name and description of a routing profile. The request accepts the following data in JSON format. At least Name or Description must be provided.

+ *

Updates the name and description of a routing profile. The request accepts the following + * data in JSON format. At least Name or Description must be + * provided.

*/ public updateRoutingProfileName( args: UpdateRoutingProfileNameCommandInput, diff --git a/clients/client-connect/ConnectClient.ts b/clients/client-connect/ConnectClient.ts index c8341380f5bf..5c8ea77da4eb 100644 --- a/clients/client-connect/ConnectClient.ts +++ b/clients/client-connect/ConnectClient.ts @@ -21,16 +21,26 @@ import { } from "./commands/AssociateSecurityKeyCommand"; import { CreateContactFlowCommandInput, CreateContactFlowCommandOutput } from "./commands/CreateContactFlowCommand"; import { CreateInstanceCommandInput, CreateInstanceCommandOutput } from "./commands/CreateInstanceCommand"; +import { + CreateIntegrationAssociationCommandInput, + CreateIntegrationAssociationCommandOutput, +} from "./commands/CreateIntegrationAssociationCommand"; import { CreateRoutingProfileCommandInput, CreateRoutingProfileCommandOutput, } from "./commands/CreateRoutingProfileCommand"; +import { CreateUseCaseCommandInput, CreateUseCaseCommandOutput } from "./commands/CreateUseCaseCommand"; import { CreateUserCommandInput, CreateUserCommandOutput } from "./commands/CreateUserCommand"; import { CreateUserHierarchyGroupCommandInput, CreateUserHierarchyGroupCommandOutput, } from "./commands/CreateUserHierarchyGroupCommand"; import { DeleteInstanceCommandInput, DeleteInstanceCommandOutput } from "./commands/DeleteInstanceCommand"; +import { + DeleteIntegrationAssociationCommandInput, + DeleteIntegrationAssociationCommandOutput, +} from "./commands/DeleteIntegrationAssociationCommand"; +import { DeleteUseCaseCommandInput, DeleteUseCaseCommandOutput } from "./commands/DeleteUseCaseCommand"; import { DeleteUserCommandInput, DeleteUserCommandOutput } from "./commands/DeleteUserCommand"; import { DeleteUserHierarchyGroupCommandInput, @@ -111,6 +121,10 @@ import { ListInstanceStorageConfigsCommandOutput, } from "./commands/ListInstanceStorageConfigsCommand"; import { ListInstancesCommandInput, ListInstancesCommandOutput } from "./commands/ListInstancesCommand"; +import { + 
ListIntegrationAssociationsCommandInput, + ListIntegrationAssociationsCommandOutput, +} from "./commands/ListIntegrationAssociationsCommand"; import { ListLambdaFunctionsCommandInput, ListLambdaFunctionsCommandOutput, @@ -136,6 +150,7 @@ import { ListTagsForResourceCommandInput, ListTagsForResourceCommandOutput, } from "./commands/ListTagsForResourceCommand"; +import { ListUseCasesCommandInput, ListUseCasesCommandOutput } from "./commands/ListUseCasesCommand"; import { ListUserHierarchyGroupsCommandInput, ListUserHierarchyGroupsCommandOutput, @@ -154,6 +169,7 @@ import { StartOutboundVoiceContactCommandInput, StartOutboundVoiceContactCommandOutput, } from "./commands/StartOutboundVoiceContactCommand"; +import { StartTaskContactCommandInput, StartTaskContactCommandOutput } from "./commands/StartTaskContactCommand"; import { StopContactCommandInput, StopContactCommandOutput } from "./commands/StopContactCommand"; import { StopContactRecordingCommandInput, @@ -287,10 +303,14 @@ export type ServiceInputTypes = | AssociateSecurityKeyCommandInput | CreateContactFlowCommandInput | CreateInstanceCommandInput + | CreateIntegrationAssociationCommandInput | CreateRoutingProfileCommandInput + | CreateUseCaseCommandInput | CreateUserCommandInput | CreateUserHierarchyGroupCommandInput | DeleteInstanceCommandInput + | DeleteIntegrationAssociationCommandInput + | DeleteUseCaseCommandInput | DeleteUserCommandInput | DeleteUserHierarchyGroupCommandInput | DescribeContactFlowCommandInput @@ -317,6 +337,7 @@ export type ServiceInputTypes = | ListInstanceAttributesCommandInput | ListInstanceStorageConfigsCommandInput | ListInstancesCommandInput + | ListIntegrationAssociationsCommandInput | ListLambdaFunctionsCommandInput | ListLexBotsCommandInput | ListPhoneNumbersCommandInput @@ -327,12 +348,14 @@ export type ServiceInputTypes = | ListSecurityKeysCommandInput | ListSecurityProfilesCommandInput | ListTagsForResourceCommandInput + | ListUseCasesCommandInput | 
ListUserHierarchyGroupsCommandInput | ListUsersCommandInput | ResumeContactRecordingCommandInput | StartChatContactCommandInput | StartContactRecordingCommandInput | StartOutboundVoiceContactCommandInput + | StartTaskContactCommandInput | StopContactCommandInput | StopContactRecordingCommandInput | SuspendContactRecordingCommandInput @@ -364,10 +387,14 @@ export type ServiceOutputTypes = | AssociateSecurityKeyCommandOutput | CreateContactFlowCommandOutput | CreateInstanceCommandOutput + | CreateIntegrationAssociationCommandOutput | CreateRoutingProfileCommandOutput + | CreateUseCaseCommandOutput | CreateUserCommandOutput | CreateUserHierarchyGroupCommandOutput | DeleteInstanceCommandOutput + | DeleteIntegrationAssociationCommandOutput + | DeleteUseCaseCommandOutput | DeleteUserCommandOutput | DeleteUserHierarchyGroupCommandOutput | DescribeContactFlowCommandOutput @@ -394,6 +421,7 @@ export type ServiceOutputTypes = | ListInstanceAttributesCommandOutput | ListInstanceStorageConfigsCommandOutput | ListInstancesCommandOutput + | ListIntegrationAssociationsCommandOutput | ListLambdaFunctionsCommandOutput | ListLexBotsCommandOutput | ListPhoneNumbersCommandOutput @@ -404,12 +432,14 @@ export type ServiceOutputTypes = | ListSecurityKeysCommandOutput | ListSecurityProfilesCommandOutput | ListTagsForResourceCommandOutput + | ListUseCasesCommandOutput | ListUserHierarchyGroupsCommandOutput | ListUsersCommandOutput | ResumeContactRecordingCommandOutput | StartChatContactCommandOutput | StartContactRecordingCommandOutput | StartOutboundVoiceContactCommandOutput + | StartTaskContactCommandOutput | StopContactCommandOutput | StopContactRecordingCommandOutput | SuspendContactRecordingCommandOutput @@ -557,8 +587,7 @@ export type ConnectClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHa * endpoints, see Amazon Connect * Endpoints.

* - *

Working with contact flows? Check out the Amazon Connect - * Flow language.

+ *

Working with contact flows? Check out the Amazon Connect Flow language.

*
*/ export class ConnectClient extends __Client< diff --git a/clients/client-connect/commands/AssociateInstanceStorageConfigCommand.ts b/clients/client-connect/commands/AssociateInstanceStorageConfigCommand.ts index 0274525445e2..3c18b7bee754 100644 --- a/clients/client-connect/commands/AssociateInstanceStorageConfigCommand.ts +++ b/clients/client-connect/commands/AssociateInstanceStorageConfigCommand.ts @@ -22,12 +22,11 @@ export type AssociateInstanceStorageConfigCommandOutput = AssociateInstanceStora /** *

Associates a storage resource type for the first time. You can only associate one type of - * storage configuration in a single call. This means, for example, that you can't define an - * instance with multiple S3 buckets for storing chat transcripts.

- * + * storage configuration in a single call. This means, for example, that you can't define an + * instance with multiple S3 buckets for storing chat transcripts.

*

This API does not create a resource that doesn't exist. It only associates it to the - * instance. Ensure that the resource being specified in the storage configuration, like an Amazon - * S3 bucket, exists when being used for association.

+ * instance. Ensure that the resource being specified in the storage configuration, like an Amazon + * S3 bucket, exists when being used for association.

*/ export class AssociateInstanceStorageConfigCommand extends $Command< AssociateInstanceStorageConfigCommandInput, diff --git a/clients/client-connect/commands/CreateInstanceCommand.ts b/clients/client-connect/commands/CreateInstanceCommand.ts index 23d19fd73f8a..413615284914 100644 --- a/clients/client-connect/commands/CreateInstanceCommand.ts +++ b/clients/client-connect/commands/CreateInstanceCommand.ts @@ -21,7 +21,8 @@ export type CreateInstanceCommandInput = CreateInstanceRequest; export type CreateInstanceCommandOutput = CreateInstanceResponse & __MetadataBearer; /** - *

Initiates an Amazon Connect instance with all the supported channels enabled. It does not attach any + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Initiates an Amazon Connect instance with all the supported channels enabled. It does not attach any * storage (such as Amazon S3, or Kinesis) or allow for any configurations on features such as * Contact Lens for Amazon Connect.

*/ diff --git a/clients/client-connect/commands/CreateIntegrationAssociationCommand.ts b/clients/client-connect/commands/CreateIntegrationAssociationCommand.ts new file mode 100644 index 000000000000..0de136487714 --- /dev/null +++ b/clients/client-connect/commands/CreateIntegrationAssociationCommand.ts @@ -0,0 +1,92 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { CreateIntegrationAssociationRequest, CreateIntegrationAssociationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateIntegrationAssociationCommand, + serializeAws_restJson1CreateIntegrationAssociationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateIntegrationAssociationCommandInput = CreateIntegrationAssociationRequest; +export type CreateIntegrationAssociationCommandOutput = CreateIntegrationAssociationResponse & __MetadataBearer; + +/** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Create an AppIntegration association with an Amazon Connect instance.

+ */ +export class CreateIntegrationAssociationCommand extends $Command< + CreateIntegrationAssociationCommandInput, + CreateIntegrationAssociationCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateIntegrationAssociationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "CreateIntegrationAssociationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateIntegrationAssociationRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateIntegrationAssociationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateIntegrationAssociationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateIntegrationAssociationCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1CreateIntegrationAssociationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-connect/commands/CreateUseCaseCommand.ts b/clients/client-connect/commands/CreateUseCaseCommand.ts new file mode 100644 index 000000000000..f15655654e73 --- /dev/null +++ b/clients/client-connect/commands/CreateUseCaseCommand.ts @@ -0,0 +1,89 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { CreateUseCaseRequest, CreateUseCaseResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateUseCaseCommand, + serializeAws_restJson1CreateUseCaseCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateUseCaseCommandInput = CreateUseCaseRequest; +export type CreateUseCaseCommandOutput = CreateUseCaseResponse & __MetadataBearer; + +/** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Creates a use case for an AppIntegration association.

+ */ +export class CreateUseCaseCommand extends $Command< + CreateUseCaseCommandInput, + CreateUseCaseCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateUseCaseCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "CreateUseCaseCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateUseCaseRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateUseCaseResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateUseCaseCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateUseCaseCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateUseCaseCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-connect/commands/DeleteInstanceCommand.ts b/clients/client-connect/commands/DeleteInstanceCommand.ts index fc6f53c99d93..8fab17676523 100644 --- 
a/clients/client-connect/commands/DeleteInstanceCommand.ts +++ b/clients/client-connect/commands/DeleteInstanceCommand.ts @@ -21,7 +21,8 @@ export type DeleteInstanceCommandInput = DeleteInstanceRequest; export type DeleteInstanceCommandOutput = __MetadataBearer; /** - *

Deletes the Amazon Connect instance.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Deletes the Amazon Connect instance.

*/ export class DeleteInstanceCommand extends $Command< DeleteInstanceCommandInput, diff --git a/clients/client-connect/commands/DeleteIntegrationAssociationCommand.ts b/clients/client-connect/commands/DeleteIntegrationAssociationCommand.ts new file mode 100644 index 000000000000..7e55a96678d3 --- /dev/null +++ b/clients/client-connect/commands/DeleteIntegrationAssociationCommand.ts @@ -0,0 +1,93 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { DeleteIntegrationAssociationRequest } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteIntegrationAssociationCommand, + serializeAws_restJson1DeleteIntegrationAssociationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteIntegrationAssociationCommandInput = DeleteIntegrationAssociationRequest; +export type DeleteIntegrationAssociationCommandOutput = __MetadataBearer; + +/** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Deletes an AppIntegration association from an Amazon Connect instance. The association must not have + * any use cases associated with it.

+ */ +export class DeleteIntegrationAssociationCommand extends $Command< + DeleteIntegrationAssociationCommandInput, + DeleteIntegrationAssociationCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteIntegrationAssociationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "DeleteIntegrationAssociationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteIntegrationAssociationRequest.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteIntegrationAssociationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteIntegrationAssociationCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1DeleteIntegrationAssociationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-connect/commands/DeleteUseCaseCommand.ts b/clients/client-connect/commands/DeleteUseCaseCommand.ts new file mode 100644 index 000000000000..d5fc3199f209 --- /dev/null +++ b/clients/client-connect/commands/DeleteUseCaseCommand.ts @@ -0,0 +1,89 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { DeleteUseCaseRequest } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteUseCaseCommand, + serializeAws_restJson1DeleteUseCaseCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteUseCaseCommandInput = DeleteUseCaseRequest; +export type DeleteUseCaseCommandOutput = __MetadataBearer; + +/** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Deletes a use case from an AppIntegration association.

+ */ +export class DeleteUseCaseCommand extends $Command< + DeleteUseCaseCommandInput, + DeleteUseCaseCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteUseCaseCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "DeleteUseCaseCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteUseCaseRequest.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteUseCaseCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteUseCaseCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteUseCaseCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-connect/commands/DeleteUserCommand.ts b/clients/client-connect/commands/DeleteUserCommand.ts index 94f7df1c13c6..c9a7588e827b 100644 --- 
a/clients/client-connect/commands/DeleteUserCommand.ts +++ b/clients/client-connect/commands/DeleteUserCommand.ts @@ -24,8 +24,7 @@ export type DeleteUserCommandOutput = __MetadataBearer; *

Deletes a user account from the specified Amazon Connect instance.

*

For information about what happens to a user's data when their account is deleted, see * Delete Users from - * Your Amazon Connect Instance in the Amazon Connect Administrator - * Guide.

+ * Your Amazon Connect Instance in the Amazon Connect Administrator Guide.

*/ export class DeleteUserCommand extends $Command< DeleteUserCommandInput, diff --git a/clients/client-connect/commands/DeleteUserHierarchyGroupCommand.ts b/clients/client-connect/commands/DeleteUserHierarchyGroupCommand.ts index d4835d4f43e2..b62e70ede8f8 100644 --- a/clients/client-connect/commands/DeleteUserHierarchyGroupCommand.ts +++ b/clients/client-connect/commands/DeleteUserHierarchyGroupCommand.ts @@ -21,7 +21,8 @@ export type DeleteUserHierarchyGroupCommandInput = DeleteUserHierarchyGroupReque export type DeleteUserHierarchyGroupCommandOutput = __MetadataBearer; /** - *

Deletes an existing user hierarchy group. It must not be associated with any agents or have any active child groups.

+ *

Deletes an existing user hierarchy group. It must not be associated with any agents or have + * any active child groups.

*/ export class DeleteUserHierarchyGroupCommand extends $Command< DeleteUserHierarchyGroupCommandInput, diff --git a/clients/client-connect/commands/DescribeInstanceCommand.ts b/clients/client-connect/commands/DescribeInstanceCommand.ts index 7f3020815da1..e1247b2d64bb 100644 --- a/clients/client-connect/commands/DescribeInstanceCommand.ts +++ b/clients/client-connect/commands/DescribeInstanceCommand.ts @@ -21,12 +21,11 @@ export type DescribeInstanceCommandInput = DescribeInstanceRequest; export type DescribeInstanceCommandOutput = DescribeInstanceResponse & __MetadataBearer; /** - *

Returns the current state of the specified instance identifier. It tracks the instance while it is - * being created and returns an error status if applicable.

- *

If an instance is not created - * successfully, the instance status reason field returns details relevant to the reason. The instance - * in a failed state is returned only for 24 hours after - * the CreateInstance API was invoked.

+ *

Returns the current state of the specified instance identifier. It tracks the instance while + * it is being created and returns an error status if applicable.

+ *

If an instance is not created successfully, the instance status reason field returns details + * relevant to the reason. The instance in a failed state is returned only for 24 hours after the + * CreateInstance API was invoked.

*/ export class DescribeInstanceCommand extends $Command< DescribeInstanceCommandInput, diff --git a/clients/client-connect/commands/DescribeInstanceStorageConfigCommand.ts b/clients/client-connect/commands/DescribeInstanceStorageConfigCommand.ts index 9b6a62913650..060572bdeaed 100644 --- a/clients/client-connect/commands/DescribeInstanceStorageConfigCommand.ts +++ b/clients/client-connect/commands/DescribeInstanceStorageConfigCommand.ts @@ -21,7 +21,8 @@ export type DescribeInstanceStorageConfigCommandInput = DescribeInstanceStorageC export type DescribeInstanceStorageConfigCommandOutput = DescribeInstanceStorageConfigResponse & __MetadataBearer; /** - *

Retrieves the current storage configurations for the specified resource type, association ID, and instance ID.

+ *

Retrieves the current storage configurations for the specified resource type, association + * ID, and instance ID.

*/ export class DescribeInstanceStorageConfigCommand extends $Command< DescribeInstanceStorageConfigCommandInput, diff --git a/clients/client-connect/commands/DisassociateInstanceStorageConfigCommand.ts b/clients/client-connect/commands/DisassociateInstanceStorageConfigCommand.ts index 084edcabc76b..d0cd3e0c7e6c 100644 --- a/clients/client-connect/commands/DisassociateInstanceStorageConfigCommand.ts +++ b/clients/client-connect/commands/DisassociateInstanceStorageConfigCommand.ts @@ -21,7 +21,8 @@ export type DisassociateInstanceStorageConfigCommandInput = DisassociateInstance export type DisassociateInstanceStorageConfigCommandOutput = __MetadataBearer; /** - *

Removes the storage type configurations for the specified resource type and association ID.

+ *

Removes the storage type configurations for the specified resource type and association + * ID.

*/ export class DisassociateInstanceStorageConfigCommand extends $Command< DisassociateInstanceStorageConfigCommandInput, diff --git a/clients/client-connect/commands/ListInstanceAttributesCommand.ts b/clients/client-connect/commands/ListInstanceAttributesCommand.ts index 9622d6bd85cc..09a425e2ac73 100644 --- a/clients/client-connect/commands/ListInstanceAttributesCommand.ts +++ b/clients/client-connect/commands/ListInstanceAttributesCommand.ts @@ -21,7 +21,8 @@ export type ListInstanceAttributesCommandInput = ListInstanceAttributesRequest; export type ListInstanceAttributesCommandOutput = ListInstanceAttributesResponse & __MetadataBearer; /** - *

Returns a paginated list of all attribute types for the given instance.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Returns a paginated list of all attribute types for the given instance.

*/ export class ListInstanceAttributesCommand extends $Command< ListInstanceAttributesCommandInput, diff --git a/clients/client-connect/commands/ListInstanceStorageConfigsCommand.ts b/clients/client-connect/commands/ListInstanceStorageConfigsCommand.ts index a5ba3dcba577..b5b1b8d71080 100644 --- a/clients/client-connect/commands/ListInstanceStorageConfigsCommand.ts +++ b/clients/client-connect/commands/ListInstanceStorageConfigsCommand.ts @@ -21,7 +21,9 @@ export type ListInstanceStorageConfigsCommandInput = ListInstanceStorageConfigsR export type ListInstanceStorageConfigsCommandOutput = ListInstanceStorageConfigsResponse & __MetadataBearer; /** - *

Returns a paginated list of storage configs for the identified instance and resource + *

This API is in preview release for Amazon Connect and is subject to change.

+ * + *

Returns a paginated list of storage configs for the identified instance and resource * type.

*/ export class ListInstanceStorageConfigsCommand extends $Command< diff --git a/clients/client-connect/commands/ListInstancesCommand.ts b/clients/client-connect/commands/ListInstancesCommand.ts index 11f56373a12c..b4d1ac499a6a 100644 --- a/clients/client-connect/commands/ListInstancesCommand.ts +++ b/clients/client-connect/commands/ListInstancesCommand.ts @@ -21,7 +21,8 @@ export type ListInstancesCommandInput = ListInstancesRequest; export type ListInstancesCommandOutput = ListInstancesResponse & __MetadataBearer; /** - *

Return a list of instances which are in active state, creation-in-progress state, and failed + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Return a list of instances which are in active state, creation-in-progress state, and failed * state. Instances that aren't successfully created (they are in a failed state) are returned only * for 24 hours after the CreateInstance API was invoked.

*/ diff --git a/clients/client-connect/commands/ListIntegrationAssociationsCommand.ts b/clients/client-connect/commands/ListIntegrationAssociationsCommand.ts new file mode 100644 index 000000000000..b5db1386033c --- /dev/null +++ b/clients/client-connect/commands/ListIntegrationAssociationsCommand.ts @@ -0,0 +1,93 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { ListIntegrationAssociationsRequest, ListIntegrationAssociationsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListIntegrationAssociationsCommand, + serializeAws_restJson1ListIntegrationAssociationsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListIntegrationAssociationsCommandInput = ListIntegrationAssociationsRequest; +export type ListIntegrationAssociationsCommandOutput = ListIntegrationAssociationsResponse & __MetadataBearer; + +/** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Provides summary information about the AppIntegration associations for the specified Amazon Connect + * instance.

+ */ +export class ListIntegrationAssociationsCommand extends $Command< + ListIntegrationAssociationsCommandInput, + ListIntegrationAssociationsCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListIntegrationAssociationsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "ListIntegrationAssociationsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListIntegrationAssociationsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListIntegrationAssociationsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListIntegrationAssociationsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListIntegrationAssociationsCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1ListIntegrationAssociationsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-connect/commands/ListLambdaFunctionsCommand.ts b/clients/client-connect/commands/ListLambdaFunctionsCommand.ts index 126a08fd1a6f..89c2ff8f2ad3 100644 --- a/clients/client-connect/commands/ListLambdaFunctionsCommand.ts +++ b/clients/client-connect/commands/ListLambdaFunctionsCommand.ts @@ -21,7 +21,8 @@ export type ListLambdaFunctionsCommandInput = ListLambdaFunctionsRequest; export type ListLambdaFunctionsCommandOutput = ListLambdaFunctionsResponse & __MetadataBearer; /** - *

Returns a paginated list of all the Lambda functions that show up in the drop-down options in the relevant contact flow blocks.

+ *

Returns a paginated list of all the Lambda functions that show up in the drop-down options + * in the relevant contact flow blocks.

*/ export class ListLambdaFunctionsCommand extends $Command< ListLambdaFunctionsCommandInput, diff --git a/clients/client-connect/commands/ListLexBotsCommand.ts b/clients/client-connect/commands/ListLexBotsCommand.ts index 5eef6e238815..454dd645ea12 100644 --- a/clients/client-connect/commands/ListLexBotsCommand.ts +++ b/clients/client-connect/commands/ListLexBotsCommand.ts @@ -21,7 +21,8 @@ export type ListLexBotsCommandInput = ListLexBotsRequest; export type ListLexBotsCommandOutput = ListLexBotsResponse & __MetadataBearer; /** - *

Returns a paginated list of all the Amazon Lex bots currently associated with the instance.

+ *

Returns a paginated list of all the Amazon Lex bots currently associated with the + * instance.

*/ export class ListLexBotsCommand extends $Command< ListLexBotsCommandInput, diff --git a/clients/client-connect/commands/ListPromptsCommand.ts b/clients/client-connect/commands/ListPromptsCommand.ts index 3aeb59edaed9..7ce141566423 100644 --- a/clients/client-connect/commands/ListPromptsCommand.ts +++ b/clients/client-connect/commands/ListPromptsCommand.ts @@ -21,8 +21,7 @@ export type ListPromptsCommandInput = ListPromptsRequest; export type ListPromptsCommandOutput = ListPromptsResponse & __MetadataBearer; /** - *

Provides information about the prompts for the specified Amazon Connect - * instance.

+ *

Provides information about the prompts for the specified Amazon Connect instance.

*/ export class ListPromptsCommand extends $Command< ListPromptsCommandInput, diff --git a/clients/client-connect/commands/ListUseCasesCommand.ts b/clients/client-connect/commands/ListUseCasesCommand.ts new file mode 100644 index 000000000000..c83d32eca194 --- /dev/null +++ b/clients/client-connect/commands/ListUseCasesCommand.ts @@ -0,0 +1,89 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { ListUseCasesRequest, ListUseCasesResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListUseCasesCommand, + serializeAws_restJson1ListUseCasesCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListUseCasesCommandInput = ListUseCasesRequest; +export type ListUseCasesCommandOutput = ListUseCasesResponse & __MetadataBearer; + +/** + *

This API is in preview release for Amazon Connect and is subject to change.

+ *

List the use cases.

+ */ +export class ListUseCasesCommand extends $Command< + ListUseCasesCommandInput, + ListUseCasesCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListUseCasesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "ListUseCasesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListUseCasesRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListUseCasesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListUseCasesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListUseCasesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListUseCasesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-connect/commands/StartContactRecordingCommand.ts b/clients/client-connect/commands/StartContactRecordingCommand.ts index 953e612d154c..aea2ea382386 100644 --- 
a/clients/client-connect/commands/StartContactRecordingCommand.ts +++ b/clients/client-connect/commands/StartContactRecordingCommand.ts @@ -27,7 +27,7 @@ export type StartContactRecordingCommandOutput = StartContactRecordingResponse & * started and you want to suspend and resume it, such as when collecting sensitive information (for * example, a credit card number), use SuspendContactRecording and ResumeContactRecording.

*

You can use this API to override the recording behavior configured in the Set recording - * behavior block.

+ * behavior block.

*

Only voice recordings are supported at this time.

*/ export class StartContactRecordingCommand extends $Command< diff --git a/clients/client-connect/commands/StartOutboundVoiceContactCommand.ts b/clients/client-connect/commands/StartOutboundVoiceContactCommand.ts index c8df7bfbde8c..bad94ac87d54 100644 --- a/clients/client-connect/commands/StartOutboundVoiceContactCommand.ts +++ b/clients/client-connect/commands/StartOutboundVoiceContactCommand.ts @@ -33,7 +33,7 @@ export type StartOutboundVoiceContactCommandOutput = StartOutboundVoiceContactRe * *

UK numbers with a 447 prefix are not allowed by default. Before you can dial these UK * mobile numbers, you must submit a service quota increase request. For more information, see - * Amazon Connect Service Quotas in the Amazon Connect Administrator Guide.

+ * Amazon Connect Service Quotas in the Amazon Connect Administrator Guide.

*
*/ export class StartOutboundVoiceContactCommand extends $Command< diff --git a/clients/client-connect/commands/StartTaskContactCommand.ts b/clients/client-connect/commands/StartTaskContactCommand.ts new file mode 100644 index 000000000000..8c4bff404c29 --- /dev/null +++ b/clients/client-connect/commands/StartTaskContactCommand.ts @@ -0,0 +1,88 @@ +import { ConnectClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ConnectClient"; +import { StartTaskContactRequest, StartTaskContactResponse } from "../models/models_0"; +import { + deserializeAws_restJson1StartTaskContactCommand, + serializeAws_restJson1StartTaskContactCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type StartTaskContactCommandInput = StartTaskContactRequest; +export type StartTaskContactCommandOutput = StartTaskContactResponse & __MetadataBearer; + +/** + *

Initiates a contact flow to start a new task.

+ */ +export class StartTaskContactCommand extends $Command< + StartTaskContactCommandInput, + StartTaskContactCommandOutput, + ConnectClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: StartTaskContactCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ConnectClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ConnectClient"; + const commandName = "StartTaskContactCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: StartTaskContactRequest.filterSensitiveLog, + outputFilterSensitiveLog: StartTaskContactResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: StartTaskContactCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1StartTaskContactCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1StartTaskContactCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-connect/commands/UpdateInstanceAttributeCommand.ts b/clients/client-connect/commands/UpdateInstanceAttributeCommand.ts index 
8c2555c96423..b64a0ce72607 100644 --- a/clients/client-connect/commands/UpdateInstanceAttributeCommand.ts +++ b/clients/client-connect/commands/UpdateInstanceAttributeCommand.ts @@ -21,7 +21,8 @@ export type UpdateInstanceAttributeCommandInput = UpdateInstanceAttributeRequest export type UpdateInstanceAttributeCommandOutput = __MetadataBearer; /** - *

Updates the value for the specified attribute type.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Updates the value for the specified attribute type.

*/ export class UpdateInstanceAttributeCommand extends $Command< UpdateInstanceAttributeCommandInput, diff --git a/clients/client-connect/commands/UpdateInstanceStorageConfigCommand.ts b/clients/client-connect/commands/UpdateInstanceStorageConfigCommand.ts index 142b6e2b3c8e..7cb436ac5a0f 100644 --- a/clients/client-connect/commands/UpdateInstanceStorageConfigCommand.ts +++ b/clients/client-connect/commands/UpdateInstanceStorageConfigCommand.ts @@ -21,7 +21,8 @@ export type UpdateInstanceStorageConfigCommandInput = UpdateInstanceStorageConfi export type UpdateInstanceStorageConfigCommandOutput = __MetadataBearer; /** - *

Updates an existing configuration for a resource type. This API is idempotent.

+ *

This API is in preview release for Amazon Connect and is subject to change.

+ *

Updates an existing configuration for a resource type. This API is idempotent.

*/ export class UpdateInstanceStorageConfigCommand extends $Command< UpdateInstanceStorageConfigCommandInput, diff --git a/clients/client-connect/commands/UpdateRoutingProfileConcurrencyCommand.ts b/clients/client-connect/commands/UpdateRoutingProfileConcurrencyCommand.ts index a9047800dc2b..19851c95d85d 100644 --- a/clients/client-connect/commands/UpdateRoutingProfileConcurrencyCommand.ts +++ b/clients/client-connect/commands/UpdateRoutingProfileConcurrencyCommand.ts @@ -21,7 +21,8 @@ export type UpdateRoutingProfileConcurrencyCommandInput = UpdateRoutingProfileCo export type UpdateRoutingProfileConcurrencyCommandOutput = __MetadataBearer; /** - *

Updates the channels that agents can handle in the Contact Control Panel (CCP) for a routing profile.

+ *

Updates the channels that agents can handle in the Contact Control Panel (CCP) for a routing + * profile.

*/ export class UpdateRoutingProfileConcurrencyCommand extends $Command< UpdateRoutingProfileConcurrencyCommandInput, diff --git a/clients/client-connect/commands/UpdateRoutingProfileNameCommand.ts b/clients/client-connect/commands/UpdateRoutingProfileNameCommand.ts index 7c210c9b4ffa..d0eb6590ae04 100644 --- a/clients/client-connect/commands/UpdateRoutingProfileNameCommand.ts +++ b/clients/client-connect/commands/UpdateRoutingProfileNameCommand.ts @@ -21,7 +21,9 @@ export type UpdateRoutingProfileNameCommandInput = UpdateRoutingProfileNameReque export type UpdateRoutingProfileNameCommandOutput = __MetadataBearer; /** - *

Updates the name and description of a routing profile. The request accepts the following data in JSON format. At least Name or Description must be provided.

+ *

Updates the name and description of a routing profile. The request accepts the following + * data in JSON format. At least Name or Description must be + * provided.

*/ export class UpdateRoutingProfileNameCommand extends $Command< UpdateRoutingProfileNameCommandInput, diff --git a/clients/client-connect/index.ts b/clients/client-connect/index.ts index f7993eb53005..0eb770929476 100644 --- a/clients/client-connect/index.ts +++ b/clients/client-connect/index.ts @@ -8,10 +8,14 @@ export * from "./commands/AssociateRoutingProfileQueuesCommand"; export * from "./commands/AssociateSecurityKeyCommand"; export * from "./commands/CreateContactFlowCommand"; export * from "./commands/CreateInstanceCommand"; +export * from "./commands/CreateIntegrationAssociationCommand"; export * from "./commands/CreateRoutingProfileCommand"; +export * from "./commands/CreateUseCaseCommand"; export * from "./commands/CreateUserCommand"; export * from "./commands/CreateUserHierarchyGroupCommand"; export * from "./commands/DeleteInstanceCommand"; +export * from "./commands/DeleteIntegrationAssociationCommand"; +export * from "./commands/DeleteUseCaseCommand"; export * from "./commands/DeleteUserCommand"; export * from "./commands/DeleteUserHierarchyGroupCommand"; export * from "./commands/DescribeContactFlowCommand"; @@ -46,6 +50,8 @@ export * from "./commands/ListInstancesCommand"; export * from "./pagination/ListInstancesPaginator"; export * from "./commands/ListInstanceStorageConfigsCommand"; export * from "./pagination/ListInstanceStorageConfigsPaginator"; +export * from "./commands/ListIntegrationAssociationsCommand"; +export * from "./pagination/ListIntegrationAssociationsPaginator"; export * from "./commands/ListLambdaFunctionsCommand"; export * from "./pagination/ListLambdaFunctionsPaginator"; export * from "./commands/ListLexBotsCommand"; @@ -65,6 +71,8 @@ export * from "./pagination/ListSecurityKeysPaginator"; export * from "./commands/ListSecurityProfilesCommand"; export * from "./pagination/ListSecurityProfilesPaginator"; export * from "./commands/ListTagsForResourceCommand"; +export * from "./commands/ListUseCasesCommand"; +export * from 
"./pagination/ListUseCasesPaginator"; export * from "./commands/ListUserHierarchyGroupsCommand"; export * from "./pagination/ListUserHierarchyGroupsPaginator"; export * from "./commands/ListUsersCommand"; @@ -73,6 +81,7 @@ export * from "./commands/ResumeContactRecordingCommand"; export * from "./commands/StartChatContactCommand"; export * from "./commands/StartContactRecordingCommand"; export * from "./commands/StartOutboundVoiceContactCommand"; +export * from "./commands/StartTaskContactCommand"; export * from "./commands/StopContactCommand"; export * from "./commands/StopContactRecordingCommand"; export * from "./commands/SuspendContactRecordingCommand"; diff --git a/clients/client-connect/models/models_0.ts b/clients/client-connect/models/models_0.ts index 1a6d4d94b7ed..62fc24a3e578 100644 --- a/clients/client-connect/models/models_0.ts +++ b/clients/client-connect/models/models_0.ts @@ -214,7 +214,6 @@ export interface KinesisVideoStreamConfig { /** *

The number of hours data is retained in the stream. Kinesis Video Streams retains the data * in a data store that is associated with the stream.

- * *

The default value is 0, indicating that the stream does not persist data.

*/ RetentionPeriodHours: number | undefined; @@ -401,6 +400,7 @@ export namespace AssociateLexBotRequest { export enum Channel { CHAT = "CHAT", + TASK = "TASK", VOICE = "VOICE", } @@ -427,7 +427,8 @@ export namespace RoutingProfileQueueReference { } /** - *

Contains information about the queue and channel for which priority and delay can be set.

+ *

Contains information about the queue and channel for which priority and delay can be + * set.

*/ export interface RoutingProfileQueueConfig { /** @@ -436,8 +437,9 @@ export interface RoutingProfileQueueConfig { QueueReference: RoutingProfileQueueReference | undefined; /** - *

The order in which contacts are to be handled for the queue. For more information, see Queues: priority and - * delay.

+ *

The order in which contacts are to be handled for the queue. For more information, see + * Queues: priority and + * delay.

*/ Priority: number | undefined; @@ -534,7 +536,7 @@ export interface CreateContactFlowRequest { /** *

The type of the contact flow. For descriptions of the available types, see Choose a Contact Flow Type in the Amazon Connect Administrator - * Guide.

+ * Guide.

*/ Type: ContactFlowType | string | undefined; @@ -544,9 +546,7 @@ export interface CreateContactFlowRequest { Description?: string; /** - *

The content of the contact flow. - * - *

+ *

The content of the contact flow.

*/ Content: string | undefined; @@ -710,6 +710,71 @@ export namespace CreateInstanceResponse { }); } +export enum IntegrationType { + EVENT = "EVENT", +} + +export enum SourceType { + SALESFORCE = "SALESFORCE", + ZENDESK = "ZENDESK", +} + +export interface CreateIntegrationAssociationRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The type of information to be ingested.

+ */ + IntegrationType: IntegrationType | string | undefined; + + /** + *

The Amazon Resource Name (ARN) of the integration.

+ */ + IntegrationArn: string | undefined; + + /** + *

The URL for the external application.

+ */ + SourceApplicationUrl: string | undefined; + + /** + *

The name of the external application.

+ */ + SourceApplicationName: string | undefined; + + /** + *

The type of the data source.

+ */ + SourceType: SourceType | string | undefined; +} + +export namespace CreateIntegrationAssociationRequest { + export const filterSensitiveLog = (obj: CreateIntegrationAssociationRequest): any => ({ + ...obj, + }); +} + +export interface CreateIntegrationAssociationResponse { + /** + *

The identifier for the association.

+ */ + IntegrationAssociationId?: string; + + /** + *

The Amazon Resource Name (ARN) for the association.

+ */ + IntegrationAssociationArn?: string; +} + +export namespace CreateIntegrationAssociationResponse { + export const filterSensitiveLog = (obj: CreateIntegrationAssociationResponse): any => ({ + ...obj, + }); +} + /** *

Contains information about which channels are supported, and how many contacts an agent can * have on a channel simultaneously.

@@ -795,6 +860,52 @@ export namespace CreateRoutingProfileResponse { }); } +export enum UseCaseType { + RULES_EVALUATION = "RULES_EVALUATION", +} + +export interface CreateUseCaseRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The identifier for the AppIntegration association.

+ */ + IntegrationAssociationId: string | undefined; + + /** + *

The type of use case to associate to the AppIntegration association. Each AppIntegration + * association can have only one of each use case type.

+ */ + UseCaseType: UseCaseType | string | undefined; +} + +export namespace CreateUseCaseRequest { + export const filterSensitiveLog = (obj: CreateUseCaseRequest): any => ({ + ...obj, + }); +} + +export interface CreateUseCaseResponse { + /** + *

The identifier of the use case.

+ */ + UseCaseId?: string; + + /** + *

The Amazon Resource Name (ARN) for the use case.

+ */ + UseCaseArn?: string; +} + +export namespace CreateUseCaseResponse { + export const filterSensitiveLog = (obj: CreateUseCaseResponse): any => ({ + ...obj, + }); +} + /** *

Contains information about the identity of a user.

*/ @@ -952,7 +1063,8 @@ export interface CreateUserHierarchyGroupRequest { Name: string | undefined; /** - *

The identifier for the parent hierarchy group. The user hierarchy is created at level one if the parent group ID is null.

+ *

The identifier for the parent hierarchy group. The user hierarchy is created at level one if + * the parent group ID is null.

*/ ParentGroupId?: string; @@ -999,6 +1111,47 @@ export namespace DeleteInstanceRequest { }); } +export interface DeleteIntegrationAssociationRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The identifier for the AppIntegration association.

+ */ + IntegrationAssociationId: string | undefined; +} + +export namespace DeleteIntegrationAssociationRequest { + export const filterSensitiveLog = (obj: DeleteIntegrationAssociationRequest): any => ({ + ...obj, + }); +} + +export interface DeleteUseCaseRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The identifier for the AppIntegration association.

+ */ + IntegrationAssociationId: string | undefined; + + /** + *

The identifier for the use case.

+ */ + UseCaseId: string | undefined; +} + +export namespace DeleteUseCaseRequest { + export const filterSensitiveLog = (obj: DeleteUseCaseRequest): any => ({ + ...obj, + }); +} + export interface DeleteUserRequest { /** *

The identifier of the Amazon Connect instance.

@@ -1123,7 +1276,7 @@ export interface ContactFlow { /** *

The type of the contact flow. For descriptions of the available types, see Choose a Contact Flow Type in the Amazon Connect Administrator - * Guide.

+ * Guide.

*/ Type?: ContactFlowType | string; @@ -1419,7 +1572,8 @@ export interface RoutingProfile { Description?: string; /** - *

The channels agents can handle in the Contact Control Panel (CCP) for this routing profile.

+ *

The channels agents can handle in the Contact Control Panel (CCP) for this routing + * profile.

*/ MediaConcurrencies?: MediaConcurrency[]; @@ -1996,14 +2150,15 @@ export interface GetCurrentMetricDataRequest { /** *

The queues, up to 100, or channels, to use to filter the metrics returned. Metric data is * retrieved only for the resources associated with the queues or channels included in the filter. - * You can include both queue IDs and queue ARNs in the same request. Both VOICE and CHAT channels are supported.

+ * You can include both queue IDs and queue ARNs in the same request. VOICE, CHAT, and TASK channels are supported.

*/ Filters: Filters | undefined; /** *

The grouping applied to the metrics returned. For example, when grouped by * QUEUE, the metrics returned apply to each queue rather than aggregated for all - * queues. If you group by CHANNEL, you should include a Channels filter. Both VOICE and CHAT channels are supported.

+ * queues. If you group by CHANNEL, you should include a Channels filter. + * VOICE, CHAT, and TASK channels are supported.

*

If no Grouping is included in the request, a summary of metrics is * returned.

*/ @@ -2081,8 +2236,8 @@ export interface GetCurrentMetricDataRequest { *
OLDEST_CONTACT_AGE
*
*

Unit: SECONDS

- *

When you use groupings, Unit says SECONDS but the Value is returned in MILLISECONDS. For example, if you get a - * response like this:

+ *

When you use groupings, Unit says SECONDS but the Value is returned in MILLISECONDS. For + * example, if you get a response like this:

*

* { "Metric": { "Name": "OLDEST_CONTACT_AGE", "Unit": "SECONDS" }, "Value": 24113.0 * }

@@ -2429,7 +2584,7 @@ export interface GetMetricDataRequest { /** *

The queues, up to 100, or channels, to use to filter the metrics returned. Metric data is * retrieved only for the resources associated with the queues or channels included in the filter. - * You can include both queue IDs and queue ARNs in the same request. Both VOICE and CHAT channels are supported.

+ * You can include both queue IDs and queue ARNs in the same request. VOICE, CHAT, and TASK channels are supported.

*/ Filters: Filters | undefined; @@ -3035,6 +3190,99 @@ export namespace ListInstanceStorageConfigsResponse { }); } +export interface ListIntegrationAssociationsRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

+ */ + NextToken?: string; + + /** + *

The maximum number of results to return per page.

+ */ + MaxResults?: number; +} + +export namespace ListIntegrationAssociationsRequest { + export const filterSensitiveLog = (obj: ListIntegrationAssociationsRequest): any => ({ + ...obj, + }); +} + +/** + *

Contains summary information about the associated AppIntegrations.

+ */ +export interface IntegrationAssociationSummary { + /** + *

The identifier for the AppIntegration association.

+ */ + IntegrationAssociationId?: string; + + /** + *

The Amazon Resource Name (ARN) for the AppIntegration association.

+ */ + IntegrationAssociationArn?: string; + + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId?: string; + + /** + *

The integration type.

+ */ + IntegrationType?: IntegrationType | string; + + /** + *

The Amazon Resource Name (ARN) for the AppIntegration.

+ */ + IntegrationArn?: string; + + /** + *

The URL for the external application.

+ */ + SourceApplicationUrl?: string; + + /** + *

The user-provided, friendly name for the external application.

+ */ + SourceApplicationName?: string; + + /** + *

The name of the source.

+ */ + SourceType?: SourceType | string; +} + +export namespace IntegrationAssociationSummary { + export const filterSensitiveLog = (obj: IntegrationAssociationSummary): any => ({ + ...obj, + }); +} + +export interface ListIntegrationAssociationsResponse { + /** + *

The AppIntegration associations.

+ */ + IntegrationAssociationSummaryList?: IntegrationAssociationSummary[]; + + /** + *

If there are additional results, this is the token for the next set of results.

+ */ + NextToken?: string; +} + +export namespace ListIntegrationAssociationsResponse { + export const filterSensitiveLog = (obj: ListIntegrationAssociationsResponse): any => ({ + ...obj, + }); +} + export interface ListLambdaFunctionsRequest { /** *

The identifier of the Amazon Connect instance.

@@ -3652,8 +3900,9 @@ export interface RoutingProfileQueueConfigSummary { QueueName: string | undefined; /** - *

The order in which contacts are to be handled for the queue. For more information, see Queues: priority and - * delay.

+ *

The order in which contacts are to be handled for the queue. For more information, see + * Queues: priority and + * delay.

*/ Priority: number | undefined; @@ -3924,6 +4173,84 @@ export namespace ListTagsForResourceResponse { }); } +/** + *

Provides summary information about the use cases for the specified Amazon Connect AppIntegration + * association.

+ */ +export interface ListUseCasesRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The identifier for the integration association.

+ */ + IntegrationAssociationId: string | undefined; + + /** + *

The token for the next set of results. Use the value returned in the previous + * response in the next request to retrieve the next set of results.

+ */ + NextToken?: string; + + /** + *

The maximum number of results to return per page.

+ */ + MaxResults?: number; +} + +export namespace ListUseCasesRequest { + export const filterSensitiveLog = (obj: ListUseCasesRequest): any => ({ + ...obj, + }); +} + +/** + *

Contains the use case.

+ */ +export interface UseCase { + /** + *

The identifier for the use case.

+ */ + UseCaseId?: string; + + /** + *

The Amazon Resource Name (ARN) for the use case.

+ */ + UseCaseArn?: string; + + /** + *

The type of use case to associate to the AppIntegration association. Each AppIntegration + * association can have only one of each use case type.

+ */ + UseCaseType?: UseCaseType | string; +} + +export namespace UseCase { + export const filterSensitiveLog = (obj: UseCase): any => ({ + ...obj, + }); +} + +export interface ListUseCasesResponse { + /** + *

The use cases.

+ */ + UseCaseSummaryList?: UseCase[]; + + /** + *

If there are additional results, this is the token for the next set of results.

+ */ + NextToken?: string; +} + +export namespace ListUseCasesResponse { + export const filterSensitiveLog = (obj: ListUseCasesResponse): any => ({ + ...obj, + }); +} + export interface ListUserHierarchyGroupsRequest { /** *

The identifier of the Amazon Connect instance.

@@ -4342,6 +4669,104 @@ export namespace StartOutboundVoiceContactResponse { }); } +export enum ReferenceType { + URL = "URL", +} + +/** + *

A link that an agent selects to complete a given task. You can have up to 4,096 UTF-8 bytes + * across all references for a contact.

+ */ +export interface Reference { + /** + *

A formatted URL that will be shown to an agent in the Contact Control Panel (CCP).

+ */ + Value: string | undefined; + + /** + *

A valid URL.

+ */ + Type: ReferenceType | string | undefined; +} + +export namespace Reference { + export const filterSensitiveLog = (obj: Reference): any => ({ + ...obj, + }); +} + +export interface StartTaskContactRequest { + /** + *

The identifier of the Amazon Connect instance.

+ */ + InstanceId: string | undefined; + + /** + *

The identifier of the previous chat, voice, or task contact.

+ */ + PreviousContactId?: string; + + /** + *

The identifier of the contact flow for initiating the tasks. To see the ContactFlowId in the + * Amazon Connect console user interface, on the navigation menu go to Routing, Contact Flows. Choose the contact flow. On + * the contact flow page, under the name of the contact flow, choose Show + * additional flow information. The ContactFlowId is the last part of the ARN, shown + * here in bold:

+ *

arn:aws:connect:us-west-2:xxxxxxxxxxxx:instance/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/contact-flow/846ec553-a005-41c0-8341-xxxxxxxxxxxx + *

+ */ + ContactFlowId: string | undefined; + + /** + *

A custom key-value pair using an attribute map. The attributes are standard Amazon Connect + * attributes, and can be accessed in contact flows just like any other contact attributes.

+ *

There can be up to 32,768 UTF-8 bytes across all key-value pairs per contact. Attribute keys + * can include only alphanumeric, dash, and underscore characters.

+ */ + Attributes?: { [key: string]: string }; + + /** + *

The name of a task that is shown to an agent in the Contact Control Panel (CCP).

+ */ + Name: string | undefined; + + /** + *

A formatted URL that is shown to an agent in the Contact Control Panel (CCP).

+ */ + References?: { [key: string]: Reference }; + + /** + *

A description of the task that is shown to an agent in the Contact Control Panel + * (CCP).

+ */ + Description?: string; + + /** + *

A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

+ */ + ClientToken?: string; +} + +export namespace StartTaskContactRequest { + export const filterSensitiveLog = (obj: StartTaskContactRequest): any => ({ + ...obj, + }); +} + +export interface StartTaskContactResponse { + /** + *

The identifier of this contact within the Amazon Connect instance.

+ */ + ContactId?: string; +} + +export namespace StartTaskContactResponse { + export const filterSensitiveLog = (obj: StartTaskContactResponse): any => ({ + ...obj, + }); +} + /** *

The contact with the specified ID is not active or does not exist.

*/ @@ -4533,7 +4958,9 @@ export interface UpdateContactFlowContentRequest { ContactFlowId: string | undefined; /** - *

The JSON string that represents contact flow’s content. For an example, see Example contact flow in Amazon Connect Flow language in the Amazon Connect Administrator Guide.

+ *

The JSON string that represents contact flow’s content. For an example, see Example contact + * flow in Amazon Connect Flow language in the Amazon Connect Administrator Guide. + *

*/ Content: string | undefined; } @@ -4709,7 +5136,8 @@ export interface UpdateRoutingProfileQueuesRequest { RoutingProfileId: string | undefined; /** - *

The queues to be updated for this routing profile.

+ *

The queues to be updated for this routing profile. Queues must first be associated to the + * routing profile. You can do this using AssociateRoutingProfileQueues.

*/ QueueConfigs: RoutingProfileQueueConfig[] | undefined; } diff --git a/clients/client-connect/pagination/ListIntegrationAssociationsPaginator.ts b/clients/client-connect/pagination/ListIntegrationAssociationsPaginator.ts new file mode 100644 index 000000000000..3e51b4c29667 --- /dev/null +++ b/clients/client-connect/pagination/ListIntegrationAssociationsPaginator.ts @@ -0,0 +1,57 @@ +import { Connect } from "../Connect"; +import { ConnectClient } from "../ConnectClient"; +import { + ListIntegrationAssociationsCommand, + ListIntegrationAssociationsCommandInput, + ListIntegrationAssociationsCommandOutput, +} from "../commands/ListIntegrationAssociationsCommand"; +import { ConnectPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ConnectClient, + input: ListIntegrationAssociationsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListIntegrationAssociationsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: Connect, + input: ListIntegrationAssociationsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listIntegrationAssociations(input, ...args); +}; +export async function* paginateListIntegrationAssociations( + config: ConnectPaginationConfiguration, + input: ListIntegrationAssociationsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListIntegrationAssociationsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof Connect) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ConnectClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } 
else { + throw new Error("Invalid client, expected Connect | ConnectClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-connect/pagination/ListUseCasesPaginator.ts b/clients/client-connect/pagination/ListUseCasesPaginator.ts new file mode 100644 index 000000000000..c69c6be952de --- /dev/null +++ b/clients/client-connect/pagination/ListUseCasesPaginator.ts @@ -0,0 +1,57 @@ +import { Connect } from "../Connect"; +import { ConnectClient } from "../ConnectClient"; +import { + ListUseCasesCommand, + ListUseCasesCommandInput, + ListUseCasesCommandOutput, +} from "../commands/ListUseCasesCommand"; +import { ConnectPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ConnectClient, + input: ListUseCasesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListUseCasesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: Connect, + input: ListUseCasesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listUseCases(input, ...args); +}; +export async function* paginateListUseCases( + config: ConnectPaginationConfiguration, + input: ListUseCasesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListUseCasesCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof Connect) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ConnectClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected Connect | ConnectClient"); + } + 
yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-connect/protocols/Aws_restJson1.ts b/clients/client-connect/protocols/Aws_restJson1.ts index a8d5047aa205..30efa51287ee 100644 --- a/clients/client-connect/protocols/Aws_restJson1.ts +++ b/clients/client-connect/protocols/Aws_restJson1.ts @@ -21,16 +21,26 @@ import { } from "../commands/AssociateSecurityKeyCommand"; import { CreateContactFlowCommandInput, CreateContactFlowCommandOutput } from "../commands/CreateContactFlowCommand"; import { CreateInstanceCommandInput, CreateInstanceCommandOutput } from "../commands/CreateInstanceCommand"; +import { + CreateIntegrationAssociationCommandInput, + CreateIntegrationAssociationCommandOutput, +} from "../commands/CreateIntegrationAssociationCommand"; import { CreateRoutingProfileCommandInput, CreateRoutingProfileCommandOutput, } from "../commands/CreateRoutingProfileCommand"; +import { CreateUseCaseCommandInput, CreateUseCaseCommandOutput } from "../commands/CreateUseCaseCommand"; import { CreateUserCommandInput, CreateUserCommandOutput } from "../commands/CreateUserCommand"; import { CreateUserHierarchyGroupCommandInput, CreateUserHierarchyGroupCommandOutput, } from "../commands/CreateUserHierarchyGroupCommand"; import { DeleteInstanceCommandInput, DeleteInstanceCommandOutput } from "../commands/DeleteInstanceCommand"; +import { + DeleteIntegrationAssociationCommandInput, + DeleteIntegrationAssociationCommandOutput, +} from "../commands/DeleteIntegrationAssociationCommand"; +import { DeleteUseCaseCommandInput, DeleteUseCaseCommandOutput } from "../commands/DeleteUseCaseCommand"; import { DeleteUserCommandInput, DeleteUserCommandOutput } from "../commands/DeleteUserCommand"; import { DeleteUserHierarchyGroupCommandInput, @@ -111,6 +121,10 @@ import { ListInstanceStorageConfigsCommandOutput, } from "../commands/ListInstanceStorageConfigsCommand"; import { ListInstancesCommandInput, 
ListInstancesCommandOutput } from "../commands/ListInstancesCommand"; +import { + ListIntegrationAssociationsCommandInput, + ListIntegrationAssociationsCommandOutput, +} from "../commands/ListIntegrationAssociationsCommand"; import { ListLambdaFunctionsCommandInput, ListLambdaFunctionsCommandOutput, @@ -136,6 +150,7 @@ import { ListTagsForResourceCommandInput, ListTagsForResourceCommandOutput, } from "../commands/ListTagsForResourceCommand"; +import { ListUseCasesCommandInput, ListUseCasesCommandOutput } from "../commands/ListUseCasesCommand"; import { ListUserHierarchyGroupsCommandInput, ListUserHierarchyGroupsCommandOutput, @@ -154,6 +169,7 @@ import { StartOutboundVoiceContactCommandInput, StartOutboundVoiceContactCommandOutput, } from "../commands/StartOutboundVoiceContactCommand"; +import { StartTaskContactCommandInput, StartTaskContactCommandOutput } from "../commands/StartTaskContactCommand"; import { StopContactCommandInput, StopContactCommandOutput } from "../commands/StopContactCommand"; import { StopContactRecordingCommandInput, @@ -262,6 +278,7 @@ import { InstanceStatusReason, InstanceStorageConfig, InstanceSummary, + IntegrationAssociationSummary, InternalServiceException, InvalidContactFlowException, InvalidParameterException, @@ -279,6 +296,7 @@ import { PromptSummary, QueueReference, QueueSummary, + Reference, ResourceConflictException, ResourceInUseException, ResourceNotFoundException, @@ -293,6 +311,7 @@ import { ServiceQuotaExceededException, Threshold, ThrottlingException, + UseCase, User, UserIdentityInfo, UserNotFoundException, @@ -591,6 +610,43 @@ export const serializeAws_restJson1CreateInstanceCommand = async ( }); }; +export const serializeAws_restJson1CreateIntegrationAssociationCommand = async ( + input: CreateIntegrationAssociationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = 
"/instance/{InstanceId}/integration-associations"; + if (input.InstanceId !== undefined) { + const labelValue: string = input.InstanceId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InstanceId."); + } + resolvedPath = resolvedPath.replace("{InstanceId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InstanceId."); + } + let body: any; + body = JSON.stringify({ + ...(input.IntegrationArn !== undefined && { IntegrationArn: input.IntegrationArn }), + ...(input.IntegrationType !== undefined && { IntegrationType: input.IntegrationType }), + ...(input.SourceApplicationName !== undefined && { SourceApplicationName: input.SourceApplicationName }), + ...(input.SourceApplicationUrl !== undefined && { SourceApplicationUrl: input.SourceApplicationUrl }), + ...(input.SourceType !== undefined && { SourceType: input.SourceType }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "PUT", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1CreateRoutingProfileCommand = async ( input: CreateRoutingProfileCommandInput, context: __SerdeContext @@ -633,6 +689,48 @@ export const serializeAws_restJson1CreateRoutingProfileCommand = async ( }); }; +export const serializeAws_restJson1CreateUseCaseCommand = async ( + input: CreateUseCaseCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}/use-cases"; + if (input.InstanceId !== undefined) { + const labelValue: string = input.InstanceId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InstanceId."); + } + resolvedPath = resolvedPath.replace("{InstanceId}", 
__extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InstanceId."); + } + if (input.IntegrationAssociationId !== undefined) { + const labelValue: string = input.IntegrationAssociationId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: IntegrationAssociationId."); + } + resolvedPath = resolvedPath.replace("{IntegrationAssociationId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: IntegrationAssociationId."); + } + let body: any; + body = JSON.stringify({ + ...(input.UseCaseType !== undefined && { UseCaseType: input.UseCaseType }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "PUT", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1CreateUserCommand = async ( input: CreateUserCommandInput, context: __SerdeContext @@ -744,6 +842,93 @@ export const serializeAws_restJson1DeleteInstanceCommand = async ( }); }; +export const serializeAws_restJson1DeleteIntegrationAssociationCommand = async ( + input: DeleteIntegrationAssociationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}"; + if (input.InstanceId !== undefined) { + const labelValue: string = input.InstanceId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InstanceId."); + } + resolvedPath = resolvedPath.replace("{InstanceId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InstanceId."); + } + if (input.IntegrationAssociationId !== undefined) { + const labelValue: string = input.IntegrationAssociationId; + if 
(labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: IntegrationAssociationId."); + } + resolvedPath = resolvedPath.replace("{IntegrationAssociationId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: IntegrationAssociationId."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteUseCaseCommand = async ( + input: DeleteUseCaseCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}/use-cases/{UseCaseId}"; + if (input.InstanceId !== undefined) { + const labelValue: string = input.InstanceId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InstanceId."); + } + resolvedPath = resolvedPath.replace("{InstanceId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InstanceId."); + } + if (input.IntegrationAssociationId !== undefined) { + const labelValue: string = input.IntegrationAssociationId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: IntegrationAssociationId."); + } + resolvedPath = resolvedPath.replace("{IntegrationAssociationId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: IntegrationAssociationId."); + } + if (input.UseCaseId !== undefined) { + const labelValue: string = input.UseCaseId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: UseCaseId."); + } + resolvedPath = resolvedPath.replace("{UseCaseId}", 
__extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: UseCaseId."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1DeleteUserCommand = async ( input: DeleteUserCommandInput, context: __SerdeContext @@ -1703,6 +1888,41 @@ export const serializeAws_restJson1ListInstanceStorageConfigsCommand = async ( }); }; +export const serializeAws_restJson1ListIntegrationAssociationsCommand = async ( + input: ListIntegrationAssociationsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/instance/{InstanceId}/integration-associations"; + if (input.InstanceId !== undefined) { + const labelValue: string = input.InstanceId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InstanceId."); + } + resolvedPath = resolvedPath.replace("{InstanceId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InstanceId."); + } + const query: any = { + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + export const serializeAws_restJson1ListLambdaFunctionsCommand = async ( input: ListLambdaFunctionsCommandInput, context: __SerdeContext @@ -2064,6 +2284,50 @@ export const serializeAws_restJson1ListTagsForResourceCommand = async ( }); }; +export const serializeAws_restJson1ListUseCasesCommand = 
async ( + input: ListUseCasesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}/use-cases"; + if (input.InstanceId !== undefined) { + const labelValue: string = input.InstanceId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InstanceId."); + } + resolvedPath = resolvedPath.replace("{InstanceId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InstanceId."); + } + if (input.IntegrationAssociationId !== undefined) { + const labelValue: string = input.IntegrationAssociationId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: IntegrationAssociationId."); + } + resolvedPath = resolvedPath.replace("{IntegrationAssociationId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: IntegrationAssociationId."); + } + const query: any = { + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + export const serializeAws_restJson1ListUserHierarchyGroupsCommand = async ( input: ListUserHierarchyGroupsCommandInput, context: __SerdeContext @@ -2255,6 +2519,39 @@ export const serializeAws_restJson1StartOutboundVoiceContactCommand = async ( }); }; +export const serializeAws_restJson1StartTaskContactCommand = async ( + input: StartTaskContactCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": 
"application/json", + }; + let resolvedPath = "/contact/task"; + let body: any; + body = JSON.stringify({ + ...(input.Attributes !== undefined && { Attributes: serializeAws_restJson1Attributes(input.Attributes, context) }), + ClientToken: input.ClientToken ?? generateIdempotencyToken(), + ...(input.ContactFlowId !== undefined && { ContactFlowId: input.ContactFlowId }), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.InstanceId !== undefined && { InstanceId: input.InstanceId }), + ...(input.Name !== undefined && { Name: input.Name }), + ...(input.PreviousContactId !== undefined && { PreviousContactId: input.PreviousContactId }), + ...(input.References !== undefined && { + References: serializeAws_restJson1ContactReferences(input.References, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "PUT", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1StopContactCommand = async ( input: StopContactCommandInput, context: __SerdeContext @@ -3850,32 +4147,32 @@ const deserializeAws_restJson1CreateInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_restJson1CreateRoutingProfileCommand = async ( +export const deserializeAws_restJson1CreateIntegrationAssociationCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return deserializeAws_restJson1CreateRoutingProfileCommandError(output, context); + return deserializeAws_restJson1CreateIntegrationAssociationCommandError(output, context); } - const contents: CreateRoutingProfileCommandOutput = { + const contents: CreateIntegrationAssociationCommandOutput = { $metadata: deserializeMetadata(output), - RoutingProfileArn: undefined, - RoutingProfileId: 
undefined, + IntegrationAssociationArn: undefined, + IntegrationAssociationId: undefined, }; const data: any = await parseBody(output.body, context); - if (data.RoutingProfileArn !== undefined && data.RoutingProfileArn !== null) { - contents.RoutingProfileArn = data.RoutingProfileArn; + if (data.IntegrationAssociationArn !== undefined && data.IntegrationAssociationArn !== null) { + contents.IntegrationAssociationArn = data.IntegrationAssociationArn; } - if (data.RoutingProfileId !== undefined && data.RoutingProfileId !== null) { - contents.RoutingProfileId = data.RoutingProfileId; + if (data.IntegrationAssociationId !== undefined && data.IntegrationAssociationId !== null) { + contents.IntegrationAssociationId = data.IntegrationAssociationId; } return Promise.resolve(contents); }; -const deserializeAws_restJson1CreateRoutingProfileCommandError = async ( +const deserializeAws_restJson1CreateIntegrationAssociationCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -3900,14 +4197,6 @@ const deserializeAws_restJson1CreateRoutingProfileCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "InvalidParameterException": - case "com.amazonaws.connect#InvalidParameterException": - response = { - ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; case "InvalidRequestException": case "com.amazonaws.connect#InvalidRequestException": response = { @@ -3916,14 +4205,6 @@ const deserializeAws_restJson1CreateRoutingProfileCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "LimitExceededException": - case "com.amazonaws.connect#LimitExceededException": - response = { - ...(await deserializeAws_restJson1LimitExceededExceptionResponse(parsedOutput, context)), - name: errorCode, - 
$metadata: deserializeMetadata(output), - }; - break; case "ResourceNotFoundException": case "com.amazonaws.connect#ResourceNotFoundException": response = { @@ -3957,7 +4238,205 @@ const deserializeAws_restJson1CreateRoutingProfileCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_restJson1CreateUserCommand = async ( +export const deserializeAws_restJson1CreateRoutingProfileCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateRoutingProfileCommandError(output, context); + } + const contents: CreateRoutingProfileCommandOutput = { + $metadata: deserializeMetadata(output), + RoutingProfileArn: undefined, + RoutingProfileId: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.RoutingProfileArn !== undefined && data.RoutingProfileArn !== null) { + contents.RoutingProfileArn = data.RoutingProfileArn; + } + if (data.RoutingProfileId !== undefined && data.RoutingProfileId !== null) { + contents.RoutingProfileId = data.RoutingProfileId; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateRoutingProfileCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "DuplicateResourceException": + case "com.amazonaws.connect#DuplicateResourceException": + response = { + ...(await deserializeAws_restJson1DuplicateResourceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceException": + case 
"com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.connect#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LimitExceededException": + case "com.amazonaws.connect#LimitExceededException": + response = { + ...(await deserializeAws_restJson1LimitExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = 
message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateUseCaseCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateUseCaseCommandError(output, context); + } + const contents: CreateUseCaseCommandOutput = { + $metadata: deserializeMetadata(output), + UseCaseArn: undefined, + UseCaseId: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.UseCaseArn !== undefined && data.UseCaseArn !== null) { + contents.UseCaseArn = data.UseCaseArn; + } + if (data.UseCaseId !== undefined && data.UseCaseId !== null) { + contents.UseCaseId = data.UseCaseId; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateUseCaseCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "DuplicateResourceException": + case "com.amazonaws.connect#DuplicateResourceException": + response = { + ...(await deserializeAws_restJson1DuplicateResourceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServiceException": + case "com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await 
deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateUserCommand = async ( output: __HttpResponse, context: __SerdeContext ): Promise => { @@ -4238,6 +4717,156 @@ const deserializeAws_restJson1DeleteInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_restJson1DeleteIntegrationAssociationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteIntegrationAssociationCommandError(output, context); + } + const contents: DeleteIntegrationAssociationCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return 
Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteIntegrationAssociationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceException": + case "com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete 
response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteUseCaseCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteUseCaseCommandError(output, context); + } + const contents: DeleteUseCaseCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteUseCaseCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceException": + case "com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, 
context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_restJson1DeleteUserCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -6327,25 +6956,92 @@ export const deserializeAws_restJson1ListInstancesCommand = async ( if (output.statusCode !== 200 && output.statusCode >= 300) { return deserializeAws_restJson1ListInstancesCommandError(output, context); } - const contents: ListInstancesCommandOutput = { + const contents: ListInstancesCommandOutput = { + $metadata: deserializeMetadata(output), + InstanceSummaryList: undefined, + NextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.InstanceSummaryList !== undefined && data.InstanceSummaryList !== null) { + contents.InstanceSummaryList = deserializeAws_restJson1InstanceSummaryList(data.InstanceSummaryList, context); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListInstancesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + 
case "InternalServiceException": + case "com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListInstanceStorageConfigsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListInstanceStorageConfigsCommandError(output, context); + } + const contents: ListInstanceStorageConfigsCommandOutput = { $metadata: deserializeMetadata(output), - InstanceSummaryList: undefined, NextToken: undefined, + StorageConfigs: undefined, }; const data: any = await parseBody(output.body, context); - if (data.InstanceSummaryList !== undefined && data.InstanceSummaryList !== null) { - contents.InstanceSummaryList = deserializeAws_restJson1InstanceSummaryList(data.InstanceSummaryList, context); - } if (data.NextToken !== undefined && data.NextToken !== null) { contents.NextToken = data.NextToken; } + if (data.StorageConfigs !== undefined && data.StorageConfigs !== null) { + 
contents.StorageConfigs = deserializeAws_restJson1InstanceStorageConfigs(data.StorageConfigs, context); + } return Promise.resolve(contents); }; -const deserializeAws_restJson1ListInstancesCommandError = async ( +const deserializeAws_restJson1ListInstanceStorageConfigsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6362,6 +7058,14 @@ const deserializeAws_restJson1ListInstancesCommandError = async ( $metadata: deserializeMetadata(output), }; break; + case "InvalidParameterException": + case "com.amazonaws.connect#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "InvalidRequestException": case "com.amazonaws.connect#InvalidRequestException": response = { @@ -6370,6 +7074,22 @@ const deserializeAws_restJson1ListInstancesCommandError = async ( $metadata: deserializeMetadata(output), }; break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6387,32 +7107,35 @@ const deserializeAws_restJson1ListInstancesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_restJson1ListInstanceStorageConfigsCommand = 
async ( +export const deserializeAws_restJson1ListIntegrationAssociationsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return deserializeAws_restJson1ListInstanceStorageConfigsCommandError(output, context); + return deserializeAws_restJson1ListIntegrationAssociationsCommandError(output, context); } - const contents: ListInstanceStorageConfigsCommandOutput = { + const contents: ListIntegrationAssociationsCommandOutput = { $metadata: deserializeMetadata(output), + IntegrationAssociationSummaryList: undefined, NextToken: undefined, - StorageConfigs: undefined, }; const data: any = await parseBody(output.body, context); + if (data.IntegrationAssociationSummaryList !== undefined && data.IntegrationAssociationSummaryList !== null) { + contents.IntegrationAssociationSummaryList = deserializeAws_restJson1IntegrationAssociationSummaryList( + data.IntegrationAssociationSummaryList, + context + ); + } if (data.NextToken !== undefined && data.NextToken !== null) { contents.NextToken = data.NextToken; } - if (data.StorageConfigs !== undefined && data.StorageConfigs !== null) { - contents.StorageConfigs = deserializeAws_restJson1InstanceStorageConfigs(data.StorageConfigs, context); - } return Promise.resolve(contents); }; -const deserializeAws_restJson1ListInstanceStorageConfigsCommandError = async ( +const deserializeAws_restJson1ListIntegrationAssociationsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6429,14 +7152,6 @@ const deserializeAws_restJson1ListInstanceStorageConfigsCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "InvalidParameterException": - case "com.amazonaws.connect#InvalidParameterException": - response = { - ...(await 
deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; case "InvalidRequestException": case "com.amazonaws.connect#InvalidRequestException": response = { @@ -7396,6 +8111,89 @@ const deserializeAws_restJson1ListTagsForResourceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_restJson1ListUseCasesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListUseCasesCommandError(output, context); + } + const contents: ListUseCasesCommandOutput = { + $metadata: deserializeMetadata(output), + NextToken: undefined, + UseCaseSummaryList: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + if (data.UseCaseSummaryList !== undefined && data.UseCaseSummaryList !== null) { + contents.UseCaseSummaryList = deserializeAws_restJson1UseCaseSummaryList(data.UseCaseSummaryList, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListUseCasesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceException": + case "com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case 
"InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_restJson1ListUserHierarchyGroupsCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -7921,6 +8719,101 @@ const deserializeAws_restJson1StartOutboundVoiceContactCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_restJson1StartTaskContactCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1StartTaskContactCommandError(output, context); + } + const contents: StartTaskContactCommandOutput = { + $metadata: 
deserializeMetadata(output), + ContactId: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ContactId !== undefined && data.ContactId !== null) { + contents.ContactId = data.ContactId; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1StartTaskContactCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InternalServiceException": + case "com.amazonaws.connect#InternalServiceException": + response = { + ...(await deserializeAws_restJson1InternalServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.connect#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.connect#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.connect#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.connect#ServiceQuotaExceededException": + response = { + ...(await 
deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.connect#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_restJson1StopContactCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -9965,6 +10858,16 @@ const serializeAws_restJson1ChatMessage = (input: ChatMessage, context: __SerdeC }; }; +const serializeAws_restJson1ContactReferences = (input: { [key: string]: Reference }, context: __SerdeContext): any => { + return Object.entries(input).reduce( + (acc: { [key: string]: Reference }, [key, value]: [string, any]) => ({ + ...acc, + [key]: serializeAws_restJson1Reference(value, context), + }), + {} + ); +}; + const serializeAws_restJson1CurrentMetric = (input: CurrentMetric, context: __SerdeContext): any => { return { ...(input.Name !== undefined && { Name: input.Name }), @@ -10106,6 +11009,13 @@ const serializeAws_restJson1Queues = (input: string[], context: __SerdeContext): return input.map((entry) => entry); }; +const serializeAws_restJson1Reference = (input: Reference, context: __SerdeContext): any => { + return { + ...(input.Type !== undefined && { Type: input.Type }), + ...(input.Value !== undefined && { 
Value: input.Value }), + }; +}; + const serializeAws_restJson1RoutingProfileQueueConfig = ( input: RoutingProfileQueueConfig, context: __SerdeContext @@ -10588,6 +11498,43 @@ const deserializeAws_restJson1InstanceSummaryList = (output: any, context: __Ser return (output || []).map((entry: any) => deserializeAws_restJson1InstanceSummary(entry, context)); }; +const deserializeAws_restJson1IntegrationAssociationSummary = ( + output: any, + context: __SerdeContext +): IntegrationAssociationSummary => { + return { + InstanceId: output.InstanceId !== undefined && output.InstanceId !== null ? output.InstanceId : undefined, + IntegrationArn: + output.IntegrationArn !== undefined && output.IntegrationArn !== null ? output.IntegrationArn : undefined, + IntegrationAssociationArn: + output.IntegrationAssociationArn !== undefined && output.IntegrationAssociationArn !== null + ? output.IntegrationAssociationArn + : undefined, + IntegrationAssociationId: + output.IntegrationAssociationId !== undefined && output.IntegrationAssociationId !== null + ? output.IntegrationAssociationId + : undefined, + IntegrationType: + output.IntegrationType !== undefined && output.IntegrationType !== null ? output.IntegrationType : undefined, + SourceApplicationName: + output.SourceApplicationName !== undefined && output.SourceApplicationName !== null + ? output.SourceApplicationName + : undefined, + SourceApplicationUrl: + output.SourceApplicationUrl !== undefined && output.SourceApplicationUrl !== null + ? output.SourceApplicationUrl + : undefined, + SourceType: output.SourceType !== undefined && output.SourceType !== null ? 
output.SourceType : undefined, + } as any; +}; + +const deserializeAws_restJson1IntegrationAssociationSummaryList = ( + output: any, + context: __SerdeContext +): IntegrationAssociationSummary[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1IntegrationAssociationSummary(entry, context)); +}; + const deserializeAws_restJson1KinesisFirehoseConfig = (output: any, context: __SerdeContext): KinesisFirehoseConfig => { return { FirehoseArn: output.FirehoseArn !== undefined && output.FirehoseArn !== null ? output.FirehoseArn : undefined, @@ -10832,6 +11779,18 @@ const deserializeAws_restJson1Threshold = (output: any, context: __SerdeContext) } as any; }; +const deserializeAws_restJson1UseCase = (output: any, context: __SerdeContext): UseCase => { + return { + UseCaseArn: output.UseCaseArn !== undefined && output.UseCaseArn !== null ? output.UseCaseArn : undefined, + UseCaseId: output.UseCaseId !== undefined && output.UseCaseId !== null ? output.UseCaseId : undefined, + UseCaseType: output.UseCaseType !== undefined && output.UseCaseType !== null ? output.UseCaseType : undefined, + } as any; +}; + +const deserializeAws_restJson1UseCaseSummaryList = (output: any, context: __SerdeContext): UseCase[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1UseCase(entry, context)); +}; + const deserializeAws_restJson1User = (output: any, context: __SerdeContext): User => { return { Arn: output.Arn !== undefined && output.Arn !== null ? 
output.Arn : undefined, diff --git a/clients/client-devops-guru/.gitignore b/clients/client-devops-guru/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-devops-guru/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-devops-guru/.npmignore b/clients/client-devops-guru/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-devops-guru/.npmignore @@ -0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-devops-guru/DevOpsGuru.ts b/clients/client-devops-guru/DevOpsGuru.ts new file mode 100644 index 000000000000..d68485eb7c3a --- /dev/null +++ b/clients/client-devops-guru/DevOpsGuru.ts @@ -0,0 +1,720 @@ +import { DevOpsGuruClient } from "./DevOpsGuruClient"; +import { + AddNotificationChannelCommand, + AddNotificationChannelCommandInput, + AddNotificationChannelCommandOutput, +} from "./commands/AddNotificationChannelCommand"; +import { + DescribeAccountHealthCommand, + DescribeAccountHealthCommandInput, + DescribeAccountHealthCommandOutput, +} from "./commands/DescribeAccountHealthCommand"; +import { + DescribeAccountOverviewCommand, + DescribeAccountOverviewCommandInput, + DescribeAccountOverviewCommandOutput, +} from "./commands/DescribeAccountOverviewCommand"; +import { + DescribeAnomalyCommand, + DescribeAnomalyCommandInput, + DescribeAnomalyCommandOutput, +} from "./commands/DescribeAnomalyCommand"; +import { + DescribeInsightCommand, + DescribeInsightCommandInput, + DescribeInsightCommandOutput, +} from "./commands/DescribeInsightCommand"; +import { + DescribeResourceCollectionHealthCommand, + DescribeResourceCollectionHealthCommandInput, + DescribeResourceCollectionHealthCommandOutput, +} from "./commands/DescribeResourceCollectionHealthCommand"; +import { + 
DescribeServiceIntegrationCommand, + DescribeServiceIntegrationCommandInput, + DescribeServiceIntegrationCommandOutput, +} from "./commands/DescribeServiceIntegrationCommand"; +import { + GetResourceCollectionCommand, + GetResourceCollectionCommandInput, + GetResourceCollectionCommandOutput, +} from "./commands/GetResourceCollectionCommand"; +import { + ListAnomaliesForInsightCommand, + ListAnomaliesForInsightCommandInput, + ListAnomaliesForInsightCommandOutput, +} from "./commands/ListAnomaliesForInsightCommand"; +import { ListEventsCommand, ListEventsCommandInput, ListEventsCommandOutput } from "./commands/ListEventsCommand"; +import { + ListInsightsCommand, + ListInsightsCommandInput, + ListInsightsCommandOutput, +} from "./commands/ListInsightsCommand"; +import { + ListNotificationChannelsCommand, + ListNotificationChannelsCommandInput, + ListNotificationChannelsCommandOutput, +} from "./commands/ListNotificationChannelsCommand"; +import { + ListRecommendationsCommand, + ListRecommendationsCommandInput, + ListRecommendationsCommandOutput, +} from "./commands/ListRecommendationsCommand"; +import { PutFeedbackCommand, PutFeedbackCommandInput, PutFeedbackCommandOutput } from "./commands/PutFeedbackCommand"; +import { + RemoveNotificationChannelCommand, + RemoveNotificationChannelCommandInput, + RemoveNotificationChannelCommandOutput, +} from "./commands/RemoveNotificationChannelCommand"; +import { + SearchInsightsCommand, + SearchInsightsCommandInput, + SearchInsightsCommandOutput, +} from "./commands/SearchInsightsCommand"; +import { + UpdateResourceCollectionCommand, + UpdateResourceCollectionCommandInput, + UpdateResourceCollectionCommandOutput, +} from "./commands/UpdateResourceCollectionCommand"; +import { + UpdateServiceIntegrationCommand, + UpdateServiceIntegrationCommandInput, + UpdateServiceIntegrationCommandOutput, +} from "./commands/UpdateServiceIntegrationCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + *

Amazon DevOps Guru is a fully managed service that helps you identify anomalous behavior in business + * critical operational applications. You specify the AWS resources that you want DevOps Guru to cover, + * then the Amazon CloudWatch metrics and AWS CloudTrail events related to those resources are analyzed. When + * anomalous behavior is detected, DevOps Guru creates an insight that includes + * recommendations, related events, and related metrics that can help you improve your + * operational applications. For more information, see What is Amazon DevOps Guru.

+ * + *

+ * You can specify 1 or 2 Amazon Simple Notification Service topics so you are notified every time a new insight is created. You can also enable DevOps Guru to generate + * an OpsItem in AWS Systems Manager for each insight to help you manage and track your work addressing insights. + *

+ * + *

+ * To learn about the DevOps Guru workflow, see How DevOps Guru works. To + * learn about DevOps Guru concepts, see Concepts in DevOps Guru. + *

+ */ +export class DevOpsGuru extends DevOpsGuruClient { + /** + *

+ * Adds a notification channel to DevOps Guru. A notification channel is used to notify you about important DevOps Guru events, such as when an insight is generated. + *

+ */ + public addNotificationChannel( + args: AddNotificationChannelCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public addNotificationChannel( + args: AddNotificationChannelCommandInput, + cb: (err: any, data?: AddNotificationChannelCommandOutput) => void + ): void; + public addNotificationChannel( + args: AddNotificationChannelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AddNotificationChannelCommandOutput) => void + ): void; + public addNotificationChannel( + args: AddNotificationChannelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: AddNotificationChannelCommandOutput) => void), + cb?: (err: any, data?: AddNotificationChannelCommandOutput) => void + ): Promise | void { + const command = new AddNotificationChannelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns the number of open reactive insights, the number of open proactive insights, and the number of metrics analyzed in your AWS account. + * Use these numbers to gauge the health of operations in your AWS account. + *

+ */ + public describeAccountHealth( + args: DescribeAccountHealthCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeAccountHealth( + args: DescribeAccountHealthCommandInput, + cb: (err: any, data?: DescribeAccountHealthCommandOutput) => void + ): void; + public describeAccountHealth( + args: DescribeAccountHealthCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeAccountHealthCommandOutput) => void + ): void; + public describeAccountHealth( + args: DescribeAccountHealthCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeAccountHealthCommandOutput) => void), + cb?: (err: any, data?: DescribeAccountHealthCommandOutput) => void + ): Promise | void { + const command = new DescribeAccountHealthCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * For the time range passed in, returns the number of open reactive insight that were created, the number of open proactive insights + * that were created, and the Mean Time to Recover (MTTR) for all closed reactive insights. + *

+ */ + public describeAccountOverview( + args: DescribeAccountOverviewCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeAccountOverview( + args: DescribeAccountOverviewCommandInput, + cb: (err: any, data?: DescribeAccountOverviewCommandOutput) => void + ): void; + public describeAccountOverview( + args: DescribeAccountOverviewCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeAccountOverviewCommandOutput) => void + ): void; + public describeAccountOverview( + args: DescribeAccountOverviewCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeAccountOverviewCommandOutput) => void), + cb?: (err: any, data?: DescribeAccountOverviewCommandOutput) => void + ): Promise | void { + const command = new DescribeAccountOverviewCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns details about an anomaly that you specify using its ID. + *

+ */ + public describeAnomaly( + args: DescribeAnomalyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeAnomaly( + args: DescribeAnomalyCommandInput, + cb: (err: any, data?: DescribeAnomalyCommandOutput) => void + ): void; + public describeAnomaly( + args: DescribeAnomalyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeAnomalyCommandOutput) => void + ): void; + public describeAnomaly( + args: DescribeAnomalyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeAnomalyCommandOutput) => void), + cb?: (err: any, data?: DescribeAnomalyCommandOutput) => void + ): Promise | void { + const command = new DescribeAnomalyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns details about an insight that you specify using its ID. + *

+ */ + public describeInsight( + args: DescribeInsightCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeInsight( + args: DescribeInsightCommandInput, + cb: (err: any, data?: DescribeInsightCommandOutput) => void + ): void; + public describeInsight( + args: DescribeInsightCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeInsightCommandOutput) => void + ): void; + public describeInsight( + args: DescribeInsightCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeInsightCommandOutput) => void), + cb?: (err: any, data?: DescribeInsightCommandOutput) => void + ): Promise | void { + const command = new DescribeInsightCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns the number of open proactive insights, open reactive insights, and the Mean Time to Recover (MTTR) for all closed insights in + * resource collections in your account. You specify the type of AWS resources collection. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + public describeResourceCollectionHealth( + args: DescribeResourceCollectionHealthCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeResourceCollectionHealth( + args: DescribeResourceCollectionHealthCommandInput, + cb: (err: any, data?: DescribeResourceCollectionHealthCommandOutput) => void + ): void; + public describeResourceCollectionHealth( + args: DescribeResourceCollectionHealthCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeResourceCollectionHealthCommandOutput) => void + ): void; + public describeResourceCollectionHealth( + args: DescribeResourceCollectionHealthCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeResourceCollectionHealthCommandOutput) => void), + cb?: (err: any, data?: DescribeResourceCollectionHealthCommandOutput) => void + ): Promise | void { + const command = new DescribeResourceCollectionHealthCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns the integration status of services that are integrated with DevOps Guru. + * The one service that can be integrated with DevOps Guru + * is AWS Systems Manager, which can be used to create an OpsItem for each generated insight. + *

+ */ + public describeServiceIntegration( + args: DescribeServiceIntegrationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeServiceIntegration( + args: DescribeServiceIntegrationCommandInput, + cb: (err: any, data?: DescribeServiceIntegrationCommandOutput) => void + ): void; + public describeServiceIntegration( + args: DescribeServiceIntegrationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeServiceIntegrationCommandOutput) => void + ): void; + public describeServiceIntegration( + args: DescribeServiceIntegrationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeServiceIntegrationCommandOutput) => void), + cb?: (err: any, data?: DescribeServiceIntegrationCommandOutput) => void + ): Promise | void { + const command = new DescribeServiceIntegrationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns lists AWS resources that are of the specified resource collection type. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + public getResourceCollection( + args: GetResourceCollectionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getResourceCollection( + args: GetResourceCollectionCommandInput, + cb: (err: any, data?: GetResourceCollectionCommandOutput) => void + ): void; + public getResourceCollection( + args: GetResourceCollectionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetResourceCollectionCommandOutput) => void + ): void; + public getResourceCollection( + args: GetResourceCollectionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetResourceCollectionCommandOutput) => void), + cb?: (err: any, data?: GetResourceCollectionCommandOutput) => void + ): Promise | void { + const command = new GetResourceCollectionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns a list of the anomalies that belong to an insight that you specify using its ID. + *

+ */ + public listAnomaliesForInsight( + args: ListAnomaliesForInsightCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listAnomaliesForInsight( + args: ListAnomaliesForInsightCommandInput, + cb: (err: any, data?: ListAnomaliesForInsightCommandOutput) => void + ): void; + public listAnomaliesForInsight( + args: ListAnomaliesForInsightCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAnomaliesForInsightCommandOutput) => void + ): void; + public listAnomaliesForInsight( + args: ListAnomaliesForInsightCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListAnomaliesForInsightCommandOutput) => void), + cb?: (err: any, data?: ListAnomaliesForInsightCommandOutput) => void + ): Promise | void { + const command = new ListAnomaliesForInsightCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns a list of the events emitted by the resources that are evaluated by DevOps Guru. You can use filters to specify which events are returned. + *

+ */ + public listEvents(args: ListEventsCommandInput, options?: __HttpHandlerOptions): Promise; + public listEvents(args: ListEventsCommandInput, cb: (err: any, data?: ListEventsCommandOutput) => void): void; + public listEvents( + args: ListEventsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListEventsCommandOutput) => void + ): void; + public listEvents( + args: ListEventsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListEventsCommandOutput) => void), + cb?: (err: any, data?: ListEventsCommandOutput) => void + ): Promise | void { + const command = new ListEventsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time and + * status (ONGOING, CLOSED, or ANY). + *

+ */ + public listInsights( + args: ListInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listInsights(args: ListInsightsCommandInput, cb: (err: any, data?: ListInsightsCommandOutput) => void): void; + public listInsights( + args: ListInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListInsightsCommandOutput) => void + ): void; + public listInsights( + args: ListInsightsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListInsightsCommandOutput) => void), + cb?: (err: any, data?: ListInsightsCommandOutput) => void + ): Promise | void { + const command = new ListInsightsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns a list of notification channels configured for DevOps Guru. Each notification channel is used to notify you when + * DevOps Guru generates an insight that contains information about how to improve your operations. The one + * supported notification channel is Amazon Simple Notification Service (Amazon SNS). + *

+ */ + public listNotificationChannels( + args: ListNotificationChannelsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listNotificationChannels( + args: ListNotificationChannelsCommandInput, + cb: (err: any, data?: ListNotificationChannelsCommandOutput) => void + ): void; + public listNotificationChannels( + args: ListNotificationChannelsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListNotificationChannelsCommandOutput) => void + ): void; + public listNotificationChannels( + args: ListNotificationChannelsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListNotificationChannelsCommandOutput) => void), + cb?: (err: any, data?: ListNotificationChannelsCommandOutput) => void + ): Promise | void { + const command = new ListNotificationChannelsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns a list of a specified insight's recommendations. Each recommendation includes a list of related metrics and a list of related events. + *

+ */ + public listRecommendations( + args: ListRecommendationsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listRecommendations( + args: ListRecommendationsCommandInput, + cb: (err: any, data?: ListRecommendationsCommandOutput) => void + ): void; + public listRecommendations( + args: ListRecommendationsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListRecommendationsCommandOutput) => void + ): void; + public listRecommendations( + args: ListRecommendationsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListRecommendationsCommandOutput) => void), + cb?: (err: any, data?: ListRecommendationsCommandOutput) => void + ): Promise | void { + const command = new ListRecommendationsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Collects customer feedback about the specified insight. + *

+ */ + public putFeedback(args: PutFeedbackCommandInput, options?: __HttpHandlerOptions): Promise; + public putFeedback(args: PutFeedbackCommandInput, cb: (err: any, data?: PutFeedbackCommandOutput) => void): void; + public putFeedback( + args: PutFeedbackCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutFeedbackCommandOutput) => void + ): void; + public putFeedback( + args: PutFeedbackCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: PutFeedbackCommandOutput) => void), + cb?: (err: any, data?: PutFeedbackCommandOutput) => void + ): Promise | void { + const command = new PutFeedbackCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Removes a notification channel from DevOps Guru. A notification channel is used to notify you when DevOps Guru generates an insight + * that contains information about how to improve your operations. + *

+ */ + public removeNotificationChannel( + args: RemoveNotificationChannelCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public removeNotificationChannel( + args: RemoveNotificationChannelCommandInput, + cb: (err: any, data?: RemoveNotificationChannelCommandOutput) => void + ): void; + public removeNotificationChannel( + args: RemoveNotificationChannelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: RemoveNotificationChannelCommandOutput) => void + ): void; + public removeNotificationChannel( + args: RemoveNotificationChannelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: RemoveNotificationChannelCommandOutput) => void), + cb?: (err: any, data?: RemoveNotificationChannelCommandOutput) => void + ): Promise | void { + const command = new RemoveNotificationChannelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time, one or more statuses + * (ONGOING, CLOSED, and CLOSED), one or more severities (LOW, MEDIUM, + * and HIGH), and type (REACTIVE or PROACTIVE). + *

+ *

+ * Use the Filters parameter to specify status and severity + * search parameters. Use the Type parameter to specify REACTIVE or PROACTIVE in your search. + *

+ */ + public searchInsights( + args: SearchInsightsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public searchInsights( + args: SearchInsightsCommandInput, + cb: (err: any, data?: SearchInsightsCommandOutput) => void + ): void; + public searchInsights( + args: SearchInsightsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: SearchInsightsCommandOutput) => void + ): void; + public searchInsights( + args: SearchInsightsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: SearchInsightsCommandOutput) => void), + cb?: (err: any, data?: SearchInsightsCommandOutput) => void + ): Promise | void { + const command = new SearchInsightsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

Updates the collection of resources that DevOps Guru analyzes. + * The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. This method also creates the IAM role required for you + * to use DevOps Guru.

+ */ + public updateResourceCollection( + args: UpdateResourceCollectionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateResourceCollection( + args: UpdateResourceCollectionCommandInput, + cb: (err: any, data?: UpdateResourceCollectionCommandOutput) => void + ): void; + public updateResourceCollection( + args: UpdateResourceCollectionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateResourceCollectionCommandOutput) => void + ): void; + public updateResourceCollection( + args: UpdateResourceCollectionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateResourceCollectionCommandOutput) => void), + cb?: (err: any, data?: UpdateResourceCollectionCommandOutput) => void + ): Promise | void { + const command = new UpdateResourceCollectionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

+ * Enables or disables integration with a service that can be integrated with DevOps Guru. The one service that can be integrated with + * DevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight. + *

+ */ + public updateServiceIntegration( + args: UpdateServiceIntegrationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateServiceIntegration( + args: UpdateServiceIntegrationCommandInput, + cb: (err: any, data?: UpdateServiceIntegrationCommandOutput) => void + ): void; + public updateServiceIntegration( + args: UpdateServiceIntegrationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateServiceIntegrationCommandOutput) => void + ): void; + public updateServiceIntegration( + args: UpdateServiceIntegrationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateServiceIntegrationCommandOutput) => void), + cb?: (err: any, data?: UpdateServiceIntegrationCommandOutput) => void + ): Promise | void { + const command = new UpdateServiceIntegrationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-devops-guru/DevOpsGuruClient.ts b/clients/client-devops-guru/DevOpsGuruClient.ts new file mode 100644 index 000000000000..176dfaff77e7 --- /dev/null +++ b/clients/client-devops-guru/DevOpsGuruClient.ts @@ -0,0 +1,306 @@ +import { + AddNotificationChannelCommandInput, + AddNotificationChannelCommandOutput, +} from "./commands/AddNotificationChannelCommand"; +import { + DescribeAccountHealthCommandInput, + DescribeAccountHealthCommandOutput, +} from "./commands/DescribeAccountHealthCommand"; +import { + DescribeAccountOverviewCommandInput, + DescribeAccountOverviewCommandOutput, +} from "./commands/DescribeAccountOverviewCommand"; +import { DescribeAnomalyCommandInput, DescribeAnomalyCommandOutput } from "./commands/DescribeAnomalyCommand"; +import { DescribeInsightCommandInput, 
DescribeInsightCommandOutput } from "./commands/DescribeInsightCommand"; +import { + DescribeResourceCollectionHealthCommandInput, + DescribeResourceCollectionHealthCommandOutput, +} from "./commands/DescribeResourceCollectionHealthCommand"; +import { + DescribeServiceIntegrationCommandInput, + DescribeServiceIntegrationCommandOutput, +} from "./commands/DescribeServiceIntegrationCommand"; +import { + GetResourceCollectionCommandInput, + GetResourceCollectionCommandOutput, +} from "./commands/GetResourceCollectionCommand"; +import { + ListAnomaliesForInsightCommandInput, + ListAnomaliesForInsightCommandOutput, +} from "./commands/ListAnomaliesForInsightCommand"; +import { ListEventsCommandInput, ListEventsCommandOutput } from "./commands/ListEventsCommand"; +import { ListInsightsCommandInput, ListInsightsCommandOutput } from "./commands/ListInsightsCommand"; +import { + ListNotificationChannelsCommandInput, + ListNotificationChannelsCommandOutput, +} from "./commands/ListNotificationChannelsCommand"; +import { + ListRecommendationsCommandInput, + ListRecommendationsCommandOutput, +} from "./commands/ListRecommendationsCommand"; +import { PutFeedbackCommandInput, PutFeedbackCommandOutput } from "./commands/PutFeedbackCommand"; +import { + RemoveNotificationChannelCommandInput, + RemoveNotificationChannelCommandOutput, +} from "./commands/RemoveNotificationChannelCommand"; +import { SearchInsightsCommandInput, SearchInsightsCommandOutput } from "./commands/SearchInsightsCommand"; +import { + UpdateResourceCollectionCommandInput, + UpdateResourceCollectionCommandOutput, +} from "./commands/UpdateResourceCollectionCommand"; +import { + UpdateServiceIntegrationCommandInput, + UpdateServiceIntegrationCommandOutput, +} from "./commands/UpdateServiceIntegrationCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + 
resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = + | AddNotificationChannelCommandInput + | DescribeAccountHealthCommandInput + | DescribeAccountOverviewCommandInput + | DescribeAnomalyCommandInput + | DescribeInsightCommandInput + | DescribeResourceCollectionHealthCommandInput + | DescribeServiceIntegrationCommandInput + | GetResourceCollectionCommandInput + | ListAnomaliesForInsightCommandInput + | ListEventsCommandInput + | ListInsightsCommandInput + | ListNotificationChannelsCommandInput + | ListRecommendationsCommandInput + | PutFeedbackCommandInput + | RemoveNotificationChannelCommandInput + | 
SearchInsightsCommandInput + | UpdateResourceCollectionCommandInput + | UpdateServiceIntegrationCommandInput; + +export type ServiceOutputTypes = + | AddNotificationChannelCommandOutput + | DescribeAccountHealthCommandOutput + | DescribeAccountOverviewCommandOutput + | DescribeAnomalyCommandOutput + | DescribeInsightCommandOutput + | DescribeResourceCollectionHealthCommandOutput + | DescribeServiceIntegrationCommandOutput + | GetResourceCollectionCommandOutput + | ListAnomaliesForInsightCommandOutput + | ListEventsCommandOutput + | ListInsightsCommandOutput + | ListNotificationChannelsCommandOutput + | ListRecommendationsCommandOutput + | PutFeedbackCommandOutput + | RemoveNotificationChannelCommandOutput + | SearchInsightsCommandOutput + | UpdateResourceCollectionCommandOutput + | UpdateServiceIntegrationCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. + */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. + */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. 
+ */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dyanamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region. 
+ */ + regionInfoProvider?: RegionInfoProvider; +} + +export type DevOpsGuruClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type DevOpsGuruClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + *

Amazon DevOps Guru is a fully managed service that helps you identify anomalous behavior in business + * critical operational applications. You specify the AWS resources that you want DevOps Guru to cover, + * then the Amazon CloudWatch metrics and AWS CloudTrail events related to those resources are analyzed. When + * anomalous behavior is detected, DevOps Guru creates an insight that includes + * recommendations, related events, and related metrics that can help you improve your + * operational applications. For more information, see What is Amazon DevOps Guru.

+ * + *

+ * You can specify 1 or 2 Amazon Simple Notification Service topics so you are notified every time a new insight is created. You can also enable DevOps Guru to generate + * an OpsItem in AWS Systems Manager for each insight to help you manage and track your work addressing insights. + *

+ * + *

+ * To learn about the DevOps Guru workflow, see How DevOps Guru works. To + * learn about DevOps Guru concepts, see Concepts in DevOps Guru. + *

+ */ +export class DevOpsGuruClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + DevOpsGuruClientResolvedConfig +> { + readonly config: DevOpsGuruClientResolvedConfig; + + constructor(configuration: DevOpsGuruClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-devops-guru/LICENSE b/clients/client-devops-guru/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-devops-guru/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-devops-guru/README.md b/clients/client-devops-guru/README.md new file mode 100644 index 000000000000..e9b7bcb82652 --- /dev/null +++ b/clients/client-devops-guru/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-devops-guru + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-devops-guru/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-devops-guru) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-devops-guru.svg)](https://www.npmjs.com/package/@aws-sdk/client-devops-guru) + +For SDK usage, please step to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-devops-guru/commands/AddNotificationChannelCommand.ts b/clients/client-devops-guru/commands/AddNotificationChannelCommand.ts new file mode 100644 index 000000000000..89e8500a5481 --- /dev/null +++ b/clients/client-devops-guru/commands/AddNotificationChannelCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { AddNotificationChannelRequest, AddNotificationChannelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1AddNotificationChannelCommand, + serializeAws_restJson1AddNotificationChannelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type AddNotificationChannelCommandInput = AddNotificationChannelRequest; +export type AddNotificationChannelCommandOutput = AddNotificationChannelResponse & __MetadataBearer; + +/** + *

+ * Adds a notification channel to DevOps Guru. A notification channel is used to notify you about important DevOps Guru events, such as when an insight is generated. + *

+ */ +export class AddNotificationChannelCommand extends $Command< + AddNotificationChannelCommandInput, + AddNotificationChannelCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: AddNotificationChannelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "AddNotificationChannelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: AddNotificationChannelRequest.filterSensitiveLog, + outputFilterSensitiveLog: AddNotificationChannelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: AddNotificationChannelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1AddNotificationChannelCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1AddNotificationChannelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/DescribeAccountHealthCommand.ts 
b/clients/client-devops-guru/commands/DescribeAccountHealthCommand.ts new file mode 100644 index 000000000000..7ff4e379be4f --- /dev/null +++ b/clients/client-devops-guru/commands/DescribeAccountHealthCommand.ts @@ -0,0 +1,91 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { DescribeAccountHealthRequest, DescribeAccountHealthResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeAccountHealthCommand, + serializeAws_restJson1DescribeAccountHealthCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeAccountHealthCommandInput = DescribeAccountHealthRequest; +export type DescribeAccountHealthCommandOutput = DescribeAccountHealthResponse & __MetadataBearer; + +/** + *

+ * Returns the number of open reactive insights, the number of open proactive insights, and the number of metrics analyzed in your AWS account. + * Use these numbers to gauge the health of operations in your AWS account. + *

+ */ +export class DescribeAccountHealthCommand extends $Command< + DescribeAccountHealthCommandInput, + DescribeAccountHealthCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeAccountHealthCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "DescribeAccountHealthCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeAccountHealthRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeAccountHealthResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeAccountHealthCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeAccountHealthCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeAccountHealthCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/DescribeAccountOverviewCommand.ts 
b/clients/client-devops-guru/commands/DescribeAccountOverviewCommand.ts new file mode 100644 index 000000000000..6660de848f52 --- /dev/null +++ b/clients/client-devops-guru/commands/DescribeAccountOverviewCommand.ts @@ -0,0 +1,91 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { DescribeAccountOverviewRequest, DescribeAccountOverviewResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeAccountOverviewCommand, + serializeAws_restJson1DescribeAccountOverviewCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeAccountOverviewCommandInput = DescribeAccountOverviewRequest; +export type DescribeAccountOverviewCommandOutput = DescribeAccountOverviewResponse & __MetadataBearer; + +/** + *

+ * For the time range passed in, returns the number of open reactive insight that were created, the number of open proactive insights + * that were created, and the Mean Time to Recover (MTTR) for all closed reactive insights. + *

+ */ +export class DescribeAccountOverviewCommand extends $Command< + DescribeAccountOverviewCommandInput, + DescribeAccountOverviewCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeAccountOverviewCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "DescribeAccountOverviewCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeAccountOverviewRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeAccountOverviewResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeAccountOverviewCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeAccountOverviewCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeAccountOverviewCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/DescribeAnomalyCommand.ts 
b/clients/client-devops-guru/commands/DescribeAnomalyCommand.ts new file mode 100644 index 000000000000..b0bd3495a476 --- /dev/null +++ b/clients/client-devops-guru/commands/DescribeAnomalyCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { DescribeAnomalyRequest, DescribeAnomalyResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeAnomalyCommand, + serializeAws_restJson1DescribeAnomalyCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeAnomalyCommandInput = DescribeAnomalyRequest; +export type DescribeAnomalyCommandOutput = DescribeAnomalyResponse & __MetadataBearer; + +/** + *

+ * Returns details about an anomaly that you specify using its ID. + *

+ */ +export class DescribeAnomalyCommand extends $Command< + DescribeAnomalyCommandInput, + DescribeAnomalyCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeAnomalyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "DescribeAnomalyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeAnomalyRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeAnomalyResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeAnomalyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeAnomalyCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeAnomalyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/DescribeInsightCommand.ts b/clients/client-devops-guru/commands/DescribeInsightCommand.ts new file mode 
100644 index 000000000000..81eeb27cf421 --- /dev/null +++ b/clients/client-devops-guru/commands/DescribeInsightCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { DescribeInsightRequest, DescribeInsightResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeInsightCommand, + serializeAws_restJson1DescribeInsightCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeInsightCommandInput = DescribeInsightRequest; +export type DescribeInsightCommandOutput = DescribeInsightResponse & __MetadataBearer; + +/** + *

+ * Returns details about an insight that you specify using its ID. + *

+ */ +export class DescribeInsightCommand extends $Command< + DescribeInsightCommandInput, + DescribeInsightCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeInsightCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "DescribeInsightCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeInsightRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeInsightResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeInsightCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeInsightCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeInsightCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/DescribeResourceCollectionHealthCommand.ts 
b/clients/client-devops-guru/commands/DescribeResourceCollectionHealthCommand.ts new file mode 100644 index 000000000000..75ce2e8a7892 --- /dev/null +++ b/clients/client-devops-guru/commands/DescribeResourceCollectionHealthCommand.ts @@ -0,0 +1,98 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { DescribeResourceCollectionHealthRequest, DescribeResourceCollectionHealthResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeResourceCollectionHealthCommand, + serializeAws_restJson1DescribeResourceCollectionHealthCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeResourceCollectionHealthCommandInput = DescribeResourceCollectionHealthRequest; +export type DescribeResourceCollectionHealthCommandOutput = DescribeResourceCollectionHealthResponse & __MetadataBearer; + +/** + *

+ * Returns the number of open proactive insights, open reactive insights, and the Mean Time to Recover (MTTR) for all closed insights in + * resource collections in your account. You specify the type of AWS resources collection. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ +export class DescribeResourceCollectionHealthCommand extends $Command< + DescribeResourceCollectionHealthCommandInput, + DescribeResourceCollectionHealthCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeResourceCollectionHealthCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "DescribeResourceCollectionHealthCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeResourceCollectionHealthRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeResourceCollectionHealthResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: DescribeResourceCollectionHealthCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeResourceCollectionHealthCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1DescribeResourceCollectionHealthCommand(output, context); + } + + // Start section: command_body_extra + // End 
section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/DescribeServiceIntegrationCommand.ts b/clients/client-devops-guru/commands/DescribeServiceIntegrationCommand.ts new file mode 100644 index 000000000000..ec14d707a558 --- /dev/null +++ b/clients/client-devops-guru/commands/DescribeServiceIntegrationCommand.ts @@ -0,0 +1,95 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { DescribeServiceIntegrationRequest, DescribeServiceIntegrationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeServiceIntegrationCommand, + serializeAws_restJson1DescribeServiceIntegrationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeServiceIntegrationCommandInput = DescribeServiceIntegrationRequest; +export type DescribeServiceIntegrationCommandOutput = DescribeServiceIntegrationResponse & __MetadataBearer; + +/** + *

+ * Returns the integration status of services that are integrated with DevOps Guru. + * The one service that can be integrated with DevOps Guru + * is AWS Systems Manager, which can be used to create an OpsItem for each generated insight. + *

+ */ +export class DescribeServiceIntegrationCommand extends $Command< + DescribeServiceIntegrationCommandInput, + DescribeServiceIntegrationCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeServiceIntegrationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "DescribeServiceIntegrationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeServiceIntegrationRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeServiceIntegrationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeServiceIntegrationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeServiceIntegrationCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1DescribeServiceIntegrationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-devops-guru/commands/GetResourceCollectionCommand.ts b/clients/client-devops-guru/commands/GetResourceCollectionCommand.ts new file mode 100644 index 000000000000..791e37a5ec11 --- /dev/null +++ b/clients/client-devops-guru/commands/GetResourceCollectionCommand.ts @@ -0,0 +1,91 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { GetResourceCollectionRequest, GetResourceCollectionResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetResourceCollectionCommand, + serializeAws_restJson1GetResourceCollectionCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetResourceCollectionCommandInput = GetResourceCollectionRequest; +export type GetResourceCollectionCommandOutput = GetResourceCollectionResponse & __MetadataBearer; + +/** + *

+ * Returns lists AWS resources that are of the specified resource collection type. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ +export class GetResourceCollectionCommand extends $Command< + GetResourceCollectionCommandInput, + GetResourceCollectionCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetResourceCollectionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "GetResourceCollectionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetResourceCollectionRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetResourceCollectionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetResourceCollectionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetResourceCollectionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetResourceCollectionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/ListAnomaliesForInsightCommand.ts 
b/clients/client-devops-guru/commands/ListAnomaliesForInsightCommand.ts new file mode 100644 index 000000000000..82d53201aa70 --- /dev/null +++ b/clients/client-devops-guru/commands/ListAnomaliesForInsightCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { ListAnomaliesForInsightRequest, ListAnomaliesForInsightResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListAnomaliesForInsightCommand, + serializeAws_restJson1ListAnomaliesForInsightCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListAnomaliesForInsightCommandInput = ListAnomaliesForInsightRequest; +export type ListAnomaliesForInsightCommandOutput = ListAnomaliesForInsightResponse & __MetadataBearer; + +/** + *

+ * Returns a list of the anomalies that belong to an insight that you specify using its ID. + *

+ */ +export class ListAnomaliesForInsightCommand extends $Command< + ListAnomaliesForInsightCommandInput, + ListAnomaliesForInsightCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListAnomaliesForInsightCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "ListAnomaliesForInsightCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListAnomaliesForInsightRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListAnomaliesForInsightResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListAnomaliesForInsightCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListAnomaliesForInsightCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListAnomaliesForInsightCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/ListEventsCommand.ts 
b/clients/client-devops-guru/commands/ListEventsCommand.ts new file mode 100644 index 000000000000..70182d24280b --- /dev/null +++ b/clients/client-devops-guru/commands/ListEventsCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { ListEventsRequest, ListEventsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListEventsCommand, + serializeAws_restJson1ListEventsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListEventsCommandInput = ListEventsRequest; +export type ListEventsCommandOutput = ListEventsResponse & __MetadataBearer; + +/** + *

+ * Returns a list of the events emitted by the resources that are evaluated by DevOps Guru. You can use filters to specify which events are returned. + *

+ */ +export class ListEventsCommand extends $Command< + ListEventsCommandInput, + ListEventsCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListEventsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "ListEventsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListEventsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListEventsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListEventsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListEventsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListEventsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/ListInsightsCommand.ts b/clients/client-devops-guru/commands/ListInsightsCommand.ts new file mode 100644 index 000000000000..f1c9a2da460f --- /dev/null +++ 
b/clients/client-devops-guru/commands/ListInsightsCommand.ts @@ -0,0 +1,91 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { ListInsightsRequest, ListInsightsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListInsightsCommand, + serializeAws_restJson1ListInsightsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListInsightsCommandInput = ListInsightsRequest; +export type ListInsightsCommandOutput = ListInsightsResponse & __MetadataBearer; + +/** + *

+ * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time and + * status (ONGOING, CLOSED, or ANY). + *

+ */ +export class ListInsightsCommand extends $Command< + ListInsightsCommandInput, + ListInsightsCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListInsightsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "ListInsightsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListInsightsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListInsightsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListInsightsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListInsightsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListInsightsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/ListNotificationChannelsCommand.ts b/clients/client-devops-guru/commands/ListNotificationChannelsCommand.ts new file mode 100644 index 
000000000000..704e5b9f40af --- /dev/null +++ b/clients/client-devops-guru/commands/ListNotificationChannelsCommand.ts @@ -0,0 +1,92 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { ListNotificationChannelsRequest, ListNotificationChannelsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListNotificationChannelsCommand, + serializeAws_restJson1ListNotificationChannelsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListNotificationChannelsCommandInput = ListNotificationChannelsRequest; +export type ListNotificationChannelsCommandOutput = ListNotificationChannelsResponse & __MetadataBearer; + +/** + *

+ * Returns a list of notification channels configured for DevOps Guru. Each notification channel is used to notify you when + * DevOps Guru generates an insight that contains information about how to improve your operations. The one + * supported notification channel is Amazon Simple Notification Service (Amazon SNS). + *

+ */ +export class ListNotificationChannelsCommand extends $Command< + ListNotificationChannelsCommandInput, + ListNotificationChannelsCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListNotificationChannelsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "ListNotificationChannelsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListNotificationChannelsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListNotificationChannelsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListNotificationChannelsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListNotificationChannelsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListNotificationChannelsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-devops-guru/commands/ListRecommendationsCommand.ts b/clients/client-devops-guru/commands/ListRecommendationsCommand.ts new file mode 100644 index 000000000000..5a0c546a6dfc --- /dev/null +++ b/clients/client-devops-guru/commands/ListRecommendationsCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { ListRecommendationsRequest, ListRecommendationsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListRecommendationsCommand, + serializeAws_restJson1ListRecommendationsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListRecommendationsCommandInput = ListRecommendationsRequest; +export type ListRecommendationsCommandOutput = ListRecommendationsResponse & __MetadataBearer; + +/** + *

+ * Returns a list of a specified insight's recommendations. Each recommendation includes a list of related metrics and a list of related events. + *

+ */ +export class ListRecommendationsCommand extends $Command< + ListRecommendationsCommandInput, + ListRecommendationsCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListRecommendationsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "ListRecommendationsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListRecommendationsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListRecommendationsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListRecommendationsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListRecommendationsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListRecommendationsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/PutFeedbackCommand.ts 
b/clients/client-devops-guru/commands/PutFeedbackCommand.ts new file mode 100644 index 000000000000..d3830085e813 --- /dev/null +++ b/clients/client-devops-guru/commands/PutFeedbackCommand.ts @@ -0,0 +1,90 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { PutFeedbackRequest, PutFeedbackResponse } from "../models/models_0"; +import { + deserializeAws_restJson1PutFeedbackCommand, + serializeAws_restJson1PutFeedbackCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type PutFeedbackCommandInput = PutFeedbackRequest; +export type PutFeedbackCommandOutput = PutFeedbackResponse & __MetadataBearer; + +/** + *

+ * Collects customer feedback about the specified insight. + *

+ */ +export class PutFeedbackCommand extends $Command< + PutFeedbackCommandInput, + PutFeedbackCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: PutFeedbackCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "PutFeedbackCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: PutFeedbackRequest.filterSensitiveLog, + outputFilterSensitiveLog: PutFeedbackResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: PutFeedbackCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1PutFeedbackCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1PutFeedbackCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/RemoveNotificationChannelCommand.ts b/clients/client-devops-guru/commands/RemoveNotificationChannelCommand.ts new file mode 100644 index 
000000000000..73bcb73817ba --- /dev/null +++ b/clients/client-devops-guru/commands/RemoveNotificationChannelCommand.ts @@ -0,0 +1,94 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { RemoveNotificationChannelRequest, RemoveNotificationChannelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1RemoveNotificationChannelCommand, + serializeAws_restJson1RemoveNotificationChannelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type RemoveNotificationChannelCommandInput = RemoveNotificationChannelRequest; +export type RemoveNotificationChannelCommandOutput = RemoveNotificationChannelResponse & __MetadataBearer; + +/** + *

+ * Removes a notification channel from DevOps Guru. A notification channel is used to notify you when DevOps Guru generates an insight + * that contains information about how to improve your operations. + *

+ */ +export class RemoveNotificationChannelCommand extends $Command< + RemoveNotificationChannelCommandInput, + RemoveNotificationChannelCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: RemoveNotificationChannelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "RemoveNotificationChannelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: RemoveNotificationChannelRequest.filterSensitiveLog, + outputFilterSensitiveLog: RemoveNotificationChannelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: RemoveNotificationChannelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1RemoveNotificationChannelCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1RemoveNotificationChannelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-devops-guru/commands/SearchInsightsCommand.ts b/clients/client-devops-guru/commands/SearchInsightsCommand.ts new file mode 100644 index 000000000000..e7590d44288b --- /dev/null +++ b/clients/client-devops-guru/commands/SearchInsightsCommand.ts @@ -0,0 +1,96 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { SearchInsightsRequest, SearchInsightsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1SearchInsightsCommand, + serializeAws_restJson1SearchInsightsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type SearchInsightsCommandInput = SearchInsightsRequest; +export type SearchInsightsCommandOutput = SearchInsightsResponse & __MetadataBearer; + +/** + *

+ * Returns a list of insights in your AWS account. You can specify which insights are returned by their start time, one or more statuses + * (ONGOING or CLOSED), one or more severities (LOW, MEDIUM, + * and HIGH), and type (REACTIVE or PROACTIVE). + *

+ *

+ * Use the Filters parameter to specify status and severity + * search parameters. Use the Type parameter to specify REACTIVE or PROACTIVE in your search. + *

+ */ +export class SearchInsightsCommand extends $Command< + SearchInsightsCommandInput, + SearchInsightsCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: SearchInsightsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "SearchInsightsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: SearchInsightsRequest.filterSensitiveLog, + outputFilterSensitiveLog: SearchInsightsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: SearchInsightsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1SearchInsightsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1SearchInsightsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/commands/UpdateResourceCollectionCommand.ts b/clients/client-devops-guru/commands/UpdateResourceCollectionCommand.ts new file 
mode 100644 index 000000000000..b1085d1193a5 --- /dev/null +++ b/clients/client-devops-guru/commands/UpdateResourceCollectionCommand.ts @@ -0,0 +1,91 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { UpdateResourceCollectionRequest, UpdateResourceCollectionResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateResourceCollectionCommand, + serializeAws_restJson1UpdateResourceCollectionCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateResourceCollectionCommandInput = UpdateResourceCollectionRequest; +export type UpdateResourceCollectionCommandOutput = UpdateResourceCollectionResponse & __MetadataBearer; + +/** + *

Updates the collection of resources that DevOps Guru analyzes. + * The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. This method also creates the IAM role required for you + * to use DevOps Guru.

+ */ +export class UpdateResourceCollectionCommand extends $Command< + UpdateResourceCollectionCommandInput, + UpdateResourceCollectionCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateResourceCollectionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "UpdateResourceCollectionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateResourceCollectionRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateResourceCollectionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateResourceCollectionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateResourceCollectionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateResourceCollectionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-devops-guru/commands/UpdateServiceIntegrationCommand.ts b/clients/client-devops-guru/commands/UpdateServiceIntegrationCommand.ts new file mode 100644 index 000000000000..4d5f931b912d --- /dev/null +++ b/clients/client-devops-guru/commands/UpdateServiceIntegrationCommand.ts @@ -0,0 +1,91 @@ +import { DevOpsGuruClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DevOpsGuruClient"; +import { UpdateServiceIntegrationRequest, UpdateServiceIntegrationResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateServiceIntegrationCommand, + serializeAws_restJson1UpdateServiceIntegrationCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateServiceIntegrationCommandInput = UpdateServiceIntegrationRequest; +export type UpdateServiceIntegrationCommandOutput = UpdateServiceIntegrationResponse & __MetadataBearer; + +/** + *

+ * Enables or disables integration with a service that can be integrated with DevOps Guru. The one service that can be integrated with + * DevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight. + *

+ */ +export class UpdateServiceIntegrationCommand extends $Command< + UpdateServiceIntegrationCommandInput, + UpdateServiceIntegrationCommandOutput, + DevOpsGuruClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateServiceIntegrationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DevOpsGuruClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DevOpsGuruClient"; + const commandName = "UpdateServiceIntegrationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateServiceIntegrationRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateServiceIntegrationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateServiceIntegrationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateServiceIntegrationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateServiceIntegrationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-devops-guru/endpoints.ts 
b/clients/client-devops-guru/endpoints.ts new file mode 100644 index 000000000000..f5fd4b40fce5 --- /dev/null +++ b/clients/client-devops-guru/endpoints.ts @@ -0,0 +1,81 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "devops-guru.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "devops-guru.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "devops-guru.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "devops-guru.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "devops-guru.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. + // Next, try to match partition endpoints. 
+ default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. + if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-devops-guru/index.ts b/clients/client-devops-guru/index.ts new file mode 100644 index 000000000000..27c590d16b0a --- /dev/null +++ b/clients/client-devops-guru/index.ts @@ -0,0 +1,30 @@ +export * from "./DevOpsGuruClient"; +export * from "./DevOpsGuru"; +export * from "./commands/AddNotificationChannelCommand"; +export * from "./commands/DescribeAccountHealthCommand"; +export * from "./commands/DescribeAccountOverviewCommand"; +export * from "./commands/DescribeAnomalyCommand"; +export * from "./commands/DescribeInsightCommand"; +export * from "./commands/DescribeResourceCollectionHealthCommand"; +export * from "./pagination/DescribeResourceCollectionHealthPaginator"; +export * from "./commands/DescribeServiceIntegrationCommand"; +export * from "./commands/GetResourceCollectionCommand"; +export * from "./pagination/GetResourceCollectionPaginator"; +export * from "./commands/ListAnomaliesForInsightCommand"; +export * from "./pagination/ListAnomaliesForInsightPaginator"; +export * from 
"./commands/ListEventsCommand"; +export * from "./pagination/ListEventsPaginator"; +export * from "./commands/ListInsightsCommand"; +export * from "./pagination/ListInsightsPaginator"; +export * from "./commands/ListNotificationChannelsCommand"; +export * from "./pagination/ListNotificationChannelsPaginator"; +export * from "./commands/ListRecommendationsCommand"; +export * from "./pagination/ListRecommendationsPaginator"; +export * from "./commands/PutFeedbackCommand"; +export * from "./commands/RemoveNotificationChannelCommand"; +export * from "./commands/SearchInsightsCommand"; +export * from "./pagination/SearchInsightsPaginator"; +export * from "./commands/UpdateResourceCollectionCommand"; +export * from "./commands/UpdateServiceIntegrationCommand"; +export * from "./pagination/Interfaces"; +export * from "./models/index"; diff --git a/clients/client-devops-guru/models/index.ts b/clients/client-devops-guru/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null +++ b/clients/client-devops-guru/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-devops-guru/models/models_0.ts b/clients/client-devops-guru/models/models_0.ts new file mode 100644 index 000000000000..d318a9c350a2 --- /dev/null +++ b/clients/client-devops-guru/models/models_0.ts @@ -0,0 +1,2677 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; + +/** + *

You don't have permissions to perform the requested operation. The user or role that is making the request must have at + * least one IAM permissions policy attached that grants the required permissions. For more information, see + * Access Management in the IAM User Guide.

+ */ +export interface AccessDeniedException extends __SmithyException, $MetadataBearer { + name: "AccessDeniedException"; + $fault: "client"; + Message: string | undefined; +} + +export namespace AccessDeniedException { + export const filterSensitiveLog = (obj: AccessDeniedException): any => ({ + ...obj, + }); +} + +/** + *

Contains the Amazon Resource Name (ARN) of an Amazon Simple Notification Service topic.

+ */ +export interface SnsChannelConfig { + /** + *

The Amazon Resource Name (ARN) of an Amazon Simple Notification Service topic.

+ */ + TopicArn?: string; +} + +export namespace SnsChannelConfig { + export const filterSensitiveLog = (obj: SnsChannelConfig): any => ({ + ...obj, + }); +} + +/** + *

Information about notification channels you have configured with DevOps Guru. + * The one + * supported notification channel is Amazon Simple Notification Service (Amazon SNS).

+ */ +export interface NotificationChannelConfig { + /** + *

+ * Information about a notification channel configured in DevOps Guru to send notifications when insights are created. + *

+ */ + Sns: SnsChannelConfig | undefined; +} + +export namespace NotificationChannelConfig { + export const filterSensitiveLog = (obj: NotificationChannelConfig): any => ({ + ...obj, + }); +} + +export interface AddNotificationChannelRequest { + /** + *

A NotificationChannelConfig object that specifies what type of + * notification channel to add. The one + * supported notification channel is Amazon Simple Notification Service (Amazon SNS).

+ */ + Config: NotificationChannelConfig | undefined; +} + +export namespace AddNotificationChannelRequest { + export const filterSensitiveLog = (obj: AddNotificationChannelRequest): any => ({ + ...obj, + }); +} + +export interface AddNotificationChannelResponse { + /** + *

+ * The ID of the added notification channel. + *

+ */ + Id: string | undefined; +} + +export namespace AddNotificationChannelResponse { + export const filterSensitiveLog = (obj: AddNotificationChannelResponse): any => ({ + ...obj, + }); +} + +/** + *

+ * An exception that is thrown when a conflict occurs. + *

+ */ +export interface ConflictException extends __SmithyException, $MetadataBearer { + name: "ConflictException"; + $fault: "client"; + Message: string | undefined; + /** + *

+ * The ID of the AWS resource in which a conflict occurred. + *

+ */ + ResourceId: string | undefined; + + /** + *

+ * The type of the AWS resource in which a conflict occurred. + *

+ */ + ResourceType: string | undefined; +} + +export namespace ConflictException { + export const filterSensitiveLog = (obj: ConflictException): any => ({ + ...obj, + }); +} + +/** + *

An internal failure in an Amazon service occurred.

+ */ +export interface InternalServerException extends __SmithyException, $MetadataBearer { + name: "InternalServerException"; + $fault: "server"; + Message: string | undefined; + /** + *

+ * The number of seconds after which the action that caused the internal server + * exception can be retried. + *

+ */ + RetryAfterSeconds?: number; +} + +export namespace InternalServerException { + export const filterSensitiveLog = (obj: InternalServerException): any => ({ + ...obj, + }); +} + +/** + *

A requested resource could not be found

+ */ +export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { + name: "ResourceNotFoundException"; + $fault: "client"; + Message: string | undefined; + /** + *

+ * The ID of the AWS resource that could not be found. + *

+ */ + ResourceId: string | undefined; + + /** + *

+ * The type of the AWS resource that could not be found. + *

+ */ + ResourceType: string | undefined; +} + +export namespace ResourceNotFoundException { + export const filterSensitiveLog = (obj: ResourceNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

The request contains a value that exceeds a maximum quota.

+ */ +export interface ServiceQuotaExceededException extends __SmithyException, $MetadataBearer { + name: "ServiceQuotaExceededException"; + $fault: "client"; + Message?: string; +} + +export namespace ServiceQuotaExceededException { + export const filterSensitiveLog = (obj: ServiceQuotaExceededException): any => ({ + ...obj, + }); +} + +/** + *

The request was denied due to request throttling.

+ */ +export interface ThrottlingException extends __SmithyException, $MetadataBearer { + name: "ThrottlingException"; + $fault: "client"; + Message: string | undefined; + /** + *

+ * The code of the quota that was exceeded, causing the throttling exception. + *

+ */ + QuotaCode?: string; + + /** + *

+ * The code of the service that caused the throttling exception. + *

+ */ + ServiceCode?: string; + + /** + *

+ * The number of seconds after which the action that caused the throttling + * exception can be retried. + *

+ */ + RetryAfterSeconds?: number; +} + +export namespace ThrottlingException { + export const filterSensitiveLog = (obj: ThrottlingException): any => ({ + ...obj, + }); +} + +/** + *

+ * The field associated with the validation exception. + *

+ */ +export interface ValidationExceptionField { + /** + *

+ * The name of the field. + *

+ */ + Name: string | undefined; + + /** + *

+ * The message associated with the validation exception with information to help + * determine its cause. + *

+ */ + Message: string | undefined; +} + +export namespace ValidationExceptionField { + export const filterSensitiveLog = (obj: ValidationExceptionField): any => ({ + ...obj, + }); +} + +export enum ValidationExceptionReason { + CANNOT_PARSE = "CANNOT_PARSE", + FIELD_VALIDATION_FAILED = "FIELD_VALIDATION_FAILED", + OTHER = "OTHER", + UNKNOWN_OPERATION = "UNKNOWN_OPERATION", +} + +/** + *

+ * Contains information about data passed in to a field during a request that is not valid. + *

+ */ +export interface ValidationException extends __SmithyException, $MetadataBearer { + name: "ValidationException"; + $fault: "client"; + /** + *

+ * A message that describes the validation exception. + *

+ */ + Message: string | undefined; + + /** + *

+ * The reason the validation exception was thrown. + *

+ */ + Reason?: ValidationExceptionReason | string; + + /** + *

+ * An array of fields that are associated with the validation exception. + *

+ */ + Fields?: ValidationExceptionField[]; +} + +export namespace ValidationException { + export const filterSensitiveLog = (obj: ValidationException): any => ({ + ...obj, + }); +} + +export enum AnomalySeverity { + HIGH = "HIGH", + LOW = "LOW", + MEDIUM = "MEDIUM", +} + +/** + *

+ * The dimension of an Amazon CloudWatch metric that is used when DevOps Guru analyzes the resources in your account for + * operational problems and anomalous behavior. A dimension is a name/value pair that is part of the identity + * of a metric. A metric can have up to 10 dimensions. For more information, see + * Dimensions + * in the Amazon CloudWatch User Guide. + *

+ */ +export interface CloudWatchMetricsDimension { + /** + *

+ * The name of the CloudWatch dimension. + *

+ */ + Name?: string; + + /** + *

+ * The value of the CloudWatch dimension. + *

+ */ + Value?: string; +} + +export namespace CloudWatchMetricsDimension { + export const filterSensitiveLog = (obj: CloudWatchMetricsDimension): any => ({ + ...obj, + }); +} + +export enum CloudWatchMetricsStat { + AVERAGE = "Average", + MAXIMUM = "Maximum", + MINIMUM = "Minimum", + P50 = "p50", + P90 = "p90", + P99 = "p99", + SAMPLE_COUNT = "SampleCount", + SUM = "Sum", +} + +/** + *

+ * Information about an Amazon CloudWatch metric. + *

+ */ +export interface CloudWatchMetricsDetail { + /** + *

+ * The name of the CloudWatch metric. + *

+ */ + MetricName?: string; + + /** + *

+ * The namespace of the CloudWatch metric. A namespace is a container for CloudWatch metrics. + *

+ */ + Namespace?: string; + + /** + *

+ * An array of CloudWatch dimensions associated with the metric. + *

+ */ + Dimensions?: CloudWatchMetricsDimension[]; + + /** + *

+ * The type of statistic associated with the CloudWatch metric. For more information, see + * Statistics in the + * Amazon CloudWatch User Guide. + *

+ */ + Stat?: CloudWatchMetricsStat | string; + + /** + *

+ * The unit of measure used for the CloudWatch metric. For example, Bytes, Seconds, + * Count, and Percent. + *

+ */ + Unit?: string; + + /** + *

+ * The length of time associated with the CloudWatch metric in number of seconds. + *

+ */ + Period?: number; +} + +export namespace CloudWatchMetricsDetail { + export const filterSensitiveLog = (obj: CloudWatchMetricsDetail): any => ({ + ...obj, + }); +} + +/** + *

+ * Details about the source of the anomalous operational data that triggered the anomaly. The + * one supported source is Amazon CloudWatch metrics. + *

+ */ +export interface AnomalySourceDetails { + /** + *

An array of CloudWatchMetricsDetail objects that contain information + * about the analyzed metrics that displayed anomalous behavior.

+ */ + CloudWatchMetrics?: CloudWatchMetricsDetail[]; +} + +export namespace AnomalySourceDetails { + export const filterSensitiveLog = (obj: AnomalySourceDetails): any => ({ + ...obj, + }); +} + +export enum AnomalyStatus { + CLOSED = "CLOSED", + ONGOING = "ONGOING", +} + +/** + *

+ * A time range that specifies when the observed unusual behavior in an anomaly started and ended. + *

+ */ +export interface AnomalyTimeRange { + /** + *

+ * The time when the anomalous behavior started. + *

+ */ + StartTime: Date | undefined; + + /** + *

The time when the anomalous behavior ended.

+ */ + EndTime?: Date; +} + +export namespace AnomalyTimeRange { + export const filterSensitiveLog = (obj: AnomalyTimeRange): any => ({ + ...obj, + }); +} + +export interface DescribeAccountHealthRequest {} + +export namespace DescribeAccountHealthRequest { + export const filterSensitiveLog = (obj: DescribeAccountHealthRequest): any => ({ + ...obj, + }); +} + +export interface DescribeAccountHealthResponse { + /** + *

+ * An integer that specifies the number of open reactive insights in your AWS account. + *

+ */ + OpenReactiveInsights: number | undefined; + + /** + *

+ * An integer that specifies the number of open proactive insights in your AWS account. + *

+ */ + OpenProactiveInsights: number | undefined; + + /** + *

+ * An integer that specifies the number of metrics that have been analyzed in your AWS account. + *

+ */ + MetricsAnalyzed: number | undefined; +} + +export namespace DescribeAccountHealthResponse { + export const filterSensitiveLog = (obj: DescribeAccountHealthResponse): any => ({ + ...obj, + }); +} + +export interface DescribeAccountOverviewRequest { + /** + *

+ * The start of the time range passed in. The start time granularity is at the + * day level. The floor of the start time is used. Returned information occurred after this day. + *

+ */ + FromTime: Date | undefined; + + /** + *

+ * The end of the time range passed in. The start time granularity is at the + * day level. The floor of the start time is used. Returned information occurred before this day. If this is not specified, then the current day is used. + *

+ */ + ToTime?: Date; +} + +export namespace DescribeAccountOverviewRequest { + export const filterSensitiveLog = (obj: DescribeAccountOverviewRequest): any => ({ + ...obj, + }); +} + +export interface DescribeAccountOverviewResponse { + /** + *

+ * An integer that specifies the number of open reactive insights in your AWS account that were created during the + * time range passed in. + *

+ */ + ReactiveInsights: number | undefined; + + /** + *

+ * An integer that specifies the number of open proactive insights in your AWS account that were created during the + * time range passed in. + *

+ */ + ProactiveInsights: number | undefined; + + /** + *

+ * The Mean Time to Recover (MTTR) for all closed insights that were created during the + * time range passed in. + *

+ */ + MeanTimeToRecoverInMilliseconds: number | undefined; +} + +export namespace DescribeAccountOverviewResponse { + export const filterSensitiveLog = (obj: DescribeAccountOverviewResponse): any => ({ + ...obj, + }); +} + +export interface DescribeAnomalyRequest { + /** + *

+ * The ID of the anomaly. + *

+ */ + Id: string | undefined; +} + +export namespace DescribeAnomalyRequest { + export const filterSensitiveLog = (obj: DescribeAnomalyRequest): any => ({ + ...obj, + }); +} + +/** + *

+ * The time range during which anomalous behavior in a proactive anomaly or an insight is expected to occur. + *

+ */ +export interface PredictionTimeRange { + /** + *

+ * The time range during which a metric limit is expected to be exceeded. This applies to proactive insights only. + *

+ */ + StartTime: Date | undefined; + + /** + *

+ * The time when the behavior in a proactive insight is expected to end. + *

+ */ + EndTime?: Date; +} + +export namespace PredictionTimeRange { + export const filterSensitiveLog = (obj: PredictionTimeRange): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. + * For more information, see Stacks + * in the AWS CloudFormation User Guide. + *

+ */ +export interface CloudFormationCollection { + /** + *

+ * An array of CloudFormation stack names. + *

+ */ + StackNames?: string[]; +} + +export namespace CloudFormationCollection { + export const filterSensitiveLog = (obj: CloudFormationCollection): any => ({ + ...obj, + }); +} + +/** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ +export interface ResourceCollection { + /** + *

An array of the names of AWS CloudFormation stacks. The stacks define AWS resources + * that DevOps Guru analyzes. + *

+ */ + CloudFormation?: CloudFormationCollection; +} + +export namespace ResourceCollection { + export const filterSensitiveLog = (obj: ResourceCollection): any => ({ + ...obj, + }); +} + +/** + *

Information about an anomaly. This object is returned by ListAnomalies.

+ */ +export interface ProactiveAnomaly { + /** + *

+ * The ID of a proactive anomaly. + *

+ */ + Id?: string; + + /** + *

+ * The severity of a proactive anomaly. + *

+ */ + Severity?: AnomalySeverity | string; + + /** + *

+ * The status of a proactive anomaly. + *

+ */ + Status?: AnomalyStatus | string; + + /** + *

+ * The time of the anomaly's most recent update. + *

+ */ + UpdateTime?: Date; + + /** + *

+ * A time range that specifies when the observed unusual behavior in an anomaly started and ended. + *

+ */ + AnomalyTimeRange?: AnomalyTimeRange; + + /** + *

+ * The time range during which anomalous behavior in a proactive anomaly or an insight is expected to occur. + *

+ */ + PredictionTimeRange?: PredictionTimeRange; + + /** + *

+ * Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. + *

+ */ + SourceDetails?: AnomalySourceDetails; + + /** + *

+ * The ID of the insight that contains this anomaly. An insight is composed of related anomalies. + *

+ */ + AssociatedInsightId?: string; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; + + /** + *

+ * A threshold that was exceeded by behavior in analyzed resources. Exceeding this + * threshold is related to the anomalous behavior that generated this anomaly. + *

+ */ + Limit?: number; +} + +export namespace ProactiveAnomaly { + export const filterSensitiveLog = (obj: ProactiveAnomaly): any => ({ + ...obj, + }); +} + +/** + *

Details about a reactive anomaly. This object is returned by ListAnomalies.

+ */ +export interface ReactiveAnomaly { + /** + *

The ID of the reactive anomaly.

+ */ + Id?: string; + + /** + *

The severity of the anomaly.

+ */ + Severity?: AnomalySeverity | string; + + /** + *

+ * The status of the anomaly. + *

+ */ + Status?: AnomalyStatus | string; + + /** + *

+ * A time range that specifies when the observed unusual behavior in an anomaly started and ended. + *

+ */ + AnomalyTimeRange?: AnomalyTimeRange; + + /** + *

+ * Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. + *

+ */ + SourceDetails?: AnomalySourceDetails; + + /** + *

+ * The ID of the insight that contains this anomaly. An insight is composed of related anomalies. + *

+ */ + AssociatedInsightId?: string; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; +} + +export namespace ReactiveAnomaly { + export const filterSensitiveLog = (obj: ReactiveAnomaly): any => ({ + ...obj, + }); +} + +export interface DescribeAnomalyResponse { + /** + *

+ * A ProactiveAnomaly object that represents the requested anomaly. + *

+ */ + ProactiveAnomaly?: ProactiveAnomaly; + + /** + *

+ * A ReactiveAnomaly object that represents the requested anomaly. + *

+ */ + ReactiveAnomaly?: ReactiveAnomaly; +} + +export namespace DescribeAnomalyResponse { + export const filterSensitiveLog = (obj: DescribeAnomalyResponse): any => ({ + ...obj, + }); +} + +export interface DescribeInsightRequest { + /** + *

+ * The ID of the insight. + *

+ */ + Id: string | undefined; +} + +export namespace DescribeInsightRequest { + export const filterSensitiveLog = (obj: DescribeInsightRequest): any => ({ + ...obj, + }); +} + +/** + *

A time range that specifies when the observed behavior in an insight started and + * ended.

+ */ +export interface InsightTimeRange { + /** + *

+ * The time when the behavior described in an insight started. + *

+ */ + StartTime: Date | undefined; + + /** + *

+ * The time when the behavior described in an insight ended. + *

+ */ + EndTime?: Date; +} + +export namespace InsightTimeRange { + export const filterSensitiveLog = (obj: InsightTimeRange): any => ({ + ...obj, + }); +} + +export enum InsightSeverity { + HIGH = "HIGH", + LOW = "LOW", + MEDIUM = "MEDIUM", +} + +export enum InsightStatus { + CLOSED = "CLOSED", + ONGOING = "ONGOING", +} + +/** + *

Details about a proactive insight. This object is returned by ListInsights.

+ */ +export interface ProactiveInsight { + /** + *

The ID of the proactive insight.

+ */ + Id?: string; + + /** + *

The name of the proactive insight.

+ */ + Name?: string; + + /** + *

The severity of the proactive insight.

+ */ + Severity?: InsightSeverity | string; + + /** + *

The status of the proactive insight.

+ */ + Status?: InsightStatus | string; + + /** + *

A time range that specifies when the observed behavior in an insight started and + * ended.

+ */ + InsightTimeRange?: InsightTimeRange; + + /** + *

+ * The time range during which anomalous behavior in a proactive anomaly or an insight is expected to occur. + *

+ */ + PredictionTimeRange?: PredictionTimeRange; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; + + /** + *

+ * The ID of the AWS Systems Manager OpsItem created for this insight. You must enable + * the creation of OpsItems for insights before they are created for each insight. + *

+ */ + SsmOpsItemId?: string; +} + +export namespace ProactiveInsight { + export const filterSensitiveLog = (obj: ProactiveInsight): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about a reactive insight. This object is returned by ListInsights. + *

+ */ +export interface ReactiveInsight { + /** + *

+ * The ID of a reactive insight. + *

+ */ + Id?: string; + + /** + *

+ * The name of a reactive insight. + *

+ */ + Name?: string; + + /** + *

+ * The severity of a reactive insight. + *

+ */ + Severity?: InsightSeverity | string; + + /** + *

+ * The status of a reactive insight. + *

+ */ + Status?: InsightStatus | string; + + /** + *

A time range that specifies when the observed behavior in an insight started and + * ended.

+ */ + InsightTimeRange?: InsightTimeRange; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; + + /** + *

+ * The ID of the AWS Systems Manager OpsItem created for this insight. You must enable + * the creation of OpsItems for insights before they are created for each insight. + *

+ */ + SsmOpsItemId?: string; +} + +export namespace ReactiveInsight { + export const filterSensitiveLog = (obj: ReactiveInsight): any => ({ + ...obj, + }); +} + +export interface DescribeInsightResponse { + /** + *

+ * A ProactiveInsight object that represents the requested insight. + *

+ */ + ProactiveInsight?: ProactiveInsight; + + /** + *

+ * A ReactiveInsight object that represents the requested insight. + *

+ */ + ReactiveInsight?: ReactiveInsight; +} + +export namespace DescribeInsightResponse { + export const filterSensitiveLog = (obj: DescribeInsightResponse): any => ({ + ...obj, + }); +} + +export enum ResourceCollectionType { + AWS_CLOUD_FORMATION = "AWS_CLOUD_FORMATION", +} + +export interface DescribeResourceCollectionHealthRequest { + /** + *

+ * An AWS resource collection type. This type specifies how analyzed AWS resources are defined. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollectionType: ResourceCollectionType | string | undefined; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace DescribeResourceCollectionHealthRequest { + export const filterSensitiveLog = (obj: DescribeResourceCollectionHealthRequest): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about the number of open reactive and proactive insights that can be + * used to gauge the health of your system. + *

+ */ +export interface InsightHealth { + /** + *

+ * The number of open proactive insights. + *

+ */ + OpenProactiveInsights?: number; + + /** + *

+ * The number of open reactive insights. + *

+ */ + OpenReactiveInsights?: number; + + /** + *

+ * The Mean Time to Recover (MTTR) for the insight. + *

+ */ + MeanTimeToRecoverInMilliseconds?: number; +} + +export namespace InsightHealth { + export const filterSensitiveLog = (obj: InsightHealth): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about the health of AWS resources in your account that are specified by an AWS CloudFormation stack. + *

+ */ +export interface CloudFormationHealth { + /** + *

+ * The name of the CloudFormation stack. + *

+ */ + StackName?: string; + + /** + *

+ * Information about the health of the AWS resources in your account that are specified by an AWS CloudFormation stack, including + * the number of open proactive, open reactive insights, and the Mean Time to Recover (MTTR) of closed insights. + *

+ */ + Insight?: InsightHealth; +} + +export namespace CloudFormationHealth { + export const filterSensitiveLog = (obj: CloudFormationHealth): any => ({ + ...obj, + }); +} + +export interface DescribeResourceCollectionHealthResponse { + /** + *

+ * The returned CloudFormationHealthOverview object that contains an InsightHealthOverview object with + * the requested system health information. + *

+ */ + CloudFormation: CloudFormationHealth[] | undefined; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace DescribeResourceCollectionHealthResponse { + export const filterSensitiveLog = (obj: DescribeResourceCollectionHealthResponse): any => ({ + ...obj, + }); +} + +export interface DescribeServiceIntegrationRequest {} + +export namespace DescribeServiceIntegrationRequest { + export const filterSensitiveLog = (obj: DescribeServiceIntegrationRequest): any => ({ + ...obj, + }); +} + +export enum OptInStatus { + DISABLED = "DISABLED", + ENABLED = "ENABLED", +} + +/** + *

+ * Information about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight. + *

+ */ +export interface OpsCenterIntegration { + /** + *

+ * Specifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight. + *

+ */ + OptInStatus?: OptInStatus | string; +} + +export namespace OpsCenterIntegration { + export const filterSensitiveLog = (obj: OpsCenterIntegration): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about the integration of DevOps Guru with another AWS service, such as AWS Systems Manager. + *

+ */ +export interface ServiceIntegrationConfig { + /** + *

+ * Information about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight. + *

+ */ + OpsCenter?: OpsCenterIntegration; +} + +export namespace ServiceIntegrationConfig { + export const filterSensitiveLog = (obj: ServiceIntegrationConfig): any => ({ + ...obj, + }); +} + +export interface DescribeServiceIntegrationResponse { + /** + *

+ * Information about the integration of DevOps Guru with another AWS service, such as AWS Systems Manager. + *

+ */ + ServiceIntegration?: ServiceIntegrationConfig; +} + +export namespace DescribeServiceIntegrationResponse { + export const filterSensitiveLog = (obj: DescribeServiceIntegrationResponse): any => ({ + ...obj, + }); +} + +export interface GetResourceCollectionRequest { + /** + *

+ * The type of AWS resource collections to return. The one valid value is CLOUD_FORMATION for + * AWS CloudFormation stacks. + *

+ */ + ResourceCollectionType: ResourceCollectionType | string | undefined; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace GetResourceCollectionRequest { + export const filterSensitiveLog = (obj: GetResourceCollectionRequest): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. + * For more information, see Stacks + * in the AWS CloudFormation User Guide. + *

+ */ +export interface CloudFormationCollectionFilter { + /** + *

+ * An array of CloudFormation stack names. + *

+ */ + StackNames?: string[]; +} + +export namespace CloudFormationCollectionFilter { + export const filterSensitiveLog = (obj: CloudFormationCollectionFilter): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about a filter used to specify which AWS resources are analyzed for anomalous behavior by DevOps Guru. + *

+ */ +export interface ResourceCollectionFilter { + /** + *

+ * Information about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. + * For more information, see Stacks + * in the AWS CloudFormation User Guide. + *

+ */ + CloudFormation?: CloudFormationCollectionFilter; +} + +export namespace ResourceCollectionFilter { + export const filterSensitiveLog = (obj: ResourceCollectionFilter): any => ({ + ...obj, + }); +} + +export interface GetResourceCollectionResponse { + /** + *

+ * The requested list of AWS resource collections. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollectionFilter; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace GetResourceCollectionResponse { + export const filterSensitiveLog = (obj: GetResourceCollectionResponse): any => ({ + ...obj, + }); +} + +/** + *

+ * A time range used to specify when the behavior of an insight or anomaly started. + *

+ */ +export interface StartTimeRange { + /** + *

+ * The start time of the time range. + *

+ */ + FromTime?: Date; + + /** + *

+ * The end time of the time range. + *

+ */ + ToTime?: Date; +} + +export namespace StartTimeRange { + export const filterSensitiveLog = (obj: StartTimeRange): any => ({ + ...obj, + }); +} + +export interface ListAnomaliesForInsightRequest { + /** + *

+ * The ID of the insight. The returned anomalies belong to this insight. + *

+ */ + InsightId: string | undefined; + + /** + *

+ * A time range used to specify when the requested anomalies started. All returned anomalies started + * during this time range. + *

+ */ + StartTimeRange?: StartTimeRange; + + /** + *

The maximum number of results to return with a single call. + * To retrieve the remaining results, make another call with the returned nextToken value. + * The default value is 500.

+ */ + MaxResults?: number; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace ListAnomaliesForInsightRequest { + export const filterSensitiveLog = (obj: ListAnomaliesForInsightRequest): any => ({ + ...obj, + }); +} + +/** + *

Details about a proactive anomaly. This object is returned by + * DescribeAnomaly. + *

+ */ +export interface ProactiveAnomalySummary { + /** + *

The ID of the anomaly.

+ */ + Id?: string; + + /** + *

The severity of the anomaly.

+ */ + Severity?: AnomalySeverity | string; + + /** + *

The status of the anomaly.

+ */ + Status?: AnomalyStatus | string; + + /** + *

+ * The time of the anomaly's most recent update. + *

+ */ + UpdateTime?: Date; + + /** + *

+ * A time range that specifies when the observed unusual behavior in an anomaly started and ended. + *

+ */ + AnomalyTimeRange?: AnomalyTimeRange; + + /** + *

+ * The time range during which anomalous behavior in a proactive anomaly or an insight is expected to occur. + *

+ */ + PredictionTimeRange?: PredictionTimeRange; + + /** + *

+ * Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. + *

+ */ + SourceDetails?: AnomalySourceDetails; + + /** + *

+ * The ID of the insight that contains this anomaly. An insight is composed of related anomalies. + *

+ */ + AssociatedInsightId?: string; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; + + /** + *

+ * A threshold that was exceeded by behavior in analyzed resources. Exceeding this + * threshold is related to the anomalous behavior that generated this anomaly. + *

+ */ + Limit?: number; +} + +export namespace ProactiveAnomalySummary { + export const filterSensitiveLog = (obj: ProactiveAnomalySummary): any => ({ + ...obj, + }); +} + +/** + *

Details about a reactive anomaly. This object is returned by + * DescribeAnomaly. + *

+ */ +export interface ReactiveAnomalySummary { + /** + *

+ * The ID of the reactive anomaly. + *

+ */ + Id?: string; + + /** + *

+ * The severity of the reactive anomaly. + *

+ */ + Severity?: AnomalySeverity | string; + + /** + *

+ * The status of the reactive anomaly. + *

+ */ + Status?: AnomalyStatus | string; + + /** + *

+ * A time range that specifies when the observed unusual behavior in an anomaly started and ended. + *

+ */ + AnomalyTimeRange?: AnomalyTimeRange; + + /** + *

+ * Details about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. + *

+ */ + SourceDetails?: AnomalySourceDetails; + + /** + *

+ * The ID of the insight that contains this anomaly. An insight is composed of related anomalies. + *

+ */ + AssociatedInsightId?: string; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; +} + +export namespace ReactiveAnomalySummary { + export const filterSensitiveLog = (obj: ReactiveAnomalySummary): any => ({ + ...obj, + }); +} + +export interface ListAnomaliesForInsightResponse { + /** + *

+ * An array of ProactiveAnomalySummary objects that represent the requested anomalies + *

+ */ + ProactiveAnomalies?: ProactiveAnomalySummary[]; + + /** + *

+ * An array of ReactiveAnomalySummary objects that represent the requested anomalies + *

+ */ + ReactiveAnomalies?: ReactiveAnomalySummary[]; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace ListAnomaliesForInsightResponse { + export const filterSensitiveLog = (obj: ListAnomaliesForInsightResponse): any => ({ + ...obj, + }); +} + +export enum EventDataSource { + AWS_CLOUD_TRAIL = "AWS_CLOUD_TRAIL", + AWS_CODE_DEPLOY = "AWS_CODE_DEPLOY", +} + +export enum EventClass { + CONFIG_CHANGE = "CONFIG_CHANGE", + DEPLOYMENT = "DEPLOYMENT", + INFRASTRUCTURE = "INFRASTRUCTURE", + SCHEMA_CHANGE = "SCHEMA_CHANGE", + SECURITY_CHANGE = "SECURITY_CHANGE", +} + +/** + *

+ * The time range during which an AWS event occurred. AWS resource events and metrics are analyzed by DevOps Guru to find anomalous behavior and + * provide recommendations to improve your operational solutions. + *

+ */ +export interface EventTimeRange { + /** + *

+ * The time when the event started. + *

+ */ + FromTime: Date | undefined; + + /** + *

+ * The time when the event ended. + *

+ */ + ToTime: Date | undefined; +} + +export namespace EventTimeRange { + export const filterSensitiveLog = (obj: EventTimeRange): any => ({ + ...obj, + }); +} + +/** + *

+ * Filters you can use to specify which events are returned when ListEvents is called. + *

+ */ +export interface ListEventsFilters { + /** + *

+ * An ID of an insight that is related to the events you want to filter for. + *

+ */ + InsightId?: string; + + /** + *

A time range during which you want the filtered events to have occurred.

+ */ + EventTimeRange?: EventTimeRange; + + /** + *

+ * The class of the events you want to filter for, such as an infrastructure change, a deployment, or a schema change. + *

+ */ + EventClass?: EventClass | string; + + /** + *

+ * The AWS source that emitted the events you want to filter for. + *

+ */ + EventSource?: string; + + /** + *

+ * The source, AWS_CLOUD_TRAIL or AWS_CODE_DEPLOY, of the events you want returned. + *

+ */ + DataSource?: EventDataSource | string; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; +} + +export namespace ListEventsFilters { + export const filterSensitiveLog = (obj: ListEventsFilters): any => ({ + ...obj, + }); +} + +export interface ListEventsRequest { + /** + *

+ * A ListEventsFilters object used to specify which events to return. + *

+ */ + Filters: ListEventsFilters | undefined; + + /** + *

The maximum number of results to return with a single call. + * To retrieve the remaining results, make another call with the returned nextToken value. + * The default value is 500.

+ */ + MaxResults?: number; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace ListEventsRequest { + export const filterSensitiveLog = (obj: ListEventsRequest): any => ({ + ...obj, + }); +} + +/** + *

+ * The AWS resource that emitted an event. AWS resource events and metrics are analyzed by DevOps Guru to find anomalous behavior and + * provide recommendations to improve your operational solutions. + *

+ */ +export interface EventResource { + /** + *

+ * The type of resource that emitted an event. + *

+ */ + Type?: string; + + /** + *

+ * The name of the resource that emitted an event. + *

+ */ + Name?: string; + + /** + *

+ * The Amazon Resource Name (ARN) of the resource that emitted an event. + *

+ */ + Arn?: string; +} + +export namespace EventResource { + export const filterSensitiveLog = (obj: EventResource): any => ({ + ...obj, + }); +} + +/** + *

+ * An AWS resource event. AWS resource events and metrics are analyzed by DevOps Guru to find anomalous behavior and + * provide recommendations to improve your operational solutions. + *

+ */ +export interface Event { + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; + + /** + *

+ * The ID of the event. + *

+ */ + Id?: string; + + /** + *

A Timestamp that specifies the time the event occurred.

+ */ + Time?: Date; + + /** + *

+ * The AWS source that emitted the event. + *

+ */ + EventSource?: string; + + /** + *

+ * The name of the event. + *

+ */ + Name?: string; + + /** + *

+ * The source, AWS_CLOUD_TRAIL or AWS_CODE_DEPLOY, where DevOps Guru analysis found the event. + *

+ */ + DataSource?: EventDataSource | string; + + /** + *

+ * The class of the event. The class specifies what the event is related to, such as an infrastructure change, a deployment, or a schema change. + *

+ */ + EventClass?: EventClass | string; + + /** + *

+ * An EventResource object that contains information about the resource that emitted the event. + *

+ */ + Resources?: EventResource[]; +} + +export namespace Event { + export const filterSensitiveLog = (obj: Event): any => ({ + ...obj, + }); +} + +export interface ListEventsResponse { + /** + *

+ * A list of the requested events. + *

+ */ + Events: Event[] | undefined; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace ListEventsResponse { + export const filterSensitiveLog = (obj: ListEventsResponse): any => ({ + ...obj, + }); +} + +export enum InsightType { + PROACTIVE = "PROACTIVE", + REACTIVE = "REACTIVE", +} + +/** + *

+ * Used to filter for insights that have any status. + *

+ */ +export interface ListInsightsAnyStatusFilter { + /** + *

+ * Use to filter for either REACTIVE or PROACTIVE insights. + *

+ */ + Type: InsightType | string | undefined; + + /** + *

+ * A time range used to specify when the behavior of the filtered insights started. + *

+ */ + StartTimeRange: StartTimeRange | undefined; +} + +export namespace ListInsightsAnyStatusFilter { + export const filterSensitiveLog = (obj: ListInsightsAnyStatusFilter): any => ({ + ...obj, + }); +} + +/** + *

+ * A range of time that specifies when anomalous behavior in an anomaly or insight ended. + *

+ */ +export interface EndTimeRange { + /** + *

+ * The earliest end time in the time range. + *

+ */ + FromTime?: Date; + + /** + *

+ * The latest end time in the time range. + *

+ */ + ToTime?: Date; +} + +export namespace EndTimeRange { + export const filterSensitiveLog = (obj: EndTimeRange): any => ({ + ...obj, + }); +} + +/** + *

+ * Used to filter for insights that have the status CLOSED. + *

+ */ +export interface ListInsightsClosedStatusFilter { + /** + *

+ * Use to filter for either REACTIVE or PROACTIVE insights. + *

+ */ + Type: InsightType | string | undefined; + + /** + *

+ * A time range used to specify when the behavior of the filtered insights ended. + *

+ */ + EndTimeRange: EndTimeRange | undefined; +} + +export namespace ListInsightsClosedStatusFilter { + export const filterSensitiveLog = (obj: ListInsightsClosedStatusFilter): any => ({ + ...obj, + }); +} + +/** + *

+ * Used to filter for insights that have the status ONGOING. + *

+ */ +export interface ListInsightsOngoingStatusFilter { + /** + *

+ * Use to filter for either REACTIVE or PROACTIVE insights. + *

+ */ + Type: InsightType | string | undefined; +} + +export namespace ListInsightsOngoingStatusFilter { + export const filterSensitiveLog = (obj: ListInsightsOngoingStatusFilter): any => ({ + ...obj, + }); +} + +/** + *

+ * A filter used by ListInsights to specify which insights to return. + *

+ */ +export interface ListInsightsStatusFilter { + /** + *

+ * A ListInsightsAnyStatusFilter that specifies ongoing insights + * that are either REACTIVE or PROACTIVE. + *

+ */ + Ongoing?: ListInsightsOngoingStatusFilter; + + /** + *

+ * A ListInsightsClosedStatusFilter that specifies closed insights that are + * either REACTIVE or PROACTIVE. + *

+ */ + Closed?: ListInsightsClosedStatusFilter; + + /** + *

+ * A ListInsightsAnyStatusFilter that specifies insights of any status + * that are either REACTIVE or PROACTIVE. + *

+ */ + Any?: ListInsightsAnyStatusFilter; +} + +export namespace ListInsightsStatusFilter { + export const filterSensitiveLog = (obj: ListInsightsStatusFilter): any => ({ + ...obj, + }); +} + +export interface ListInsightsRequest { + /** + *

+ * A filter used to filter the returned insights by their status. You can specify one status filter. + *

+ */ + StatusFilter: ListInsightsStatusFilter | undefined; + + /** + *

The maximum number of results to return with a single call. + * To retrieve the remaining results, make another call with the returned nextToken value. + * The default value is 500.

+ */ + MaxResults?: number; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace ListInsightsRequest { + export const filterSensitiveLog = (obj: ListInsightsRequest): any => ({ + ...obj, + }); +} + +/** + *

Details about a proactive insight. This object is returned by + * DescribeInsight. + *

+ */ +export interface ProactiveInsightSummary { + /** + *

The ID of the proactive insight.

+ */ + Id?: string; + + /** + *

The name of the proactive insight.

+ */ + Name?: string; + + /** + *

The severity of the proactive insight.

+ */ + Severity?: InsightSeverity | string; + + /** + *

The status of the proactive insight.

+ */ + Status?: InsightStatus | string; + + /** + *

A time range that specifies when the observed behavior in an insight started and + * ended.

+ */ + InsightTimeRange?: InsightTimeRange; + + /** + *

+ * The time range during which anomalous behavior in a proactive anomaly or an insight is expected to occur. + *

+ */ + PredictionTimeRange?: PredictionTimeRange; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; +} + +export namespace ProactiveInsightSummary { + export const filterSensitiveLog = (obj: ProactiveInsightSummary): any => ({ + ...obj, + }); +} + +/** + *

Information about a reactive insight. This object is returned by + * DescribeInsight. + *

+ */ +export interface ReactiveInsightSummary { + /** + *

+ * The ID of a reactive summary. + *

+ */ + Id?: string; + + /** + *

+ * The name of a reactive insight. + *

+ */ + Name?: string; + + /** + *

+ * The severity of a reactive insight. + *

+ */ + Severity?: InsightSeverity | string; + + /** + *

+ * The status of a reactive insight. + *

+ */ + Status?: InsightStatus | string; + + /** + *

A time range that specifies when the observed behavior in an insight started and + * ended.

+ */ + InsightTimeRange?: InsightTimeRange; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; +} + +export namespace ReactiveInsightSummary { + export const filterSensitiveLog = (obj: ReactiveInsightSummary): any => ({ + ...obj, + }); +} + +export interface ListInsightsResponse { + /** + *

+ * The returned list of proactive insights. + *

+ */ + ProactiveInsights?: ProactiveInsightSummary[]; + + /** + *

+ * The returned list of reactive insights. + *

+ */ + ReactiveInsights?: ReactiveInsightSummary[]; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace ListInsightsResponse { + export const filterSensitiveLog = (obj: ListInsightsResponse): any => ({ + ...obj, + }); +} + +export interface ListNotificationChannelsRequest { + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace ListNotificationChannelsRequest { + export const filterSensitiveLog = (obj: ListNotificationChannelsRequest): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about a notification channel. A notification channel is used to notify you when DevOps Guru creates an insight. + * The one + * supported notification channel is Amazon Simple Notification Service (Amazon SNS). + *

+ */ +export interface NotificationChannel { + /** + *

+ * The ID of a notification channel. + *

+ */ + Id?: string; + + /** + *

+ * A NotificationChannelConfig object that contains information about configured notification channels. + *

+ */ + Config?: NotificationChannelConfig; +} + +export namespace NotificationChannel { + export const filterSensitiveLog = (obj: NotificationChannel): any => ({ + ...obj, + }); +} + +export interface ListNotificationChannelsResponse { + /** + *

+ * An array that contains the requested notification channels. + *

+ */ + Channels?: NotificationChannel[]; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace ListNotificationChannelsResponse { + export const filterSensitiveLog = (obj: ListNotificationChannelsResponse): any => ({ + ...obj, + }); +} + +export interface ListRecommendationsRequest { + /** + *

+ * The ID of the requested insight. + *

+ */ + InsightId: string | undefined; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; +} + +export namespace ListRecommendationsRequest { + export const filterSensitiveLog = (obj: ListRecommendationsRequest): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about a resource in which DevOps Guru detected anomalous behavior. + *

+ */ +export interface RecommendationRelatedAnomalyResource { + /** + *

+ * The name of the resource. + *

+ */ + Name?: string; + + /** + *

+ * The type of the resource. + *

+ */ + Type?: string; +} + +export namespace RecommendationRelatedAnomalyResource { + export const filterSensitiveLog = (obj: RecommendationRelatedAnomalyResource): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about an Amazon CloudWatch metric that is analyzed by DevOps Guru. It is one of many analyzed metrics + * that are used to generate insights. + *

+ */ +export interface RecommendationRelatedCloudWatchMetricsSourceDetail { + /** + *

The name of the CloudWatch metric.

+ */ + MetricName?: string; + + /** + *

The namespace of the CloudWatch metric. A namespace is a container for CloudWatch metrics.

+ */ + Namespace?: string; +} + +export namespace RecommendationRelatedCloudWatchMetricsSourceDetail { + export const filterSensitiveLog = (obj: RecommendationRelatedCloudWatchMetricsSourceDetail): any => ({ + ...obj, + }); +} + +/** + *

+ * Contains an array of RecommendationRelatedCloudWatchMetricsSourceDetail objects that contain the name + * and namespace of an Amazon CloudWatch metric. + *

+ */ +export interface RecommendationRelatedAnomalySourceDetail { + /** + *

An array of CloudWatchMetricsDetail objects that contains information + * about the analyzed metrics that displayed anomalous behavior.

+ */ + CloudWatchMetrics?: RecommendationRelatedCloudWatchMetricsSourceDetail[]; +} + +export namespace RecommendationRelatedAnomalySourceDetail { + export const filterSensitiveLog = (obj: RecommendationRelatedAnomalySourceDetail): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about an anomaly that is related to a recommendation. + *

+ */ +export interface RecommendationRelatedAnomaly { + /** + *

+ * An array of objects that represent resources in which DevOps Guru detected anomalous behavior. Each object contains the name + * and type of the resource. + *

+ */ + Resources?: RecommendationRelatedAnomalyResource[]; + + /** + *

+ * Information about where the anomalous behavior related the recommendation was found. For example, details in Amazon CloudWatch metrics. + *

+ */ + SourceDetails?: RecommendationRelatedAnomalySourceDetail[]; +} + +export namespace RecommendationRelatedAnomaly { + export const filterSensitiveLog = (obj: RecommendationRelatedAnomaly): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about an AWS resource that emitted an event that is related to a recommendation in an insight. + *

+ */ +export interface RecommendationRelatedEventResource { + /** + *

+ * The name of the resource that emitted the event. This corresponds to the Name field in an + * EventResource object. + *

+ */ + Name?: string; + + /** + *

+ * The type of the resource that emitted the event. This corresponds to the Type field in an + * EventResource object. + *

+ */ + Type?: string; +} + +export namespace RecommendationRelatedEventResource { + export const filterSensitiveLog = (obj: RecommendationRelatedEventResource): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about an event that is related to a recommendation. + *

+ */ +export interface RecommendationRelatedEvent { + /** + *

+ * The name of the event. This corresponds to the Name field in an + * Event object. + *

+ */ + Name?: string; + + /** + *

+ * A ResourceCollection object that contains arrays of the names of AWS + * CloudFormation stacks. + *

+ */ + Resources?: RecommendationRelatedEventResource[]; +} + +export namespace RecommendationRelatedEvent { + export const filterSensitiveLog = (obj: RecommendationRelatedEvent): any => ({ + ...obj, + }); +} + +/** + *

Recommendation information to help you remediate detected anomalous behavior that + * generated an insight.

+ */ +export interface Recommendation { + /** + *

+ * A description of the problem. + *

+ */ + Description?: string; + + /** + *

+ * A hyperlink to information to help you address the problem. + *

+ */ + Link?: string; + + /** + *

+ * The name of the recommendation. + *

+ */ + Name?: string; + + /** + *

+ * The reason DevOps Guru flagged the anomalous behavior as a problem. + *

+ */ + Reason?: string; + + /** + *

+ * Events that are related to the problem. Use these events to learn more about what's happening and to help address the issue. + *

+ */ + RelatedEvents?: RecommendationRelatedEvent[]; + + /** + *

+ * Anomalies that are related to the problem. Use these Anomalies to learn more about what's happening and to help address the issue. + *

+ */ + RelatedAnomalies?: RecommendationRelatedAnomaly[]; +} + +export namespace Recommendation { + export const filterSensitiveLog = (obj: Recommendation): any => ({ + ...obj, + }); +} + +export interface ListRecommendationsResponse { + /** + *

+ * An array of the requested recommendations. + *

+ */ + Recommendations?: Recommendation[]; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace ListRecommendationsResponse { + export const filterSensitiveLog = (obj: ListRecommendationsResponse): any => ({ + ...obj, + }); +} + +export enum InsightFeedbackOption { + ALERT_TOO_SENSITIVE = "ALERT_TOO_SENSITIVE", + DATA_INCORRECT = "DATA_INCORRECT", + DATA_NOISY_ANOMALY = "DATA_NOISY_ANOMALY", + RECOMMENDATION_USEFUL = "RECOMMENDATION_USEFUL", + VALID_COLLECTION = "VALID_COLLECTION", +} + +/** + *

+ * Information about insight feedback received from a customer. + *

+ */ +export interface InsightFeedback { + /** + *

+ * The insight feedback ID. + *

+ */ + Id?: string; + + /** + *

+ * The feedback provided by the customer. + *

+ */ + Feedback?: InsightFeedbackOption | string; +} + +export namespace InsightFeedback { + export const filterSensitiveLog = (obj: InsightFeedback): any => ({ + ...obj, + }); +} + +export interface PutFeedbackRequest { + /** + *

+ * The feedback from customers is about the recommendations in this insight. + *

+ */ + InsightFeedback?: InsightFeedback; +} + +export namespace PutFeedbackRequest { + export const filterSensitiveLog = (obj: PutFeedbackRequest): any => ({ + ...obj, + }); +} + +export interface PutFeedbackResponse {} + +export namespace PutFeedbackResponse { + export const filterSensitiveLog = (obj: PutFeedbackResponse): any => ({ + ...obj, + }); +} + +export interface RemoveNotificationChannelRequest { + /** + *

+ * The ID of the notification channel to be removed. + *

+ */ + Id: string | undefined; +} + +export namespace RemoveNotificationChannelRequest { + export const filterSensitiveLog = (obj: RemoveNotificationChannelRequest): any => ({ + ...obj, + }); +} + +export interface RemoveNotificationChannelResponse {} + +export namespace RemoveNotificationChannelResponse { + export const filterSensitiveLog = (obj: RemoveNotificationChannelResponse): any => ({ + ...obj, + }); +} + +/** + *

+ * Specifies one or more severity values and one or more status values that are used to search + * for insights. + *

+ */ +export interface SearchInsightsFilters { + /** + *

+ * An array of severity values used to search for insights. + *

+ */ + Severities?: (InsightSeverity | string)[]; + + /** + *

+ * An array of status values used to search for insights. + *

+ */ + Statuses?: (InsightStatus | string)[]; + + /** + *

+ * A collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze + * only the AWS resources that are defined in the stacks. + *

+ */ + ResourceCollection?: ResourceCollection; +} + +export namespace SearchInsightsFilters { + export const filterSensitiveLog = (obj: SearchInsightsFilters): any => ({ + ...obj, + }); +} + +export interface SearchInsightsRequest { + /** + *

+ * The start of the time range passed in. Returned insights occurred after this time. + *

+ */ + StartTimeRange: StartTimeRange | undefined; + + /** + *

+ * A SearchInsightsFilters object that is used to set the severity and status filters on your insight search. + *

+ */ + Filters?: SearchInsightsFilters; + + /** + *

The maximum number of results to return with a single call. + * To retrieve the remaining results, make another call with the returned nextToken value. + * The default value is 500.

+ */ + MaxResults?: number; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If this value is null, it retrieves the first page.

+ */ + NextToken?: string; + + /** + *

+ * The type of insights you are searching for (REACTIVE or PROACTIVE). + *

+ */ + Type: InsightType | string | undefined; +} + +export namespace SearchInsightsRequest { + export const filterSensitiveLog = (obj: SearchInsightsRequest): any => ({ + ...obj, + }); +} + +export interface SearchInsightsResponse { + /** + *

+ * The returned proactive insights. + *

+ */ + ProactiveInsights?: ProactiveInsightSummary[]; + + /** + *

+ * The returned reactive insights. + *

+ */ + ReactiveInsights?: ReactiveInsightSummary[]; + + /** + *

The pagination token to use to retrieve + * the next page of results for this operation. If there are no more pages, this value is null.

+ */ + NextToken?: string; +} + +export namespace SearchInsightsResponse { + export const filterSensitiveLog = (obj: SearchInsightsResponse): any => ({ + ...obj, + }); +} + +export enum UpdateResourceCollectionAction { + ADD = "ADD", + REMOVE = "REMOVE", +} + +/** + *

Contains the names of AWS CloudFormation stacks used to update a collection of stacks.

+ */ +export interface UpdateCloudFormationCollectionFilter { + /** + *

+ * An array of the names of the stacks to update. + *

+ */ + StackNames?: string[]; +} + +export namespace UpdateCloudFormationCollectionFilter { + export const filterSensitiveLog = (obj: UpdateCloudFormationCollectionFilter): any => ({ + ...obj, + }); +} + +/** + *

+ * Contains information used to update a collection of AWS resources. + *

+ */ +export interface UpdateResourceCollectionFilter { + /** + *

+ * A collection of AWS CloudFormation stacks. + *

+ */ + CloudFormation?: UpdateCloudFormationCollectionFilter; +} + +export namespace UpdateResourceCollectionFilter { + export const filterSensitiveLog = (obj: UpdateResourceCollectionFilter): any => ({ + ...obj, + }); +} + +export interface UpdateResourceCollectionRequest { + /** + *

+ * Specifies whether the resource collection in the request is added to or deleted from the resource collection. + *

+ */ + Action: UpdateResourceCollectionAction | string | undefined; + + /** + *

+ * Contains information used to update a collection of AWS resources. + *

+ */ + ResourceCollection: UpdateResourceCollectionFilter | undefined; +} + +export namespace UpdateResourceCollectionRequest { + export const filterSensitiveLog = (obj: UpdateResourceCollectionRequest): any => ({ + ...obj, + }); +} + +export interface UpdateResourceCollectionResponse {} + +export namespace UpdateResourceCollectionResponse { + export const filterSensitiveLog = (obj: UpdateResourceCollectionResponse): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight. + *

+ */ +export interface OpsCenterIntegrationConfig { + /** + *

+ * Specifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight. + *

+ */ + OptInStatus?: OptInStatus | string; +} + +export namespace OpsCenterIntegrationConfig { + export const filterSensitiveLog = (obj: OpsCenterIntegrationConfig): any => ({ + ...obj, + }); +} + +/** + *

+ * Information about updating the integration status of an AWS service, such as AWS Systems Manager, with DevOps Guru. + *

+ */ +export interface UpdateServiceIntegrationConfig { + /** + *

+ * Information about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight. + *

+ */ + OpsCenter?: OpsCenterIntegrationConfig; +} + +export namespace UpdateServiceIntegrationConfig { + export const filterSensitiveLog = (obj: UpdateServiceIntegrationConfig): any => ({ + ...obj, + }); +} + +export interface UpdateServiceIntegrationRequest { + /** + *

+ * An IntegratedServiceConfig object used to specify the integrated service you want to update, and whether you + * want to update it to enabled or disabled. + *

+ */ + ServiceIntegration: UpdateServiceIntegrationConfig | undefined; +} + +export namespace UpdateServiceIntegrationRequest { + export const filterSensitiveLog = (obj: UpdateServiceIntegrationRequest): any => ({ + ...obj, + }); +} + +export interface UpdateServiceIntegrationResponse {} + +export namespace UpdateServiceIntegrationResponse { + export const filterSensitiveLog = (obj: UpdateServiceIntegrationResponse): any => ({ + ...obj, + }); +} diff --git a/clients/client-devops-guru/package.json b/clients/client-devops-guru/package.json new file mode 100644 index 000000000000..d0d78f2d51be --- /dev/null +++ b/clients/client-devops-guru/package.json @@ -0,0 +1,83 @@ +{ + "name": "@aws-sdk/client-devops-guru", + "description": "AWS SDK for JavaScript Devops Guru Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + 
"@aws-sdk/middleware-content-length": "1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + "@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": "1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + "@aws-sdk/middleware-signing": "1.0.0-rc.7", + "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-devops-guru", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-devops-guru" + } +} diff --git a/clients/client-devops-guru/pagination/DescribeResourceCollectionHealthPaginator.ts b/clients/client-devops-guru/pagination/DescribeResourceCollectionHealthPaginator.ts new file mode 100644 index 000000000000..51e89bc93c06 --- /dev/null +++ 
b/clients/client-devops-guru/pagination/DescribeResourceCollectionHealthPaginator.ts
@@ -0,0 +1,56 @@
import { DevOpsGuru } from "../DevOpsGuru";
import { DevOpsGuruClient } from "../DevOpsGuruClient";
import {
  DescribeResourceCollectionHealthCommand,
  DescribeResourceCollectionHealthCommandInput,
  DescribeResourceCollectionHealthCommandOutput,
} from "../commands/DescribeResourceCollectionHealthCommand";
import { DevOpsGuruPaginationConfiguration } from "./Interfaces";
import { Paginator } from "@aws-sdk/types";

/**
 * @private
 */
const makePagedClientRequest = async (
  client: DevOpsGuruClient,
  input: DescribeResourceCollectionHealthCommandInput,
  ...args: any
): Promise<DescribeResourceCollectionHealthCommandOutput> => {
  // @ts-ignore
  return await client.send(new DescribeResourceCollectionHealthCommand(input), ...args);
};
/**
 * @private
 */
const makePagedRequest = async (
  client: DevOpsGuru,
  input: DescribeResourceCollectionHealthCommandInput,
  ...args: any
): Promise<DescribeResourceCollectionHealthCommandOutput> => {
  // @ts-ignore
  return await client.describeResourceCollectionHealth(input, ...args);
};
/**
 * Lazily yields every page of DescribeResourceCollectionHealth results:
 * each response's NextToken is fed back into the next request until the
 * service stops returning a token.
 */
export async function* paginateDescribeResourceCollectionHealth(
  config: DevOpsGuruPaginationConfiguration,
  input: DescribeResourceCollectionHealthCommandInput,
  ...additionalArguments: any
): Paginator<DescribeResourceCollectionHealthCommandOutput> {
  // Resume from a caller-supplied token, if any.
  let token: string | undefined = config.startingToken || undefined;
  let hasNext = true;
  let page: DescribeResourceCollectionHealthCommandOutput;
  while (hasNext) {
    input.NextToken = token;
    // Dispatch through whichever client flavor the config carries.
    if (config.client instanceof DevOpsGuru) {
      page = await makePagedRequest(config.client, input, ...additionalArguments);
    } else if (config.client instanceof DevOpsGuruClient) {
      page = await makePagedClientRequest(config.client, input, ...additionalArguments);
    } else {
      throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient");
    }
    yield page;
    token = page.NextToken;
    hasNext = !!token;
  }
  // @ts-ignore
  return undefined;
}
diff --git
a/clients/client-devops-guru/pagination/GetResourceCollectionPaginator.ts b/clients/client-devops-guru/pagination/GetResourceCollectionPaginator.ts
new file mode 100644
index 000000000000..a753d647b103
--- /dev/null
+++ b/clients/client-devops-guru/pagination/GetResourceCollectionPaginator.ts
@@ -0,0 +1,56 @@
import { DevOpsGuru } from "../DevOpsGuru";
import { DevOpsGuruClient } from "../DevOpsGuruClient";
import {
  GetResourceCollectionCommand,
  GetResourceCollectionCommandInput,
  GetResourceCollectionCommandOutput,
} from "../commands/GetResourceCollectionCommand";
import { DevOpsGuruPaginationConfiguration } from "./Interfaces";
import { Paginator } from "@aws-sdk/types";

/**
 * @private
 */
const makePagedClientRequest = async (
  client: DevOpsGuruClient,
  input: GetResourceCollectionCommandInput,
  ...args: any
): Promise<GetResourceCollectionCommandOutput> => {
  // @ts-ignore
  return await client.send(new GetResourceCollectionCommand(input), ...args);
};
/**
 * @private
 */
const makePagedRequest = async (
  client: DevOpsGuru,
  input: GetResourceCollectionCommandInput,
  ...args: any
): Promise<GetResourceCollectionCommandOutput> => {
  // @ts-ignore
  return await client.getResourceCollection(input, ...args);
};
/**
 * Lazily yields every page of GetResourceCollection results, threading
 * NextToken from each response into the next request.
 */
export async function* paginateGetResourceCollection(
  config: DevOpsGuruPaginationConfiguration,
  input: GetResourceCollectionCommandInput,
  ...additionalArguments: any
): Paginator<GetResourceCollectionCommandOutput> {
  // Resume from a caller-supplied token, if any.
  let token: string | undefined = config.startingToken || undefined;
  let hasNext = true;
  let page: GetResourceCollectionCommandOutput;
  while (hasNext) {
    input.NextToken = token;
    if (config.client instanceof DevOpsGuru) {
      page = await makePagedRequest(config.client, input, ...additionalArguments);
    } else if (config.client instanceof DevOpsGuruClient) {
      page = await makePagedClientRequest(config.client, input, ...additionalArguments);
    } else {
      throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient");
    }
    yield page;
    token = page.NextToken;
    hasNext = !!token;
  }
// @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/pagination/Interfaces.ts b/clients/client-devops-guru/pagination/Interfaces.ts new file mode 100644 index 000000000000..f46acf918592 --- /dev/null +++ b/clients/client-devops-guru/pagination/Interfaces.ts @@ -0,0 +1,7 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { PaginationConfiguration } from "@aws-sdk/types"; + +export interface DevOpsGuruPaginationConfiguration extends PaginationConfiguration { + client: DevOpsGuru | DevOpsGuruClient; +} diff --git a/clients/client-devops-guru/pagination/ListAnomaliesForInsightPaginator.ts b/clients/client-devops-guru/pagination/ListAnomaliesForInsightPaginator.ts new file mode 100644 index 000000000000..26733747172e --- /dev/null +++ b/clients/client-devops-guru/pagination/ListAnomaliesForInsightPaginator.ts @@ -0,0 +1,57 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { + ListAnomaliesForInsightCommand, + ListAnomaliesForInsightCommandInput, + ListAnomaliesForInsightCommandOutput, +} from "../commands/ListAnomaliesForInsightCommand"; +import { DevOpsGuruPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: DevOpsGuruClient, + input: ListAnomaliesForInsightCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListAnomaliesForInsightCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: DevOpsGuru, + input: ListAnomaliesForInsightCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listAnomaliesForInsight(input, ...args); +}; +export async function* paginateListAnomaliesForInsight( + config: DevOpsGuruPaginationConfiguration, + input: ListAnomaliesForInsightCommandInput, + ...additionalArguments: any 
+): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListAnomaliesForInsightCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof DevOpsGuru) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof DevOpsGuruClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/pagination/ListEventsPaginator.ts b/clients/client-devops-guru/pagination/ListEventsPaginator.ts new file mode 100644 index 000000000000..e72ed9cbfb64 --- /dev/null +++ b/clients/client-devops-guru/pagination/ListEventsPaginator.ts @@ -0,0 +1,53 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { ListEventsCommand, ListEventsCommandInput, ListEventsCommandOutput } from "../commands/ListEventsCommand"; +import { DevOpsGuruPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: DevOpsGuruClient, + input: ListEventsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListEventsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: DevOpsGuru, + input: ListEventsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listEvents(input, ...args); +}; +export async function* paginateListEvents( + config: DevOpsGuruPaginationConfiguration, + input: ListEventsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = 
config.startingToken || undefined; + let hasNext = true; + let page: ListEventsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof DevOpsGuru) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof DevOpsGuruClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/pagination/ListInsightsPaginator.ts b/clients/client-devops-guru/pagination/ListInsightsPaginator.ts new file mode 100644 index 000000000000..91dd396215b1 --- /dev/null +++ b/clients/client-devops-guru/pagination/ListInsightsPaginator.ts @@ -0,0 +1,57 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { + ListInsightsCommand, + ListInsightsCommandInput, + ListInsightsCommandOutput, +} from "../commands/ListInsightsCommand"; +import { DevOpsGuruPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: DevOpsGuruClient, + input: ListInsightsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListInsightsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: DevOpsGuru, + input: ListInsightsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listInsights(input, ...args); +}; +export async function* paginateListInsights( + config: DevOpsGuruPaginationConfiguration, + input: ListInsightsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + 
let hasNext = true; + let page: ListInsightsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof DevOpsGuru) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof DevOpsGuruClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/pagination/ListNotificationChannelsPaginator.ts b/clients/client-devops-guru/pagination/ListNotificationChannelsPaginator.ts new file mode 100644 index 000000000000..a9316f6997d9 --- /dev/null +++ b/clients/client-devops-guru/pagination/ListNotificationChannelsPaginator.ts @@ -0,0 +1,56 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { + ListNotificationChannelsCommand, + ListNotificationChannelsCommandInput, + ListNotificationChannelsCommandOutput, +} from "../commands/ListNotificationChannelsCommand"; +import { DevOpsGuruPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: DevOpsGuruClient, + input: ListNotificationChannelsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListNotificationChannelsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: DevOpsGuru, + input: ListNotificationChannelsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listNotificationChannels(input, ...args); +}; +export async function* paginateListNotificationChannels( + config: DevOpsGuruPaginationConfiguration, + input: 
ListNotificationChannelsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListNotificationChannelsCommandOutput; + while (hasNext) { + input.NextToken = token; + if (config.client instanceof DevOpsGuru) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof DevOpsGuruClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/pagination/ListRecommendationsPaginator.ts b/clients/client-devops-guru/pagination/ListRecommendationsPaginator.ts new file mode 100644 index 000000000000..f68b77defbc8 --- /dev/null +++ b/clients/client-devops-guru/pagination/ListRecommendationsPaginator.ts @@ -0,0 +1,56 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { + ListRecommendationsCommand, + ListRecommendationsCommandInput, + ListRecommendationsCommandOutput, +} from "../commands/ListRecommendationsCommand"; +import { DevOpsGuruPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: DevOpsGuruClient, + input: ListRecommendationsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListRecommendationsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: DevOpsGuru, + input: ListRecommendationsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listRecommendations(input, ...args); +}; +export async function* paginateListRecommendations( + config: 
DevOpsGuruPaginationConfiguration, + input: ListRecommendationsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListRecommendationsCommandOutput; + while (hasNext) { + input.NextToken = token; + if (config.client instanceof DevOpsGuru) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof DevOpsGuruClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/pagination/SearchInsightsPaginator.ts b/clients/client-devops-guru/pagination/SearchInsightsPaginator.ts new file mode 100644 index 000000000000..e805578e6e61 --- /dev/null +++ b/clients/client-devops-guru/pagination/SearchInsightsPaginator.ts @@ -0,0 +1,57 @@ +import { DevOpsGuru } from "../DevOpsGuru"; +import { DevOpsGuruClient } from "../DevOpsGuruClient"; +import { + SearchInsightsCommand, + SearchInsightsCommandInput, + SearchInsightsCommandOutput, +} from "../commands/SearchInsightsCommand"; +import { DevOpsGuruPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: DevOpsGuruClient, + input: SearchInsightsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new SearchInsightsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: DevOpsGuru, + input: SearchInsightsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.searchInsights(input, ...args); +}; +export async function* paginateSearchInsights( + config: DevOpsGuruPaginationConfiguration, + 
input: SearchInsightsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: SearchInsightsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof DevOpsGuru) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof DevOpsGuruClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected DevOpsGuru | DevOpsGuruClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-devops-guru/protocols/Aws_restJson1.ts b/clients/client-devops-guru/protocols/Aws_restJson1.ts new file mode 100644 index 000000000000..f6635fc0c533 --- /dev/null +++ b/clients/client-devops-guru/protocols/Aws_restJson1.ts @@ -0,0 +1,3271 @@ +import { + AddNotificationChannelCommandInput, + AddNotificationChannelCommandOutput, +} from "../commands/AddNotificationChannelCommand"; +import { + DescribeAccountHealthCommandInput, + DescribeAccountHealthCommandOutput, +} from "../commands/DescribeAccountHealthCommand"; +import { + DescribeAccountOverviewCommandInput, + DescribeAccountOverviewCommandOutput, +} from "../commands/DescribeAccountOverviewCommand"; +import { DescribeAnomalyCommandInput, DescribeAnomalyCommandOutput } from "../commands/DescribeAnomalyCommand"; +import { DescribeInsightCommandInput, DescribeInsightCommandOutput } from "../commands/DescribeInsightCommand"; +import { + DescribeResourceCollectionHealthCommandInput, + DescribeResourceCollectionHealthCommandOutput, +} from "../commands/DescribeResourceCollectionHealthCommand"; +import { + DescribeServiceIntegrationCommandInput, + DescribeServiceIntegrationCommandOutput, +} from 
"../commands/DescribeServiceIntegrationCommand"; +import { + GetResourceCollectionCommandInput, + GetResourceCollectionCommandOutput, +} from "../commands/GetResourceCollectionCommand"; +import { + ListAnomaliesForInsightCommandInput, + ListAnomaliesForInsightCommandOutput, +} from "../commands/ListAnomaliesForInsightCommand"; +import { ListEventsCommandInput, ListEventsCommandOutput } from "../commands/ListEventsCommand"; +import { ListInsightsCommandInput, ListInsightsCommandOutput } from "../commands/ListInsightsCommand"; +import { + ListNotificationChannelsCommandInput, + ListNotificationChannelsCommandOutput, +} from "../commands/ListNotificationChannelsCommand"; +import { + ListRecommendationsCommandInput, + ListRecommendationsCommandOutput, +} from "../commands/ListRecommendationsCommand"; +import { PutFeedbackCommandInput, PutFeedbackCommandOutput } from "../commands/PutFeedbackCommand"; +import { + RemoveNotificationChannelCommandInput, + RemoveNotificationChannelCommandOutput, +} from "../commands/RemoveNotificationChannelCommand"; +import { SearchInsightsCommandInput, SearchInsightsCommandOutput } from "../commands/SearchInsightsCommand"; +import { + UpdateResourceCollectionCommandInput, + UpdateResourceCollectionCommandOutput, +} from "../commands/UpdateResourceCollectionCommand"; +import { + UpdateServiceIntegrationCommandInput, + UpdateServiceIntegrationCommandOutput, +} from "../commands/UpdateServiceIntegrationCommand"; +import { + AccessDeniedException, + AnomalySourceDetails, + AnomalyTimeRange, + CloudFormationCollection, + CloudFormationCollectionFilter, + CloudFormationHealth, + CloudWatchMetricsDetail, + CloudWatchMetricsDimension, + ConflictException, + EndTimeRange, + Event, + EventResource, + EventTimeRange, + InsightFeedback, + InsightHealth, + InsightSeverity, + InsightStatus, + InsightTimeRange, + InternalServerException, + ListEventsFilters, + ListInsightsAnyStatusFilter, + ListInsightsClosedStatusFilter, + 
ListInsightsOngoingStatusFilter, + ListInsightsStatusFilter, + NotificationChannel, + NotificationChannelConfig, + OpsCenterIntegration, + OpsCenterIntegrationConfig, + PredictionTimeRange, + ProactiveAnomaly, + ProactiveAnomalySummary, + ProactiveInsight, + ProactiveInsightSummary, + ReactiveAnomaly, + ReactiveAnomalySummary, + ReactiveInsight, + ReactiveInsightSummary, + Recommendation, + RecommendationRelatedAnomaly, + RecommendationRelatedAnomalyResource, + RecommendationRelatedAnomalySourceDetail, + RecommendationRelatedCloudWatchMetricsSourceDetail, + RecommendationRelatedEvent, + RecommendationRelatedEventResource, + ResourceCollection, + ResourceCollectionFilter, + ResourceNotFoundException, + SearchInsightsFilters, + ServiceIntegrationConfig, + ServiceQuotaExceededException, + SnsChannelConfig, + StartTimeRange, + ThrottlingException, + UpdateCloudFormationCollectionFilter, + UpdateResourceCollectionFilter, + UpdateServiceIntegrationConfig, + ValidationException, + ValidationExceptionField, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { + SmithyException as __SmithyException, + extendedEncodeURIComponent as __extendedEncodeURIComponent, +} from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export const serializeAws_restJson1AddNotificationChannelCommand = async ( + input: AddNotificationChannelCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/channels"; + let body: any; + body = JSON.stringify({ + ...(input.Config !== undefined && { + Config: serializeAws_restJson1NotificationChannelConfig(input.Config, context), + }), + }); + const { hostname, protocol = "https", port } = await 
context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "PUT", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeAccountHealthCommand = async ( + input: DescribeAccountHealthCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/accounts/health"; + let body: any; + body = "{}"; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeAccountOverviewCommand = async ( + input: DescribeAccountOverviewCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/accounts/overview"; + let body: any; + body = JSON.stringify({ + ...(input.FromTime !== undefined && { FromTime: Math.round(input.FromTime.getTime() / 1000) }), + ...(input.ToTime !== undefined && { ToTime: Math.round(input.ToTime.getTime() / 1000) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeAnomalyCommand = async ( + input: DescribeAnomalyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/anomalies/{Id}"; + if (input.Id !== undefined) { + const labelValue: string = input.Id; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: Id."); + } + resolvedPath = resolvedPath.replace("{Id}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: Id."); + } 
+ let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeInsightCommand = async ( + input: DescribeInsightCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/insights/{Id}"; + if (input.Id !== undefined) { + const labelValue: string = input.Id; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: Id."); + } + resolvedPath = resolvedPath.replace("{Id}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: Id."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeResourceCollectionHealthCommand = async ( + input: DescribeResourceCollectionHealthCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/accounts/health/resource-collection/{ResourceCollectionType}"; + if (input.ResourceCollectionType !== undefined) { + const labelValue: string = input.ResourceCollectionType; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ResourceCollectionType."); + } + resolvedPath = resolvedPath.replace("{ResourceCollectionType}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ResourceCollectionType."); + } + const query: any = { + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + }; + let body: any; + const { hostname, protocol = "https", port } = 
await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1DescribeServiceIntegrationCommand = async ( + input: DescribeServiceIntegrationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/service-integrations"; + let body: any; + body = "{}"; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1GetResourceCollectionCommand = async ( + input: GetResourceCollectionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/resource-collections/{ResourceCollectionType}"; + if (input.ResourceCollectionType !== undefined) { + const labelValue: string = input.ResourceCollectionType; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ResourceCollectionType."); + } + resolvedPath = resolvedPath.replace("{ResourceCollectionType}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ResourceCollectionType."); + } + const query: any = { + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1ListAnomaliesForInsightCommand = async ( + input: ListAnomaliesForInsightCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": 
"application/json", + }; + let resolvedPath = "/anomalies/insight/{InsightId}"; + if (input.InsightId !== undefined) { + const labelValue: string = input.InsightId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: InsightId."); + } + resolvedPath = resolvedPath.replace("{InsightId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: InsightId."); + } + let body: any; + body = JSON.stringify({ + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.StartTimeRange !== undefined && { + StartTimeRange: serializeAws_restJson1StartTimeRange(input.StartTimeRange, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1ListEventsCommand = async ( + input: ListEventsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/events"; + let body: any; + body = JSON.stringify({ + ...(input.Filters !== undefined && { Filters: serializeAws_restJson1ListEventsFilters(input.Filters, context) }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1ListInsightsCommand = async ( + input: ListInsightsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", 
+  };
+  let resolvedPath = "/insights";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }),
+    ...(input.NextToken !== undefined && { NextToken: input.NextToken }),
+    ...(input.StatusFilter !== undefined && {
+      StatusFilter: serializeAws_restJson1ListInsightsStatusFilter(input.StatusFilter, context),
+    }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "POST",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes a ListNotificationChannels request: optional NextToken in a JSON
+// body, POSTed to /channels.
+export const serializeAws_restJson1ListNotificationChannelsCommand = async (
+  input: ListNotificationChannelsCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "application/json",
+  };
+  let resolvedPath = "/channels";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.NextToken !== undefined && { NextToken: input.NextToken }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "POST",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes a ListRecommendations request: InsightId/NextToken in a JSON
+// body, POSTed to /recommendations.
+export const serializeAws_restJson1ListRecommendationsCommand = async (
+  input: ListRecommendationsCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "application/json",
+  };
+  let resolvedPath = "/recommendations";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.InsightId !== undefined && { InsightId: input.InsightId }),
+    ...(input.NextToken !== undefined && { NextToken: input.NextToken }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "POST",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes a PutFeedback request: the InsightFeedback shape is serialized
+// by its own helper and PUT to /feedback.
+export const serializeAws_restJson1PutFeedbackCommand = async (
+  input: PutFeedbackCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "application/json",
+  };
+  let resolvedPath = "/feedback";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.InsightFeedback !== undefined && {
+      InsightFeedback: serializeAws_restJson1InsightFeedback(input.InsightFeedback, context),
+    }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "PUT",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes a RemoveNotificationChannel request: Id is a required HTTP path
+// label on DELETE /channels/{Id}; there is no request body.
+export const serializeAws_restJson1RemoveNotificationChannelCommand = async (
+  input: RemoveNotificationChannelCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "",
+  };
+  let resolvedPath = "/channels/{Id}";
+  if (input.Id !== undefined) {
+    const labelValue: string = input.Id;
+    // Path labels must be non-empty; an empty segment would change the route.
+    if (labelValue.length <= 0) {
+      throw new Error("Empty value provided for input HTTP label: Id.");
+    }
+    resolvedPath = resolvedPath.replace("{Id}", __extendedEncodeURIComponent(labelValue));
+  } else {
+    throw new Error("No value provided for input HTTP label: Id.");
+  }
+  let body: any;
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "DELETE",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes a SearchInsights request: filter/paging/time-range shapes are
+// serialized by their helpers and POSTed to /insights/search.
+export const serializeAws_restJson1SearchInsightsCommand = async (
+  input: SearchInsightsCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "application/json",
+  };
+  let resolvedPath = "/insights/search";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.Filters !== undefined && {
+      Filters: serializeAws_restJson1SearchInsightsFilters(input.Filters, context),
+    }),
+    ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }),
+    ...(input.NextToken !== undefined && { NextToken: input.NextToken }),
+    ...(input.StartTimeRange !== undefined && {
+      StartTimeRange: serializeAws_restJson1StartTimeRange(input.StartTimeRange, context),
+    }),
+    ...(input.Type !== undefined && { Type: input.Type }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "POST",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes an UpdateResourceCollection request: Action plus the
+// ResourceCollection filter shape, PUT to /resource-collections.
+export const serializeAws_restJson1UpdateResourceCollectionCommand = async (
+  input: UpdateResourceCollectionCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "application/json",
+  };
+  let resolvedPath = "/resource-collections";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.Action !== undefined && { Action: input.Action }),
+    ...(input.ResourceCollection !== undefined && {
+      ResourceCollection: serializeAws_restJson1UpdateResourceCollectionFilter(input.ResourceCollection, context),
+    }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "PUT",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+// Serializes an UpdateServiceIntegration request: the ServiceIntegration
+// config shape, PUT to /service-integrations.
+export const serializeAws_restJson1UpdateServiceIntegrationCommand = async (
+  input: UpdateServiceIntegrationCommandInput,
+  context: __SerdeContext
+): Promise<__HttpRequest> => {
+  const headers: any = {
+    "Content-Type": "application/json",
+  };
+  let resolvedPath = "/service-integrations";
+  let body: any;
+  body = JSON.stringify({
+    ...(input.ServiceIntegration !== undefined && {
+      ServiceIntegration: serializeAws_restJson1UpdateServiceIntegrationConfig(input.ServiceIntegration, context),
+    }),
+  });
+  const { hostname, protocol = "https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "PUT",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+export const deserializeAws_restJson1AddNotificationChannelCommand = async (
+  output: 
__HttpResponse,
+  context: __SerdeContext
+): Promise<AddNotificationChannelCommandOutput> => {
+  // Non-2xx (and non-200 informational) responses go to the error deserializer.
+  if (output.statusCode !== 200 && output.statusCode >= 300) {
+    return deserializeAws_restJson1AddNotificationChannelCommandError(output, context);
+  }
+  const contents: AddNotificationChannelCommandOutput = {
+    $metadata: deserializeMetadata(output),
+    Id: undefined,
+  };
+  const data: any = await parseBody(output.body, context);
+  if (data.Id !== undefined && data.Id !== null) {
+    contents.Id = data.Id;
+  }
+  return Promise.resolve(contents);
+};
+
+// Maps a modeled AddNotificationChannel error response onto a rejected,
+// named Error; unmodeled codes fall through to the generic default branch.
+const deserializeAws_restJson1AddNotificationChannelCommandError = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<AddNotificationChannelCommandOutput> => {
+  const parsedOutput: any = {
+    ...output,
+    body: await parseBody(output.body, context),
+  };
+  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
+  let errorCode: string = "UnknownError";
+  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+  switch (errorCode) {
+    case "AccessDeniedException":
+    case "com.amazonaws.devopsguru#AccessDeniedException":
+      response = {
+        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ConflictException":
+    case "com.amazonaws.devopsguru#ConflictException":
+      response = {
+        ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "InternalServerException":
+    case "com.amazonaws.devopsguru#InternalServerException":
+      response = {
+        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ResourceNotFoundException":
+    case "com.amazonaws.devopsguru#ResourceNotFoundException":
+      response = {
+        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ServiceQuotaExceededException":
+    case "com.amazonaws.devopsguru#ServiceQuotaExceededException":
+      response = {
+        ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ThrottlingException":
+    case "com.amazonaws.devopsguru#ThrottlingException":
+      response = {
+        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ValidationException":
+    case "com.amazonaws.devopsguru#ValidationException":
+      response = {
+        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    default:
+      const parsedBody = parsedOutput.body;
+      errorCode = parsedBody.code || parsedBody.Code || errorCode;
+      response = {
+        ...parsedBody,
+        name: `${errorCode}`,
+        message: parsedBody.message || parsedBody.Message || errorCode,
+        $fault: "client",
+        $metadata: deserializeMetadata(output),
+      } as any;
+  }
+  const message = response.message || response.Message || errorCode;
+  response.message = message;
+  delete response.Message;
+  return Promise.reject(Object.assign(new Error(message), response));
+};
+
+// Deserializes DescribeAccountHealth: three scalar counters copied from the
+// parsed JSON body when present.
+export const deserializeAws_restJson1DescribeAccountHealthCommand = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DescribeAccountHealthCommandOutput> => {
+  if (output.statusCode !== 200 && output.statusCode >= 300) {
+    return deserializeAws_restJson1DescribeAccountHealthCommandError(output, context);
+  }
+  const contents: DescribeAccountHealthCommandOutput = {
+    $metadata: deserializeMetadata(output),
+    MetricsAnalyzed: undefined,
+    OpenProactiveInsights: undefined,
+    OpenReactiveInsights: undefined,
+  };
+  const data: any = await parseBody(output.body, context);
+  if (data.MetricsAnalyzed !== undefined && data.MetricsAnalyzed !== null) {
+    contents.MetricsAnalyzed = data.MetricsAnalyzed;
+  }
+  if (data.OpenProactiveInsights !== undefined && data.OpenProactiveInsights !== null) {
+    contents.OpenProactiveInsights = data.OpenProactiveInsights;
+  }
+  if (data.OpenReactiveInsights !== undefined && data.OpenReactiveInsights !== null) {
+    contents.OpenReactiveInsights = data.OpenReactiveInsights;
+  }
+  return Promise.resolve(contents);
+};
+
+// Maps a modeled DescribeAccountHealth error response onto a rejected,
+// named Error.
+const deserializeAws_restJson1DescribeAccountHealthCommandError = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DescribeAccountHealthCommandOutput> => {
+  const parsedOutput: any = {
+    ...output,
+    body: await parseBody(output.body, context),
+  };
+  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
+  let errorCode: string = "UnknownError";
+  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+  switch (errorCode) {
+    case "AccessDeniedException":
+    case "com.amazonaws.devopsguru#AccessDeniedException":
+      response = {
+        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "InternalServerException":
+    case "com.amazonaws.devopsguru#InternalServerException":
+      response = {
+        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ThrottlingException":
+    case "com.amazonaws.devopsguru#ThrottlingException":
+      response = {
+        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ValidationException":
+    case "com.amazonaws.devopsguru#ValidationException":
+      response = {
+        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    default:
+      const parsedBody = 
parsedOutput.body;
+      errorCode = parsedBody.code || parsedBody.Code || errorCode;
+      response = {
+        ...parsedBody,
+        name: `${errorCode}`,
+        message: parsedBody.message || parsedBody.Message || errorCode,
+        $fault: "client",
+        $metadata: deserializeMetadata(output),
+      } as any;
+  }
+  const message = response.message || response.Message || errorCode;
+  response.message = message;
+  delete response.Message;
+  return Promise.reject(Object.assign(new Error(message), response));
+};
+
+// Deserializes DescribeAccountOverview: three scalar fields copied from the
+// parsed JSON body when present.
+export const deserializeAws_restJson1DescribeAccountOverviewCommand = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DescribeAccountOverviewCommandOutput> => {
+  if (output.statusCode !== 200 && output.statusCode >= 300) {
+    return deserializeAws_restJson1DescribeAccountOverviewCommandError(output, context);
+  }
+  const contents: DescribeAccountOverviewCommandOutput = {
+    $metadata: deserializeMetadata(output),
+    MeanTimeToRecoverInMilliseconds: undefined,
+    ProactiveInsights: undefined,
+    ReactiveInsights: undefined,
+  };
+  const data: any = await parseBody(output.body, context);
+  if (data.MeanTimeToRecoverInMilliseconds !== undefined && data.MeanTimeToRecoverInMilliseconds !== null) {
+    contents.MeanTimeToRecoverInMilliseconds = data.MeanTimeToRecoverInMilliseconds;
+  }
+  if (data.ProactiveInsights !== undefined && data.ProactiveInsights !== null) {
+    contents.ProactiveInsights = data.ProactiveInsights;
+  }
+  if (data.ReactiveInsights !== undefined && data.ReactiveInsights !== null) {
+    contents.ReactiveInsights = data.ReactiveInsights;
+  }
+  return Promise.resolve(contents);
+};
+
+// Maps a modeled DescribeAccountOverview error response onto a rejected,
+// named Error.
+const deserializeAws_restJson1DescribeAccountOverviewCommandError = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DescribeAccountOverviewCommandOutput> => {
+  const parsedOutput: any = {
+    ...output,
+    body: await parseBody(output.body, context),
+  };
+  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
+  let errorCode: string = "UnknownError";
+  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+  switch (errorCode) {
+    case "AccessDeniedException":
+    case "com.amazonaws.devopsguru#AccessDeniedException":
+      response = {
+        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "InternalServerException":
+    case "com.amazonaws.devopsguru#InternalServerException":
+      response = {
+        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ThrottlingException":
+    case "com.amazonaws.devopsguru#ThrottlingException":
+      response = {
+        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ValidationException":
+    case "com.amazonaws.devopsguru#ValidationException":
+      response = {
+        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    default:
+      const parsedBody = parsedOutput.body;
+      errorCode = parsedBody.code || parsedBody.Code || errorCode;
+      response = {
+        ...parsedBody,
+        name: `${errorCode}`,
+        message: parsedBody.message || parsedBody.Message || errorCode,
+        $fault: "client",
+        $metadata: deserializeMetadata(output),
+      } as any;
+  }
+  const message = response.message || response.Message || errorCode;
+  response.message = message;
+  delete response.Message;
+  return Promise.reject(Object.assign(new Error(message), response));
+};
+
+// Deserializes DescribeAnomaly: the two anomaly shapes are deserialized by
+// their own helpers when present in the body.
+export const deserializeAws_restJson1DescribeAnomalyCommand = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DescribeAnomalyCommandOutput> => {
+  if (output.statusCode !== 200 && output.statusCode >= 300) {
+    return deserializeAws_restJson1DescribeAnomalyCommandError(output, context);
+  }
+  const contents: DescribeAnomalyCommandOutput = {
+    $metadata: deserializeMetadata(output),
+    ProactiveAnomaly: undefined,
+    ReactiveAnomaly: undefined,
+  };
+  const data: any = await parseBody(output.body, context);
+  if (data.ProactiveAnomaly !== undefined && data.ProactiveAnomaly !== null) {
+    contents.ProactiveAnomaly = deserializeAws_restJson1ProactiveAnomaly(data.ProactiveAnomaly, context);
+  }
+  if (data.ReactiveAnomaly !== undefined && data.ReactiveAnomaly !== null) {
+    contents.ReactiveAnomaly = deserializeAws_restJson1ReactiveAnomaly(data.ReactiveAnomaly, context);
+  }
+  return Promise.resolve(contents);
+};
+
+// Maps a modeled DescribeAnomaly error response onto a rejected, named Error.
+const deserializeAws_restJson1DescribeAnomalyCommandError = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DescribeAnomalyCommandOutput> => {
+  const parsedOutput: any = {
+    ...output,
+    body: await parseBody(output.body, context),
+  };
+  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
+  let errorCode: string = "UnknownError";
+  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+  switch (errorCode) {
+    case "AccessDeniedException":
+    case "com.amazonaws.devopsguru#AccessDeniedException":
+      response = {
+        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "InternalServerException":
+    case "com.amazonaws.devopsguru#InternalServerException":
+      response = {
+        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ResourceNotFoundException":
+    case "com.amazonaws.devopsguru#ResourceNotFoundException":
+      response = {
+        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ThrottlingException":
+    case "com.amazonaws.devopsguru#ThrottlingException":
+      response = {
+        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
+        name: 
errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.devopsguru#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DescribeInsightCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DescribeInsightCommandError(output, context); + } + const contents: DescribeInsightCommandOutput = { + $metadata: deserializeMetadata(output), + ProactiveInsight: undefined, + ReactiveInsight: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ProactiveInsight !== undefined && data.ProactiveInsight !== null) { + contents.ProactiveInsight = deserializeAws_restJson1ProactiveInsight(data.ProactiveInsight, context); + } + if (data.ReactiveInsight !== undefined && data.ReactiveInsight !== null) { + contents.ReactiveInsight = deserializeAws_restJson1ReactiveInsight(data.ReactiveInsight, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DescribeInsightCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + 
let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.devopsguru#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.devopsguru#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.devopsguru#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.devopsguru#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.devopsguru#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete 
response.Message;
  // (end of deserializeAws_restJson1DescribeInsightCommandError)
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful DescribeResourceCollectionHealth response
// (CloudFormation health list + pagination NextToken).
export const deserializeAws_restJson1DescribeResourceCollectionHealthCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1DescribeResourceCollectionHealthCommandError(output, context);
  }
  const contents: DescribeResourceCollectionHealthCommandOutput = {
    $metadata: deserializeMetadata(output),
    CloudFormation: undefined,
    NextToken: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.CloudFormation !== undefined && data.CloudFormation !== null) {
    contents.CloudFormation = deserializeAws_restJson1CloudFormationHealths(data.CloudFormation, context);
  }
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  return Promise.resolve(contents);
};

// Error path for DescribeResourceCollectionHealth. Note: this operation does
// not model ResourceNotFoundException, unlike most siblings in this file.
const deserializeAws_restJson1DescribeResourceCollectionHealthCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful DescribeServiceIntegration response
// (single ServiceIntegration config member).
export const deserializeAws_restJson1DescribeServiceIntegrationCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1DescribeServiceIntegrationCommandError(output, context);
  }
  const contents: DescribeServiceIntegrationCommandOutput = {
    $metadata: deserializeMetadata(output),
    ServiceIntegration: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.ServiceIntegration !== undefined && data.ServiceIntegration !== null) {
    contents.ServiceIntegration = deserializeAws_restJson1ServiceIntegrationConfig(data.ServiceIntegration, context);
  }
  return Promise.resolve(contents);
};

// Error path for DescribeServiceIntegration (no ResourceNotFoundException).
const deserializeAws_restJson1DescribeServiceIntegrationCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful GetResourceCollection response
// (continues on the next chunk line).
export const deserializeAws_restJson1GetResourceCollectionCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if
(output.statusCode !== 200 && output.statusCode >= 300) {
    // (continuation of deserializeAws_restJson1GetResourceCollectionCommand)
    return deserializeAws_restJson1GetResourceCollectionCommandError(output, context);
  }
  const contents: GetResourceCollectionCommandOutput = {
    $metadata: deserializeMetadata(output),
    NextToken: undefined,
    ResourceCollection: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  if (data.ResourceCollection !== undefined && data.ResourceCollection !== null) {
    contents.ResourceCollection = deserializeAws_restJson1ResourceCollectionFilter(data.ResourceCollection, context);
  }
  return Promise.resolve(contents);
};

// Error path for GetResourceCollection (includes ResourceNotFoundException).
const deserializeAws_restJson1GetResourceCollectionCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.devopsguru#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful ListAnomaliesForInsight response
// (NextToken + proactive/reactive anomaly lists).
export const deserializeAws_restJson1ListAnomaliesForInsightCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1ListAnomaliesForInsightCommandError(output, context);
  }
  const contents: ListAnomaliesForInsightCommandOutput = {
    $metadata: deserializeMetadata(output),
    NextToken: undefined,
    ProactiveAnomalies: undefined,
    ReactiveAnomalies: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  if (data.ProactiveAnomalies !== undefined && data.ProactiveAnomalies !== null) {
    contents.ProactiveAnomalies = deserializeAws_restJson1ProactiveAnomalies(data.ProactiveAnomalies, context);
  }
  if (data.ReactiveAnomalies !== undefined && data.ReactiveAnomalies !== null) {
    contents.ReactiveAnomalies = deserializeAws_restJson1ReactiveAnomalies(data.ReactiveAnomalies, context);
  }
  return Promise.resolve(contents);
};

// Error path for ListAnomaliesForInsight (includes ResourceNotFoundException).
const deserializeAws_restJson1ListAnomaliesForInsightCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.devopsguru#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody =
parsedOutput.body;
      // (continuation of the ListAnomaliesForInsightCommandError default case)
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful ListEvents response (Events list + NextToken).
export const deserializeAws_restJson1ListEventsCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1ListEventsCommandError(output, context);
  }
  const contents: ListEventsCommandOutput = {
    $metadata: deserializeMetadata(output),
    Events: undefined,
    NextToken: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.Events !== undefined && data.Events !== null) {
    contents.Events = deserializeAws_restJson1Events(data.Events, context);
  }
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  return Promise.resolve(contents);
};

// Error path for ListEvents (includes ResourceNotFoundException).
const deserializeAws_restJson1ListEventsCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.devopsguru#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful ListInsights response
// (NextToken + proactive/reactive insight lists).
export const deserializeAws_restJson1ListInsightsCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1ListInsightsCommandError(output, context);
  }
  const contents: ListInsightsCommandOutput = {
    $metadata: deserializeMetadata(output),
    NextToken: undefined,
    ProactiveInsights: undefined,
    ReactiveInsights: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  if (data.ProactiveInsights !== undefined && data.ProactiveInsights !== null) {
    contents.ProactiveInsights = deserializeAws_restJson1ProactiveInsights(data.ProactiveInsights, context);
  }
  if (data.ReactiveInsights !== undefined && data.ReactiveInsights !== null) {
    contents.ReactiveInsights = deserializeAws_restJson1ReactiveInsights(data.ReactiveInsights, context);
  }
  return Promise.resolve(contents);
};

// Error path for ListInsights (no ResourceNotFoundException).
const deserializeAws_restJson1ListInsightsCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await
deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        // (continuation of the ListInsightsCommandError ValidationException case)
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful ListNotificationChannels response
// (Channels list + NextToken).
export const deserializeAws_restJson1ListNotificationChannelsCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1ListNotificationChannelsCommandError(output, context);
  }
  const contents: ListNotificationChannelsCommandOutput = {
    $metadata: deserializeMetadata(output),
    Channels: undefined,
    NextToken: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.Channels !== undefined && data.Channels !== null) {
    contents.Channels = deserializeAws_restJson1Channels(data.Channels, context);
  }
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  return Promise.resolve(contents);
};

// Error path for ListNotificationChannels (no ResourceNotFoundException).
const deserializeAws_restJson1ListNotificationChannelsCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful ListRecommendations response
// (NextToken + Recommendations list).
export const deserializeAws_restJson1ListRecommendationsCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1ListRecommendationsCommandError(output, context);
  }
  const contents: ListRecommendationsCommandOutput = {
    $metadata: deserializeMetadata(output),
    NextToken: undefined,
    Recommendations: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  if (data.Recommendations !== undefined && data.Recommendations !== null) {
    contents.Recommendations = deserializeAws_restJson1Recommendations(data.Recommendations, context);
  }
  return Promise.resolve(contents);
};

// Error path for ListRecommendations (includes ResourceNotFoundException).
const deserializeAws_restJson1ListRecommendationsCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.devopsguru#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case
"com.amazonaws.devopsguru#ValidationException":
      // (continuation of the ListRecommendationsCommandError ValidationException case)
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful PutFeedback response. The operation has no
// modeled output members, so the body is drained (collectBody) and only
// $metadata is returned.
export const deserializeAws_restJson1PutFeedbackCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1PutFeedbackCommandError(output, context);
  }
  const contents: PutFeedbackCommandOutput = {
    $metadata: deserializeMetadata(output),
  };
  await collectBody(output.body, context);
  return Promise.resolve(contents);
};

// Error path for PutFeedback. This mutating operation additionally models
// ConflictException on top of the usual set.
const deserializeAws_restJson1PutFeedbackCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ConflictException":
    case "com.amazonaws.devopsguru#ConflictException":
      response = {
        ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.devopsguru#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful RemoveNotificationChannel response
// (no modeled output members; body is drained).
export const deserializeAws_restJson1RemoveNotificationChannelCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1RemoveNotificationChannelCommandError(output, context);
  }
  const contents: RemoveNotificationChannelCommandOutput = {
    $metadata: deserializeMetadata(output),
  };
  await collectBody(output.body, context);
  return Promise.resolve(contents);
};

// Error path for RemoveNotificationChannel (also models ConflictException).
const deserializeAws_restJson1RemoveNotificationChannelCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ConflictException":
    case "com.amazonaws.devopsguru#ConflictException":
      response = {
        ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.devopsguru#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput,
context)),
        // (continuation of the RemoveNotificationChannelCommandError ThrottlingException case)
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful SearchInsights response
// (NextToken + proactive/reactive insight lists).
export const deserializeAws_restJson1SearchInsightsCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1SearchInsightsCommandError(output, context);
  }
  const contents: SearchInsightsCommandOutput = {
    $metadata: deserializeMetadata(output),
    NextToken: undefined,
    ProactiveInsights: undefined,
    ReactiveInsights: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.NextToken !== undefined && data.NextToken !== null) {
    contents.NextToken = data.NextToken;
  }
  if (data.ProactiveInsights !== undefined && data.ProactiveInsights !== null) {
    contents.ProactiveInsights = deserializeAws_restJson1ProactiveInsights(data.ProactiveInsights, context);
  }
  if (data.ReactiveInsights !== undefined && data.ReactiveInsights !== null) {
    contents.ReactiveInsights = deserializeAws_restJson1ReactiveInsights(data.ReactiveInsights, context);
  }
  return Promise.resolve(contents);
};

// Error path for SearchInsights (no ResourceNotFoundException).
const deserializeAws_restJson1SearchInsightsCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.devopsguru#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

// Deserializes a successful UpdateResourceCollection response
// (no modeled output members; body is drained).
export const deserializeAws_restJson1UpdateResourceCollectionCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1UpdateResourceCollectionCommandError(output, context);
  }
  const contents: UpdateResourceCollectionCommandOutput = {
    $metadata: deserializeMetadata(output),
  };
  await collectBody(output.body, context);
  return Promise.resolve(contents);
};

// Error path for UpdateResourceCollection (also models ConflictException).
// NOTE: the remainder of this function continues beyond this chunk.
const deserializeAws_restJson1UpdateResourceCollectionCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.devopsguru#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ConflictException":
    case "com.amazonaws.devopsguru#ConflictException":
      response = {
        ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.devopsguru#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.devopsguru#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
+ case "ValidationException": + case "com.amazonaws.devopsguru#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UpdateServiceIntegrationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateServiceIntegrationCommandError(output, context); + } + const contents: UpdateServiceIntegrationCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateServiceIntegrationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.devopsguru#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + 
case "ConflictException": + case "com.amazonaws.devopsguru#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.devopsguru#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.devopsguru#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.devopsguru#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +const deserializeAws_restJson1AccessDeniedExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: AccessDeniedException = { + name: "AccessDeniedException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + 
contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ConflictExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ConflictException = { + name: "ConflictException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + ResourceId: undefined, + ResourceType: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.ResourceId !== undefined && data.ResourceId !== null) { + contents.ResourceId = data.ResourceId; + } + if (data.ResourceType !== undefined && data.ResourceType !== null) { + contents.ResourceType = data.ResourceType; + } + return contents; +}; + +const deserializeAws_restJson1InternalServerExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InternalServerException = { + name: "InternalServerException", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + RetryAfterSeconds: undefined, + }; + if (parsedOutput.headers["retry-after"] !== undefined) { + contents.RetryAfterSeconds = parseInt(parsedOutput.headers["retry-after"], 10); + } + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ResourceNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ResourceNotFoundException = { + name: "ResourceNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + ResourceId: undefined, + ResourceType: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.ResourceId !== 
undefined && data.ResourceId !== null) { + contents.ResourceId = data.ResourceId; + } + if (data.ResourceType !== undefined && data.ResourceType !== null) { + contents.ResourceType = data.ResourceType; + } + return contents; +}; + +const deserializeAws_restJson1ServiceQuotaExceededExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ServiceQuotaExceededException = { + name: "ServiceQuotaExceededException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ThrottlingExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ThrottlingException = { + name: "ThrottlingException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + QuotaCode: undefined, + RetryAfterSeconds: undefined, + ServiceCode: undefined, + }; + if (parsedOutput.headers["retry-after"] !== undefined) { + contents.RetryAfterSeconds = parseInt(parsedOutput.headers["retry-after"], 10); + } + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.QuotaCode !== undefined && data.QuotaCode !== null) { + contents.QuotaCode = data.QuotaCode; + } + if (data.ServiceCode !== undefined && data.ServiceCode !== null) { + contents.ServiceCode = data.ServiceCode; + } + return contents; +}; + +const deserializeAws_restJson1ValidationExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ValidationException = { + name: "ValidationException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Fields: undefined, + Message: undefined, + Reason: undefined, + }; + const data: 
any = parsedOutput.body; + if (data.Fields !== undefined && data.Fields !== null) { + contents.Fields = deserializeAws_restJson1ValidationExceptionFields(data.Fields, context); + } + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.Reason !== undefined && data.Reason !== null) { + contents.Reason = data.Reason; + } + return contents; +}; + +const serializeAws_restJson1CloudFormationCollection = ( + input: CloudFormationCollection, + context: __SerdeContext +): any => { + return { + ...(input.StackNames !== undefined && { StackNames: serializeAws_restJson1StackNames(input.StackNames, context) }), + }; +}; + +const serializeAws_restJson1EndTimeRange = (input: EndTimeRange, context: __SerdeContext): any => { + return { + ...(input.FromTime !== undefined && { FromTime: Math.round(input.FromTime.getTime() / 1000) }), + ...(input.ToTime !== undefined && { ToTime: Math.round(input.ToTime.getTime() / 1000) }), + }; +}; + +const serializeAws_restJson1EventTimeRange = (input: EventTimeRange, context: __SerdeContext): any => { + return { + ...(input.FromTime !== undefined && { FromTime: Math.round(input.FromTime.getTime() / 1000) }), + ...(input.ToTime !== undefined && { ToTime: Math.round(input.ToTime.getTime() / 1000) }), + }; +}; + +const serializeAws_restJson1InsightFeedback = (input: InsightFeedback, context: __SerdeContext): any => { + return { + ...(input.Feedback !== undefined && { Feedback: input.Feedback }), + ...(input.Id !== undefined && { Id: input.Id }), + }; +}; + +const serializeAws_restJson1InsightSeverities = (input: (InsightSeverity | string)[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_restJson1InsightStatuses = (input: (InsightStatus | string)[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_restJson1ListEventsFilters = (input: ListEventsFilters, context: __SerdeContext): any => { + 
return { + ...(input.DataSource !== undefined && { DataSource: input.DataSource }), + ...(input.EventClass !== undefined && { EventClass: input.EventClass }), + ...(input.EventSource !== undefined && { EventSource: input.EventSource }), + ...(input.EventTimeRange !== undefined && { + EventTimeRange: serializeAws_restJson1EventTimeRange(input.EventTimeRange, context), + }), + ...(input.InsightId !== undefined && { InsightId: input.InsightId }), + ...(input.ResourceCollection !== undefined && { + ResourceCollection: serializeAws_restJson1ResourceCollection(input.ResourceCollection, context), + }), + }; +}; + +const serializeAws_restJson1ListInsightsAnyStatusFilter = ( + input: ListInsightsAnyStatusFilter, + context: __SerdeContext +): any => { + return { + ...(input.StartTimeRange !== undefined && { + StartTimeRange: serializeAws_restJson1StartTimeRange(input.StartTimeRange, context), + }), + ...(input.Type !== undefined && { Type: input.Type }), + }; +}; + +const serializeAws_restJson1ListInsightsClosedStatusFilter = ( + input: ListInsightsClosedStatusFilter, + context: __SerdeContext +): any => { + return { + ...(input.EndTimeRange !== undefined && { + EndTimeRange: serializeAws_restJson1EndTimeRange(input.EndTimeRange, context), + }), + ...(input.Type !== undefined && { Type: input.Type }), + }; +}; + +const serializeAws_restJson1ListInsightsOngoingStatusFilter = ( + input: ListInsightsOngoingStatusFilter, + context: __SerdeContext +): any => { + return { + ...(input.Type !== undefined && { Type: input.Type }), + }; +}; + +const serializeAws_restJson1ListInsightsStatusFilter = ( + input: ListInsightsStatusFilter, + context: __SerdeContext +): any => { + return { + ...(input.Any !== undefined && { Any: serializeAws_restJson1ListInsightsAnyStatusFilter(input.Any, context) }), + ...(input.Closed !== undefined && { + Closed: serializeAws_restJson1ListInsightsClosedStatusFilter(input.Closed, context), + }), + ...(input.Ongoing !== undefined && { + Ongoing: 
serializeAws_restJson1ListInsightsOngoingStatusFilter(input.Ongoing, context), + }), + }; +}; + +const serializeAws_restJson1NotificationChannelConfig = ( + input: NotificationChannelConfig, + context: __SerdeContext +): any => { + return { + ...(input.Sns !== undefined && { Sns: serializeAws_restJson1SnsChannelConfig(input.Sns, context) }), + }; +}; + +const serializeAws_restJson1OpsCenterIntegrationConfig = ( + input: OpsCenterIntegrationConfig, + context: __SerdeContext +): any => { + return { + ...(input.OptInStatus !== undefined && { OptInStatus: input.OptInStatus }), + }; +}; + +const serializeAws_restJson1ResourceCollection = (input: ResourceCollection, context: __SerdeContext): any => { + return { + ...(input.CloudFormation !== undefined && { + CloudFormation: serializeAws_restJson1CloudFormationCollection(input.CloudFormation, context), + }), + }; +}; + +const serializeAws_restJson1SearchInsightsFilters = (input: SearchInsightsFilters, context: __SerdeContext): any => { + return { + ...(input.ResourceCollection !== undefined && { + ResourceCollection: serializeAws_restJson1ResourceCollection(input.ResourceCollection, context), + }), + ...(input.Severities !== undefined && { + Severities: serializeAws_restJson1InsightSeverities(input.Severities, context), + }), + ...(input.Statuses !== undefined && { Statuses: serializeAws_restJson1InsightStatuses(input.Statuses, context) }), + }; +}; + +const serializeAws_restJson1SnsChannelConfig = (input: SnsChannelConfig, context: __SerdeContext): any => { + return { + ...(input.TopicArn !== undefined && { TopicArn: input.TopicArn }), + }; +}; + +const serializeAws_restJson1StackNames = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_restJson1StartTimeRange = (input: StartTimeRange, context: __SerdeContext): any => { + return { + ...(input.FromTime !== undefined && { FromTime: Math.round(input.FromTime.getTime() / 1000) }), + ...(input.ToTime !== 
undefined && { ToTime: Math.round(input.ToTime.getTime() / 1000) }), + }; +}; + +const serializeAws_restJson1UpdateCloudFormationCollectionFilter = ( + input: UpdateCloudFormationCollectionFilter, + context: __SerdeContext +): any => { + return { + ...(input.StackNames !== undefined && { + StackNames: serializeAws_restJson1UpdateStackNames(input.StackNames, context), + }), + }; +}; + +const serializeAws_restJson1UpdateResourceCollectionFilter = ( + input: UpdateResourceCollectionFilter, + context: __SerdeContext +): any => { + return { + ...(input.CloudFormation !== undefined && { + CloudFormation: serializeAws_restJson1UpdateCloudFormationCollectionFilter(input.CloudFormation, context), + }), + }; +}; + +const serializeAws_restJson1UpdateServiceIntegrationConfig = ( + input: UpdateServiceIntegrationConfig, + context: __SerdeContext +): any => { + return { + ...(input.OpsCenter !== undefined && { + OpsCenter: serializeAws_restJson1OpsCenterIntegrationConfig(input.OpsCenter, context), + }), + }; +}; + +const serializeAws_restJson1UpdateStackNames = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const deserializeAws_restJson1AnomalySourceDetails = (output: any, context: __SerdeContext): AnomalySourceDetails => { + return { + CloudWatchMetrics: + output.CloudWatchMetrics !== undefined && output.CloudWatchMetrics !== null + ? deserializeAws_restJson1CloudWatchMetricsDetails(output.CloudWatchMetrics, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1AnomalyTimeRange = (output: any, context: __SerdeContext): AnomalyTimeRange => { + return { + EndTime: + output.EndTime !== undefined && output.EndTime !== null ? new Date(Math.round(output.EndTime * 1000)) : undefined, + StartTime: + output.StartTime !== undefined && output.StartTime !== null + ? 
new Date(Math.round(output.StartTime * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_restJson1Channels = (output: any, context: __SerdeContext): NotificationChannel[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1NotificationChannel(entry, context)); +}; + +const deserializeAws_restJson1CloudFormationCollection = ( + output: any, + context: __SerdeContext +): CloudFormationCollection => { + return { + StackNames: + output.StackNames !== undefined && output.StackNames !== null + ? deserializeAws_restJson1StackNames(output.StackNames, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1CloudFormationCollectionFilter = ( + output: any, + context: __SerdeContext +): CloudFormationCollectionFilter => { + return { + StackNames: + output.StackNames !== undefined && output.StackNames !== null + ? deserializeAws_restJson1StackNames(output.StackNames, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1CloudFormationHealth = (output: any, context: __SerdeContext): CloudFormationHealth => { + return { + Insight: + output.Insight !== undefined && output.Insight !== null + ? deserializeAws_restJson1InsightHealth(output.Insight, context) + : undefined, + StackName: output.StackName !== undefined && output.StackName !== null ? output.StackName : undefined, + } as any; +}; + +const deserializeAws_restJson1CloudFormationHealths = ( + output: any, + context: __SerdeContext +): CloudFormationHealth[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1CloudFormationHealth(entry, context)); +}; + +const deserializeAws_restJson1CloudWatchMetricsDetail = ( + output: any, + context: __SerdeContext +): CloudWatchMetricsDetail => { + return { + Dimensions: + output.Dimensions !== undefined && output.Dimensions !== null + ? 
deserializeAws_restJson1CloudWatchMetricsDimensions(output.Dimensions, context) + : undefined, + MetricName: output.MetricName !== undefined && output.MetricName !== null ? output.MetricName : undefined, + Namespace: output.Namespace !== undefined && output.Namespace !== null ? output.Namespace : undefined, + Period: output.Period !== undefined && output.Period !== null ? output.Period : undefined, + Stat: output.Stat !== undefined && output.Stat !== null ? output.Stat : undefined, + Unit: output.Unit !== undefined && output.Unit !== null ? output.Unit : undefined, + } as any; +}; + +const deserializeAws_restJson1CloudWatchMetricsDetails = ( + output: any, + context: __SerdeContext +): CloudWatchMetricsDetail[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1CloudWatchMetricsDetail(entry, context)); +}; + +const deserializeAws_restJson1CloudWatchMetricsDimension = ( + output: any, + context: __SerdeContext +): CloudWatchMetricsDimension => { + return { + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Value: output.Value !== undefined && output.Value !== null ? output.Value : undefined, + } as any; +}; + +const deserializeAws_restJson1CloudWatchMetricsDimensions = ( + output: any, + context: __SerdeContext +): CloudWatchMetricsDimension[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1CloudWatchMetricsDimension(entry, context)); +}; + +const deserializeAws_restJson1Event = (output: any, context: __SerdeContext): Event => { + return { + DataSource: output.DataSource !== undefined && output.DataSource !== null ? output.DataSource : undefined, + EventClass: output.EventClass !== undefined && output.EventClass !== null ? output.EventClass : undefined, + EventSource: output.EventSource !== undefined && output.EventSource !== null ? output.EventSource : undefined, + Id: output.Id !== undefined && output.Id !== null ? 
output.Id : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Resources: + output.Resources !== undefined && output.Resources !== null + ? deserializeAws_restJson1EventResources(output.Resources, context) + : undefined, + Time: output.Time !== undefined && output.Time !== null ? new Date(Math.round(output.Time * 1000)) : undefined, + } as any; +}; + +const deserializeAws_restJson1EventResource = (output: any, context: __SerdeContext): EventResource => { + return { + Arn: output.Arn !== undefined && output.Arn !== null ? output.Arn : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Type: output.Type !== undefined && output.Type !== null ? output.Type : undefined, + } as any; +}; + +const deserializeAws_restJson1EventResources = (output: any, context: __SerdeContext): EventResource[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1EventResource(entry, context)); +}; + +const deserializeAws_restJson1Events = (output: any, context: __SerdeContext): Event[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1Event(entry, context)); +}; + +const deserializeAws_restJson1InsightHealth = (output: any, context: __SerdeContext): InsightHealth => { + return { + MeanTimeToRecoverInMilliseconds: + output.MeanTimeToRecoverInMilliseconds !== undefined && output.MeanTimeToRecoverInMilliseconds !== null + ? output.MeanTimeToRecoverInMilliseconds + : undefined, + OpenProactiveInsights: + output.OpenProactiveInsights !== undefined && output.OpenProactiveInsights !== null + ? output.OpenProactiveInsights + : undefined, + OpenReactiveInsights: + output.OpenReactiveInsights !== undefined && output.OpenReactiveInsights !== null + ? 
output.OpenReactiveInsights + : undefined, + } as any; +}; + +const deserializeAws_restJson1InsightTimeRange = (output: any, context: __SerdeContext): InsightTimeRange => { + return { + EndTime: + output.EndTime !== undefined && output.EndTime !== null ? new Date(Math.round(output.EndTime * 1000)) : undefined, + StartTime: + output.StartTime !== undefined && output.StartTime !== null + ? new Date(Math.round(output.StartTime * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_restJson1NotificationChannel = (output: any, context: __SerdeContext): NotificationChannel => { + return { + Config: + output.Config !== undefined && output.Config !== null + ? deserializeAws_restJson1NotificationChannelConfig(output.Config, context) + : undefined, + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + } as any; +}; + +const deserializeAws_restJson1NotificationChannelConfig = ( + output: any, + context: __SerdeContext +): NotificationChannelConfig => { + return { + Sns: + output.Sns !== undefined && output.Sns !== null + ? deserializeAws_restJson1SnsChannelConfig(output.Sns, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1OpsCenterIntegration = (output: any, context: __SerdeContext): OpsCenterIntegration => { + return { + OptInStatus: output.OptInStatus !== undefined && output.OptInStatus !== null ? output.OptInStatus : undefined, + } as any; +}; + +const deserializeAws_restJson1PredictionTimeRange = (output: any, context: __SerdeContext): PredictionTimeRange => { + return { + EndTime: + output.EndTime !== undefined && output.EndTime !== null ? new Date(Math.round(output.EndTime * 1000)) : undefined, + StartTime: + output.StartTime !== undefined && output.StartTime !== null + ? 
new Date(Math.round(output.StartTime * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ProactiveAnomalies = ( + output: any, + context: __SerdeContext +): ProactiveAnomalySummary[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ProactiveAnomalySummary(entry, context)); +}; + +const deserializeAws_restJson1ProactiveAnomaly = (output: any, context: __SerdeContext): ProactiveAnomaly => { + return { + AnomalyTimeRange: + output.AnomalyTimeRange !== undefined && output.AnomalyTimeRange !== null + ? deserializeAws_restJson1AnomalyTimeRange(output.AnomalyTimeRange, context) + : undefined, + AssociatedInsightId: + output.AssociatedInsightId !== undefined && output.AssociatedInsightId !== null + ? output.AssociatedInsightId + : undefined, + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + Limit: output.Limit !== undefined && output.Limit !== null ? output.Limit : undefined, + PredictionTimeRange: + output.PredictionTimeRange !== undefined && output.PredictionTimeRange !== null + ? deserializeAws_restJson1PredictionTimeRange(output.PredictionTimeRange, context) + : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + SourceDetails: + output.SourceDetails !== undefined && output.SourceDetails !== null + ? deserializeAws_restJson1AnomalySourceDetails(output.SourceDetails, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + UpdateTime: + output.UpdateTime !== undefined && output.UpdateTime !== null + ? 
new Date(Math.round(output.UpdateTime * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ProactiveAnomalySummary = ( + output: any, + context: __SerdeContext +): ProactiveAnomalySummary => { + return { + AnomalyTimeRange: + output.AnomalyTimeRange !== undefined && output.AnomalyTimeRange !== null + ? deserializeAws_restJson1AnomalyTimeRange(output.AnomalyTimeRange, context) + : undefined, + AssociatedInsightId: + output.AssociatedInsightId !== undefined && output.AssociatedInsightId !== null + ? output.AssociatedInsightId + : undefined, + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + Limit: output.Limit !== undefined && output.Limit !== null ? output.Limit : undefined, + PredictionTimeRange: + output.PredictionTimeRange !== undefined && output.PredictionTimeRange !== null + ? deserializeAws_restJson1PredictionTimeRange(output.PredictionTimeRange, context) + : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + SourceDetails: + output.SourceDetails !== undefined && output.SourceDetails !== null + ? deserializeAws_restJson1AnomalySourceDetails(output.SourceDetails, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + UpdateTime: + output.UpdateTime !== undefined && output.UpdateTime !== null + ? new Date(Math.round(output.UpdateTime * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ProactiveInsight = (output: any, context: __SerdeContext): ProactiveInsight => { + return { + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + InsightTimeRange: + output.InsightTimeRange !== undefined && output.InsightTimeRange !== null + ? 
deserializeAws_restJson1InsightTimeRange(output.InsightTimeRange, context) + : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + PredictionTimeRange: + output.PredictionTimeRange !== undefined && output.PredictionTimeRange !== null + ? deserializeAws_restJson1PredictionTimeRange(output.PredictionTimeRange, context) + : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + SsmOpsItemId: output.SsmOpsItemId !== undefined && output.SsmOpsItemId !== null ? output.SsmOpsItemId : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + } as any; +}; + +const deserializeAws_restJson1ProactiveInsights = (output: any, context: __SerdeContext): ProactiveInsightSummary[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ProactiveInsightSummary(entry, context)); +}; + +const deserializeAws_restJson1ProactiveInsightSummary = ( + output: any, + context: __SerdeContext +): ProactiveInsightSummary => { + return { + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + InsightTimeRange: + output.InsightTimeRange !== undefined && output.InsightTimeRange !== null + ? deserializeAws_restJson1InsightTimeRange(output.InsightTimeRange, context) + : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + PredictionTimeRange: + output.PredictionTimeRange !== undefined && output.PredictionTimeRange !== null + ? deserializeAws_restJson1PredictionTimeRange(output.PredictionTimeRange, context) + : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? 
deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + } as any; +}; + +const deserializeAws_restJson1ReactiveAnomalies = (output: any, context: __SerdeContext): ReactiveAnomalySummary[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ReactiveAnomalySummary(entry, context)); +}; + +const deserializeAws_restJson1ReactiveAnomaly = (output: any, context: __SerdeContext): ReactiveAnomaly => { + return { + AnomalyTimeRange: + output.AnomalyTimeRange !== undefined && output.AnomalyTimeRange !== null + ? deserializeAws_restJson1AnomalyTimeRange(output.AnomalyTimeRange, context) + : undefined, + AssociatedInsightId: + output.AssociatedInsightId !== undefined && output.AssociatedInsightId !== null + ? output.AssociatedInsightId + : undefined, + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + SourceDetails: + output.SourceDetails !== undefined && output.SourceDetails !== null + ? deserializeAws_restJson1AnomalySourceDetails(output.SourceDetails, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + } as any; +}; + +const deserializeAws_restJson1ReactiveAnomalySummary = ( + output: any, + context: __SerdeContext +): ReactiveAnomalySummary => { + return { + AnomalyTimeRange: + output.AnomalyTimeRange !== undefined && output.AnomalyTimeRange !== null + ? 
deserializeAws_restJson1AnomalyTimeRange(output.AnomalyTimeRange, context) + : undefined, + AssociatedInsightId: + output.AssociatedInsightId !== undefined && output.AssociatedInsightId !== null + ? output.AssociatedInsightId + : undefined, + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + SourceDetails: + output.SourceDetails !== undefined && output.SourceDetails !== null + ? deserializeAws_restJson1AnomalySourceDetails(output.SourceDetails, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + } as any; +}; + +const deserializeAws_restJson1ReactiveInsight = (output: any, context: __SerdeContext): ReactiveInsight => { + return { + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + InsightTimeRange: + output.InsightTimeRange !== undefined && output.InsightTimeRange !== null + ? deserializeAws_restJson1InsightTimeRange(output.InsightTimeRange, context) + : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + SsmOpsItemId: output.SsmOpsItemId !== undefined && output.SsmOpsItemId !== null ? output.SsmOpsItemId : undefined, + Status: output.Status !== undefined && output.Status !== null ? 
output.Status : undefined, + } as any; +}; + +const deserializeAws_restJson1ReactiveInsights = (output: any, context: __SerdeContext): ReactiveInsightSummary[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ReactiveInsightSummary(entry, context)); +}; + +const deserializeAws_restJson1ReactiveInsightSummary = ( + output: any, + context: __SerdeContext +): ReactiveInsightSummary => { + return { + Id: output.Id !== undefined && output.Id !== null ? output.Id : undefined, + InsightTimeRange: + output.InsightTimeRange !== undefined && output.InsightTimeRange !== null + ? deserializeAws_restJson1InsightTimeRange(output.InsightTimeRange, context) + : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + ResourceCollection: + output.ResourceCollection !== undefined && output.ResourceCollection !== null + ? deserializeAws_restJson1ResourceCollection(output.ResourceCollection, context) + : undefined, + Severity: output.Severity !== undefined && output.Severity !== null ? output.Severity : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + } as any; +}; + +const deserializeAws_restJson1Recommendation = (output: any, context: __SerdeContext): Recommendation => { + return { + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + Link: output.Link !== undefined && output.Link !== null ? output.Link : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Reason: output.Reason !== undefined && output.Reason !== null ? output.Reason : undefined, + RelatedAnomalies: + output.RelatedAnomalies !== undefined && output.RelatedAnomalies !== null + ? deserializeAws_restJson1RecommendationRelatedAnomalies(output.RelatedAnomalies, context) + : undefined, + RelatedEvents: + output.RelatedEvents !== undefined && output.RelatedEvents !== null + ? 
deserializeAws_restJson1RecommendationRelatedEvents(output.RelatedEvents, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedAnomalies = ( + output: any, + context: __SerdeContext +): RecommendationRelatedAnomaly[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1RecommendationRelatedAnomaly(entry, context)); +}; + +const deserializeAws_restJson1RecommendationRelatedAnomaly = ( + output: any, + context: __SerdeContext +): RecommendationRelatedAnomaly => { + return { + Resources: + output.Resources !== undefined && output.Resources !== null + ? deserializeAws_restJson1RecommendationRelatedAnomalyResources(output.Resources, context) + : undefined, + SourceDetails: + output.SourceDetails !== undefined && output.SourceDetails !== null + ? deserializeAws_restJson1RelatedAnomalySourceDetails(output.SourceDetails, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedAnomalyResource = ( + output: any, + context: __SerdeContext +): RecommendationRelatedAnomalyResource => { + return { + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Type: output.Type !== undefined && output.Type !== null ? output.Type : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedAnomalyResources = ( + output: any, + context: __SerdeContext +): RecommendationRelatedAnomalyResource[] => { + return (output || []).map((entry: any) => + deserializeAws_restJson1RecommendationRelatedAnomalyResource(entry, context) + ); +}; + +const deserializeAws_restJson1RecommendationRelatedAnomalySourceDetail = ( + output: any, + context: __SerdeContext +): RecommendationRelatedAnomalySourceDetail => { + return { + CloudWatchMetrics: + output.CloudWatchMetrics !== undefined && output.CloudWatchMetrics !== null + ? 
deserializeAws_restJson1RecommendationRelatedCloudWatchMetricsSourceDetails(output.CloudWatchMetrics, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedCloudWatchMetricsSourceDetail = ( + output: any, + context: __SerdeContext +): RecommendationRelatedCloudWatchMetricsSourceDetail => { + return { + MetricName: output.MetricName !== undefined && output.MetricName !== null ? output.MetricName : undefined, + Namespace: output.Namespace !== undefined && output.Namespace !== null ? output.Namespace : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedCloudWatchMetricsSourceDetails = ( + output: any, + context: __SerdeContext +): RecommendationRelatedCloudWatchMetricsSourceDetail[] => { + return (output || []).map((entry: any) => + deserializeAws_restJson1RecommendationRelatedCloudWatchMetricsSourceDetail(entry, context) + ); +}; + +const deserializeAws_restJson1RecommendationRelatedEvent = ( + output: any, + context: __SerdeContext +): RecommendationRelatedEvent => { + return { + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Resources: + output.Resources !== undefined && output.Resources !== null + ? deserializeAws_restJson1RecommendationRelatedEventResources(output.Resources, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedEventResource = ( + output: any, + context: __SerdeContext +): RecommendationRelatedEventResource => { + return { + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Type: output.Type !== undefined && output.Type !== null ? 
output.Type : undefined, + } as any; +}; + +const deserializeAws_restJson1RecommendationRelatedEventResources = ( + output: any, + context: __SerdeContext +): RecommendationRelatedEventResource[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1RecommendationRelatedEventResource(entry, context)); +}; + +const deserializeAws_restJson1RecommendationRelatedEvents = ( + output: any, + context: __SerdeContext +): RecommendationRelatedEvent[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1RecommendationRelatedEvent(entry, context)); +}; + +const deserializeAws_restJson1Recommendations = (output: any, context: __SerdeContext): Recommendation[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1Recommendation(entry, context)); +}; + +const deserializeAws_restJson1RelatedAnomalySourceDetails = ( + output: any, + context: __SerdeContext +): RecommendationRelatedAnomalySourceDetail[] => { + return (output || []).map((entry: any) => + deserializeAws_restJson1RecommendationRelatedAnomalySourceDetail(entry, context) + ); +}; + +const deserializeAws_restJson1ResourceCollection = (output: any, context: __SerdeContext): ResourceCollection => { + return { + CloudFormation: + output.CloudFormation !== undefined && output.CloudFormation !== null + ? deserializeAws_restJson1CloudFormationCollection(output.CloudFormation, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ResourceCollectionFilter = ( + output: any, + context: __SerdeContext +): ResourceCollectionFilter => { + return { + CloudFormation: + output.CloudFormation !== undefined && output.CloudFormation !== null + ? 
deserializeAws_restJson1CloudFormationCollectionFilter(output.CloudFormation, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ServiceIntegrationConfig = ( + output: any, + context: __SerdeContext +): ServiceIntegrationConfig => { + return { + OpsCenter: + output.OpsCenter !== undefined && output.OpsCenter !== null + ? deserializeAws_restJson1OpsCenterIntegration(output.OpsCenter, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1SnsChannelConfig = (output: any, context: __SerdeContext): SnsChannelConfig => { + return { + TopicArn: output.TopicArn !== undefined && output.TopicArn !== null ? output.TopicArn : undefined, + } as any; +}; + +const deserializeAws_restJson1StackNames = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1ValidationExceptionField = ( + output: any, + context: __SerdeContext +): ValidationExceptionField => { + return { + Message: output.Message !== undefined && output.Message !== null ? output.Message : undefined, + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + } as any; +}; + +const deserializeAws_restJson1ValidationExceptionFields = ( + output: any, + context: __SerdeContext +): ValidationExceptionField[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ValidationExceptionField(entry, context)); +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. 
+const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}; + +// Encode Uint8Array data into string with utf-8. +const collectBodyString = (streamBody: any, context: __SerdeContext): Promise => + collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); + +const isSerializableHeaderValue = (value: any): boolean => + value !== undefined && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); + +const parseBody = (streamBody: any, context: __SerdeContext): any => + collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; + }); + +/** + * Load an error code for the aws.rest-json-1.1 protocol. 
+ */ +const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => { + const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + + const sanitizeErrorCode = (rawValue: string): string => { + let cleanValue = rawValue; + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + + return ""; +}; diff --git a/clients/client-devops-guru/runtimeConfig.browser.ts b/clients/client-devops-guru/runtimeConfig.browser.ts new file mode 100644 index 000000000000..3d624b5e89fe --- /dev/null +++ b/clients/client-devops-guru/runtimeConfig.browser.ts @@ -0,0 +1,34 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler"; +import { invalidAsyncFunction } from "@aws-sdk/invalid-dependency"; +import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry"; +import { parseUrl } from "@aws-sdk/url-parser-browser"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-browser"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser"; +import { ClientDefaults } from "./DevOpsGuruClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "browser", + base64Decoder: 
fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider: invalidAsyncFunction("Credentialis missing") as any, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: DEFAULT_MAX_ATTEMPTS, + region: invalidAsyncFunction("Region is missing") as any, + requestHandler: new FetchHttpHandler(), + sha256: Sha256, + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-devops-guru/runtimeConfig.native.ts b/clients/client-devops-guru/runtimeConfig.native.ts new file mode 100644 index 000000000000..00208d07f98a --- /dev/null +++ b/clients/client-devops-guru/runtimeConfig.native.ts @@ -0,0 +1,17 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-js"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { ClientDefaults } from "./DevOpsGuruClient"; +import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...BrowserDefaults, + runtime: "react-native", + defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`, + sha256: Sha256, + urlParser: parseUrl, +}; diff --git a/clients/client-devops-guru/runtimeConfig.shared.ts b/clients/client-devops-guru/runtimeConfig.shared.ts new file mode 100644 index 000000000000..99d656b13a43 --- /dev/null +++ b/clients/client-devops-guru/runtimeConfig.shared.ts @@ -0,0 +1,13 @@ +import { defaultRegionInfoProvider } from "./endpoints"; +import { Logger as __Logger } from "@aws-sdk/types"; + +/** + * @internal + */ +export const ClientSharedValues = { + apiVersion: "2019-12-11", + disableHostPrefix: false, + logger: {} as __Logger, + regionInfoProvider: defaultRegionInfoProvider, + signingName: "devops-guru", +}; diff --git a/clients/client-devops-guru/runtimeConfig.ts b/clients/client-devops-guru/runtimeConfig.ts 
new file mode 100644 index 000000000000..6b10468c3264 --- /dev/null +++ b/clients/client-devops-guru/runtimeConfig.ts @@ -0,0 +1,36 @@ +import packageInfo from "./package.json"; + +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { Hash } from "@aws-sdk/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider"; +import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-node"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-node"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node"; +import { ClientDefaults } from "./DevOpsGuruClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "node", + base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: new NodeHttpHandler(), + sha256: Hash.bind(null, "sha256"), + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-devops-guru/tsconfig.es.json b/clients/client-devops-guru/tsconfig.es.json new file mode 100644 index 000000000000..30df5d2e6986 --- /dev/null +++ b/clients/client-devops-guru/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": 
"./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-devops-guru/tsconfig.json b/clients/client-devops-guru/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-devops-guru/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": "./README.md", + "mode": "file", + "out": "./docs", + "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-directory-service/DirectoryService.ts b/clients/client-directory-service/DirectoryService.ts index 71f658b76f70..2a50a36ce14e 100644 --- a/clients/client-directory-service/DirectoryService.ts +++ b/clients/client-directory-service/DirectoryService.ts @@ -134,6 +134,11 @@ import { DescribeTrustsCommandInput, DescribeTrustsCommandOutput, } from "./commands/DescribeTrustsCommand"; +import { + DisableClientAuthenticationCommand, + DisableClientAuthenticationCommandInput, + DisableClientAuthenticationCommandOutput, +} from "./commands/DisableClientAuthenticationCommand"; import { DisableLDAPSCommand, DisableLDAPSCommandInput, @@ -145,6 +150,11 @@ 
import { DisableRadiusCommandOutput, } from "./commands/DisableRadiusCommand"; import { DisableSsoCommand, DisableSsoCommandInput, DisableSsoCommandOutput } from "./commands/DisableSsoCommand"; +import { + EnableClientAuthenticationCommand, + EnableClientAuthenticationCommandInput, + EnableClientAuthenticationCommandOutput, +} from "./commands/EnableClientAuthenticationCommand"; import { EnableLDAPSCommand, EnableLDAPSCommandInput, EnableLDAPSCommandOutput } from "./commands/EnableLDAPSCommand"; import { EnableRadiusCommand, @@ -1278,6 +1288,38 @@ export class DirectoryService extends DirectoryServiceClient { } } + /** + *

Disables client authentication for smart cards.

+ */ + public disableClientAuthentication( + args: DisableClientAuthenticationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public disableClientAuthentication( + args: DisableClientAuthenticationCommandInput, + cb: (err: any, data?: DisableClientAuthenticationCommandOutput) => void + ): void; + public disableClientAuthentication( + args: DisableClientAuthenticationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DisableClientAuthenticationCommandOutput) => void + ): void; + public disableClientAuthentication( + args: DisableClientAuthenticationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DisableClientAuthenticationCommandOutput) => void), + cb?: (err: any, data?: DisableClientAuthenticationCommandOutput) => void + ): Promise | void { + const command = new DisableClientAuthenticationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Deactivates LDAP secure calls for the specified directory.

*/ @@ -1365,6 +1407,38 @@ export class DirectoryService extends DirectoryServiceClient { } } + /** + *

Enables client authentication for smart cards.

+ */ + public enableClientAuthentication( + args: EnableClientAuthenticationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public enableClientAuthentication( + args: EnableClientAuthenticationCommandInput, + cb: (err: any, data?: EnableClientAuthenticationCommandOutput) => void + ): void; + public enableClientAuthentication( + args: EnableClientAuthenticationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: EnableClientAuthenticationCommandOutput) => void + ): void; + public enableClientAuthentication( + args: EnableClientAuthenticationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: EnableClientAuthenticationCommandOutput) => void), + cb?: (err: any, data?: EnableClientAuthenticationCommandOutput) => void + ): Promise | void { + const command = new EnableClientAuthenticationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

Activates the switch for the specific directory to always use LDAP secure calls.

*/ diff --git a/clients/client-directory-service/DirectoryServiceClient.ts b/clients/client-directory-service/DirectoryServiceClient.ts index 772f909e2b24..d8e201136418 100644 --- a/clients/client-directory-service/DirectoryServiceClient.ts +++ b/clients/client-directory-service/DirectoryServiceClient.ts @@ -74,9 +74,17 @@ import { } from "./commands/DescribeSharedDirectoriesCommand"; import { DescribeSnapshotsCommandInput, DescribeSnapshotsCommandOutput } from "./commands/DescribeSnapshotsCommand"; import { DescribeTrustsCommandInput, DescribeTrustsCommandOutput } from "./commands/DescribeTrustsCommand"; +import { + DisableClientAuthenticationCommandInput, + DisableClientAuthenticationCommandOutput, +} from "./commands/DisableClientAuthenticationCommand"; import { DisableLDAPSCommandInput, DisableLDAPSCommandOutput } from "./commands/DisableLDAPSCommand"; import { DisableRadiusCommandInput, DisableRadiusCommandOutput } from "./commands/DisableRadiusCommand"; import { DisableSsoCommandInput, DisableSsoCommandOutput } from "./commands/DisableSsoCommand"; +import { + EnableClientAuthenticationCommandInput, + EnableClientAuthenticationCommandOutput, +} from "./commands/EnableClientAuthenticationCommand"; import { EnableLDAPSCommandInput, EnableLDAPSCommandOutput } from "./commands/EnableLDAPSCommand"; import { EnableRadiusCommandInput, EnableRadiusCommandOutput } from "./commands/EnableRadiusCommand"; import { EnableSsoCommandInput, EnableSsoCommandOutput } from "./commands/EnableSsoCommand"; @@ -214,9 +222,11 @@ export type ServiceInputTypes = | DescribeSharedDirectoriesCommandInput | DescribeSnapshotsCommandInput | DescribeTrustsCommandInput + | DisableClientAuthenticationCommandInput | DisableLDAPSCommandInput | DisableRadiusCommandInput | DisableSsoCommandInput + | EnableClientAuthenticationCommandInput | EnableLDAPSCommandInput | EnableRadiusCommandInput | EnableSsoCommandInput @@ -276,9 +286,11 @@ export type ServiceOutputTypes = | 
DescribeSharedDirectoriesCommandOutput | DescribeSnapshotsCommandOutput | DescribeTrustsCommandOutput + | DisableClientAuthenticationCommandOutput | DisableLDAPSCommandOutput | DisableRadiusCommandOutput | DisableSsoCommandOutput + | EnableClientAuthenticationCommandOutput | EnableLDAPSCommandOutput | EnableRadiusCommandOutput | EnableSsoCommandOutput diff --git a/clients/client-directory-service/commands/DisableClientAuthenticationCommand.ts b/clients/client-directory-service/commands/DisableClientAuthenticationCommand.ts new file mode 100644 index 000000000000..05e37e2546b9 --- /dev/null +++ b/clients/client-directory-service/commands/DisableClientAuthenticationCommand.ts @@ -0,0 +1,91 @@ +import { DirectoryServiceClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DirectoryServiceClient"; +import { DisableClientAuthenticationRequest, DisableClientAuthenticationResult } from "../models/models_0"; +import { + deserializeAws_json1_1DisableClientAuthenticationCommand, + serializeAws_json1_1DisableClientAuthenticationCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DisableClientAuthenticationCommandInput = DisableClientAuthenticationRequest; +export type DisableClientAuthenticationCommandOutput = DisableClientAuthenticationResult & __MetadataBearer; + +/** + *

Disables client authentication for smart cards.

+ */ +export class DisableClientAuthenticationCommand extends $Command< + DisableClientAuthenticationCommandInput, + DisableClientAuthenticationCommandOutput, + DirectoryServiceClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DisableClientAuthenticationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DirectoryServiceClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DirectoryServiceClient"; + const commandName = "DisableClientAuthenticationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DisableClientAuthenticationRequest.filterSensitiveLog, + outputFilterSensitiveLog: DisableClientAuthenticationResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DisableClientAuthenticationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DisableClientAuthenticationCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1DisableClientAuthenticationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-directory-service/commands/EnableClientAuthenticationCommand.ts b/clients/client-directory-service/commands/EnableClientAuthenticationCommand.ts new file mode 100644 index 000000000000..96ed8aafb8e4 --- /dev/null +++ b/clients/client-directory-service/commands/EnableClientAuthenticationCommand.ts @@ -0,0 +1,91 @@ +import { DirectoryServiceClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../DirectoryServiceClient"; +import { EnableClientAuthenticationRequest, EnableClientAuthenticationResult } from "../models/models_0"; +import { + deserializeAws_json1_1EnableClientAuthenticationCommand, + serializeAws_json1_1EnableClientAuthenticationCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type EnableClientAuthenticationCommandInput = EnableClientAuthenticationRequest; +export type EnableClientAuthenticationCommandOutput = EnableClientAuthenticationResult & __MetadataBearer; + +/** + *

Enables client authentication for smart cards.

+ */ +export class EnableClientAuthenticationCommand extends $Command< + EnableClientAuthenticationCommandInput, + EnableClientAuthenticationCommandOutput, + DirectoryServiceClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: EnableClientAuthenticationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: DirectoryServiceClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "DirectoryServiceClient"; + const commandName = "EnableClientAuthenticationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: EnableClientAuthenticationRequest.filterSensitiveLog, + outputFilterSensitiveLog: EnableClientAuthenticationResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: EnableClientAuthenticationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1EnableClientAuthenticationCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1EnableClientAuthenticationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-directory-service/index.ts b/clients/client-directory-service/index.ts index ed976685ea0e..d66783064c7c 100644 --- a/clients/client-directory-service/index.ts +++ b/clients/client-directory-service/index.ts @@ -32,9 +32,11 @@ export * from "./commands/DescribeRegionsCommand"; export * from "./commands/DescribeSharedDirectoriesCommand"; export * from "./commands/DescribeSnapshotsCommand"; export * from "./commands/DescribeTrustsCommand"; +export * from "./commands/DisableClientAuthenticationCommand"; export * from "./commands/DisableLDAPSCommand"; export * from "./commands/DisableRadiusCommand"; export * from "./commands/DisableSsoCommand"; +export * from "./commands/EnableClientAuthenticationCommand"; export * from "./commands/EnableLDAPSCommand"; export * from "./commands/EnableRadiusCommand"; export * from "./commands/EnableSsoCommand"; diff --git a/clients/client-directory-service/models/models_0.ts b/clients/client-directory-service/models/models_0.ts index 05c880e04468..e4e90b125f3d 100644 --- a/clients/client-directory-service/models/models_0.ts +++ b/clients/client-directory-service/models/models_0.ts @@ -38,15 +38,29 @@ export enum ShareStatus { */ export interface SharedDirectory { /** - *

Current directory status of the shared AWS Managed Microsoft AD directory.

+ *

Identifier of the directory owner account, which contains the directory that has been + * shared to the consumer account.

*/ - ShareStatus?: ShareStatus | string; + OwnerAccountId?: string; /** *

Identifier of the directory in the directory owner account.

*/ OwnerDirectoryId?: string; + /** + *

The method used when sharing a directory to determine whether the directory should be + * shared within your AWS organization (ORGANIZATIONS) or with any AWS account by + * sending a shared directory request (HANDSHAKE).

+ */ + ShareMethod?: ShareMethod | string; + + /** + *

Identifier of the directory consumer account that has access to the shared directory + * (OwnerDirectoryId) in the directory owner account.

+ */ + SharedAccountId?: string; + /** *

Identifier of the shared directory in the directory consumer account. This identifier is * different for each directory owner account.

@@ -54,10 +68,9 @@ export interface SharedDirectory { SharedDirectoryId?: string; /** - *

Identifier of the directory owner account, which contains the directory that has been - * shared to the consumer account.

+ *

Current directory status of the shared AWS Managed Microsoft AD directory.

*/ - OwnerAccountId?: string; + ShareStatus?: ShareStatus | string; /** *

A directory share request that is sent by the directory owner to the directory consumer. @@ -66,13 +79,6 @@ export interface SharedDirectory { */ ShareNotes?: string; - /** - *

The method used when sharing a directory to determine whether the directory should be - * shared within your AWS organization (ORGANIZATIONS) or with any AWS account by - * sending a shared directory request (HANDSHAKE).

- */ - ShareMethod?: ShareMethod | string; - /** *

The date and time that the shared directory was created.

*/ @@ -82,12 +88,6 @@ export interface SharedDirectory { *

The date and time that the shared directory was last updated.

*/ LastUpdatedDateTime?: Date; - - /** - *

Identifier of the directory consumer account that has access to the shared directory - * (OwnerDirectoryId) in the directory owner account.

- */ - SharedAccountId?: string; } export namespace SharedDirectory { @@ -164,14 +164,14 @@ export interface EntityDoesNotExistException extends __SmithyException, $Metadat name: "EntityDoesNotExistException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace EntityDoesNotExistException { @@ -233,14 +233,14 @@ export interface AccessDeniedException extends __SmithyException, $MetadataBeare name: "AccessDeniedException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace AccessDeniedException { @@ -396,14 +396,14 @@ export interface EntityAlreadyExistsException extends __SmithyException, $Metada name: "EntityAlreadyExistsException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace EntityAlreadyExistsException { @@ -439,17 +439,17 @@ export namespace IpRouteLimitExceededException { *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

*/ export interface DirectoryVpcSettings { + /** + *

The identifier of the VPC in which to create the directory.

+ */ + VpcId: string | undefined; + /** *

The identifiers of the subnets for the directory servers. The two subnets must be in * different Availability Zones. AWS Directory Service creates a directory server and a DNS * server in each of these subnets.

*/ SubnetIds: string[] | undefined; - - /** - *

The identifier of the VPC in which to create the directory.

- */ - VpcId: string | undefined; } export namespace DirectoryVpcSettings { @@ -459,11 +459,6 @@ export namespace DirectoryVpcSettings { } export interface AddRegionRequest { - /** - *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

- */ - VPCSettings: DirectoryVpcSettings | undefined; - /** *

The identifier of the directory to which you want to add Region replication.

*/ @@ -473,6 +468,11 @@ export interface AddRegionRequest { *

The name of the Region where you want to add domain controllers for replication. For example, us-east-1.

*/ RegionName: string | undefined; + + /** + *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

+ */ + VPCSettings: DirectoryVpcSettings | undefined; } export namespace AddRegionRequest { @@ -496,14 +496,14 @@ export interface DirectoryAlreadyInRegionException extends __SmithyException, $M name: "DirectoryAlreadyInRegionException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace DirectoryAlreadyInRegionException { @@ -536,7 +536,7 @@ export namespace DirectoryDoesNotExistException { } /** - *

You have reached the limit for maximum number of simultaneous region replications per directory.

+ *

You have reached the limit for maximum number of simultaneous Region replications per directory.

*/ export interface RegionLimitExceededException extends __SmithyException, $MetadataBearer { name: "RegionLimitExceededException"; @@ -565,14 +565,14 @@ export interface UnsupportedOperationException extends __SmithyException, $Metad name: "UnsupportedOperationException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace UnsupportedOperationException { @@ -586,14 +586,14 @@ export namespace UnsupportedOperationException { */ export interface Tag { /** - *

The optional value of the tag. The string value can be Unicode characters. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$").

+ *

Required name of the tag. The string value can be Unicode characters and cannot be prefixed with "aws:". The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$").

*/ - Value: string | undefined; + Key: string | undefined; /** - *

Required name of the tag. The string value can be Unicode characters and cannot be prefixed with "aws:". The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$").

+ *

The optional value of the tag. The string value can be Unicode characters. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: "^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-]*)$").

*/ - Key: string | undefined; + Value: string | undefined; } export namespace Tag { @@ -635,14 +635,14 @@ export interface TagLimitExceededException extends __SmithyException, $MetadataB name: "TagLimitExceededException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace TagLimitExceededException { @@ -656,14 +656,14 @@ export namespace TagLimitExceededException { */ export interface Attribute { /** - *

The value of the attribute.

+ *

The name of the attribute.

*/ - Value?: string; + Name?: string; /** - *

The name of the attribute.

+ *

The value of the attribute.

*/ - Name?: string; + Value?: string; } export namespace Attribute { @@ -679,14 +679,14 @@ export interface AuthenticationFailedException extends __SmithyException, $Metad name: "AuthenticationFailedException"; $fault: "client"; /** - *

The identifier of the request that caused the exception.

+ *

The textual message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The textual message for the exception.

+ *

The identifier of the request that caused the exception.

*/ - Message?: string; + RequestId?: string; } export namespace AuthenticationFailedException { @@ -721,6 +721,22 @@ export namespace CancelSchemaExtensionResult { }); } +/** + *

Contains information about the client certificate authentication settings, such as ClientLDAPS or ClientCertAuth.

+ */ +export interface ClientCertAuthSettings { + /** + *

Specifies the URL of the default OCSP server used to check for revocation status.

+ */ + OCSPUrl?: string; +} + +export namespace ClientCertAuthSettings { + export const filterSensitiveLog = (obj: ClientCertAuthSettings): any => ({ + ...obj, + }); +} + export enum CertificateState { DEREGISTERED = "Deregistered", DEREGISTERING = "Deregistering", @@ -730,6 +746,11 @@ export enum CertificateState { REGISTER_FAILED = "RegisterFailed", } +export enum CertificateType { + CLIENT_CERT_AUTH = "ClientCertAuth", + CLIENT_LDAPS = "ClientLDAPS", +} + /** *

Information about the certificate.

*/ @@ -739,6 +760,11 @@ export interface Certificate { */ CertificateId?: string; + /** + *

The state of the certificate.

+ */ + State?: CertificateState | string; + /** *

Describes a state change for the certificate.

*/ @@ -749,20 +775,25 @@ export interface Certificate { */ CommonName?: string; + /** + *

The date and time that the certificate was registered.

+ */ + RegisteredDateTime?: Date; + /** *

The date and time when the certificate will expire.

*/ ExpiryDateTime?: Date; /** - *

The state of the certificate.

+ *

Select ClientCertAuth for smart card integration.

*/ - State?: CertificateState | string; + Type?: CertificateType | string; /** - *

The date and time that the certificate was registered.

+ *

Provides information about the client certificate authentication settings. The default value is ClientLDAPS.

*/ - RegisteredDateTime?: Date; + ClientCertAuthSettings?: ClientCertAuthSettings; } export namespace Certificate { @@ -801,14 +832,14 @@ export interface CertificateDoesNotExistException extends __SmithyException, $Me name: "CertificateDoesNotExistException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace CertificateDoesNotExistException { @@ -821,6 +852,16 @@ export namespace CertificateDoesNotExistException { *

Contains general information about a certificate.

*/ export interface CertificateInfo { + /** + *

The identifier of the certificate.

+ */ + CertificateId?: string; + + /** + *

The common name for the certificate.

+ */ + CommonName?: string; + /** *

The state of the certificate.

*/ @@ -832,14 +873,9 @@ export interface CertificateInfo { ExpiryDateTime?: Date; /** - *

The identifier of the certificate.

- */ - CertificateId?: string; - - /** - *

The common name for the certificate.

+ *

Displays the type of certificate.

*/ - CommonName?: string; + Type?: CertificateType | string; } export namespace CertificateInfo { @@ -856,14 +892,14 @@ export interface CertificateInUseException extends __SmithyException, $MetadataB name: "CertificateInUseException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace CertificateInUseException { @@ -879,14 +915,14 @@ export interface CertificateLimitExceededException extends __SmithyException, $M name: "CertificateLimitExceededException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace CertificateLimitExceededException { @@ -895,16 +931,14 @@ export namespace CertificateLimitExceededException { }); } +export enum ClientAuthenticationType { + SMART_CARD = "SmartCard", +} + /** *

Contains information about a computer account in a directory.

*/ export interface Computer { - /** - *

An array of Attribute objects containing the LDAP attributes that belong to the - * computer account.

- */ - ComputerAttributes?: Attribute[]; - /** *

The identifier of the computer.

*/ @@ -914,6 +948,12 @@ export interface Computer { *

The computer name.

*/ ComputerName?: string; + + /** + *

An array of Attribute objects containing the LDAP attributes that belong to the + * computer account.

+ */ + ComputerAttributes?: Attribute[]; } export namespace Computer { @@ -1009,41 +1049,41 @@ export enum DirectorySize { */ export interface ConnectDirectoryRequest { /** - *

A description for the directory.

+ *

The fully qualified name of the on-premises directory, such as + * corp.example.com.

*/ - Description?: string; + Name: string | undefined; /** - *

The size of the directory.

+ *

The NetBIOS name of the on-premises directory, such as CORP.

*/ - Size: DirectorySize | string | undefined; + ShortName?: string; /** - *

The NetBIOS name of the on-premises directory, such as CORP.

+ *

The password for the on-premises user account.

*/ - ShortName?: string; + Password: string | undefined; /** - *

A DirectoryConnectSettings object that contains additional information - * for the operation.

+ *

A description for the directory.

*/ - ConnectSettings: DirectoryConnectSettings | undefined; + Description?: string; /** - *

The tags to be assigned to AD Connector.

+ *

The size of the directory.

*/ - Tags?: Tag[]; + Size: DirectorySize | string | undefined; /** - *

The fully qualified name of the on-premises directory, such as - * corp.example.com.

+ *

A DirectoryConnectSettings object that contains additional information + * for the operation.

*/ - Name: string | undefined; + ConnectSettings: DirectoryConnectSettings | undefined; /** - *

The password for the on-premises user account.

+ *

The tags to be assigned to AD Connector.

*/ - Password: string | undefined; + Tags?: Tag[]; } export namespace ConnectDirectoryRequest { @@ -1078,14 +1118,14 @@ export interface DirectoryLimitExceededException extends __SmithyException, $Met name: "DirectoryLimitExceededException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace DirectoryLimitExceededException { @@ -1143,9 +1183,9 @@ export namespace CreateAliasResult { */ export interface CreateComputerRequest { /** - *

The fully-qualified distinguished name of the organizational unit to place the computer account in.

+ *

The identifier of the directory in which to create the computer account.

*/ - OrganizationalUnitDistinguishedName?: string; + DirectoryId: string | undefined; /** *

The name of the computer account.

@@ -1153,20 +1193,20 @@ export interface CreateComputerRequest { ComputerName: string | undefined; /** - *

The identifier of the directory in which to create the computer account.

+ *

A one-time password that is used to join the computer to the directory. You should generate a random, strong password to use for this parameter.

*/ - DirectoryId: string | undefined; + Password: string | undefined; /** - *

An array of Attribute objects that contain any LDAP attributes to apply to the - * computer account.

+ *

The fully-qualified distinguished name of the organizational unit to place the computer account in.

*/ - ComputerAttributes?: Attribute[]; + OrganizationalUnitDistinguishedName?: string; /** - *

A one-time password that is used to join the computer to the directory. You should generate a random, strong password to use for this parameter.

+ *

An array of Attribute objects that contain any LDAP attributes to apply to the + * computer account.

*/ - Password: string | undefined; + ComputerAttributes?: Attribute[]; } export namespace CreateComputerRequest { @@ -1196,11 +1236,6 @@ export namespace CreateComputerResult { *

Initiates the creation of a conditional forwarder for your AWS Directory Service for Microsoft Active Directory. Conditional forwarders are required in order to set up a trust relationship with another domain.

*/ export interface CreateConditionalForwarderRequest { - /** - *

The IP addresses of the remote DNS server associated with RemoteDomainName.

- */ - DnsIpAddrs: string[] | undefined; - /** *

The directory ID of the AWS directory for which you are creating the conditional forwarder.

*/ @@ -1210,6 +1245,11 @@ export interface CreateConditionalForwarderRequest { *

The fully qualified domain name (FQDN) of the remote domain with which you will set up a trust relationship.

*/ RemoteDomainName: string | undefined; + + /** + *

The IP addresses of the remote DNS server associated with RemoteDomainName.

+ */ + DnsIpAddrs: string[] | undefined; } export namespace CreateConditionalForwarderRequest { @@ -1234,25 +1274,9 @@ export namespace CreateConditionalForwarderResult { */ export interface CreateDirectoryRequest { /** - *

The tags to be assigned to the Simple AD directory.

- */ - Tags?: Tag[]; - - /** - *

A DirectoryVpcSettings object that contains additional information for - * the operation.

- */ - VpcSettings?: DirectoryVpcSettings; - - /** - *

A description for the directory.

- */ - Description?: string; - - /** - *

The size of the directory.

+ *

The fully qualified name for the directory, such as corp.example.com.

*/ - Size: DirectorySize | string | undefined; + Name: string | undefined; /** *

The NetBIOS name of the directory, such as CORP.

@@ -1290,9 +1314,25 @@ export interface CreateDirectoryRequest { Password: string | undefined; /** - *

The fully qualified name for the directory, such as corp.example.com.

+ *

A description for the directory.

*/ - Name: string | undefined; + Description?: string; + + /** + *

The size of the directory.

+ */ + Size: DirectorySize | string | undefined; + + /** + *

A DirectoryVpcSettings object that contains additional information for + * the operation.

+ */ + VpcSettings?: DirectoryVpcSettings; + + /** + *

The tags to be assigned to the Simple AD directory.

+ */ + Tags?: Tag[]; } export namespace CreateDirectoryRequest { @@ -1319,17 +1359,17 @@ export namespace CreateDirectoryResult { } export interface CreateLogSubscriptionRequest { - /** - *

The name of the CloudWatch log group where the real-time domain controller logs are - * forwarded.

- */ - LogGroupName: string | undefined; - /** *

Identifier of the directory to which you want to subscribe and receive real-time logs to * your specified CloudWatch log group.

*/ DirectoryId: string | undefined; + + /** + *

The name of the CloudWatch log group where the real-time domain controller logs are + * forwarded.

+ */ + LogGroupName: string | undefined; } export namespace CreateLogSubscriptionRequest { @@ -1378,17 +1418,6 @@ export enum DirectoryEdition { *

Creates an AWS Managed Microsoft AD directory.

*/ export interface CreateMicrosoftADRequest { - /** - *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

- */ - VpcSettings: DirectoryVpcSettings | undefined; - - /** - *

The password for the default administrative user named Admin.

- *

If you need to change the password for the administrator account, you can use the ResetUserPassword API call.

- */ - Password: string | undefined; - /** *

The fully qualified domain name for the AWS Managed Microsoft AD directory, such as * corp.example.com. This name will resolve inside your VPC only. It does not need @@ -1397,24 +1426,35 @@ export interface CreateMicrosoftADRequest { Name: string | undefined; /** - *

The tags to be assigned to the AWS Managed Microsoft AD directory.

+ *

The NetBIOS name for your domain, such as CORP. If you don't specify a NetBIOS name, it will default to the first part of your directory DNS. For example, CORP for the directory DNS corp.example.com.

*/ - Tags?: Tag[]; + ShortName?: string; + + /** + *

The password for the default administrative user named Admin.

+ *

If you need to change the password for the administrator account, you can use the ResetUserPassword API call.

+ */ + Password: string | undefined; /** *

A description for the directory. This label will appear on the AWS console Directory Details page after the directory is created.

*/ Description?: string; + /** + *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

+ */ + VpcSettings: DirectoryVpcSettings | undefined; + /** *

AWS Managed Microsoft AD is available in two editions: Standard and Enterprise. Enterprise is the default.

*/ Edition?: DirectoryEdition | string; /** - *

The NetBIOS name for your domain, such as CORP. If you don't specify a NetBIOS name, it will default to the first part of your directory DNS. For example, CORP for the directory DNS corp.example.com.

+ *

The tags to be assigned to the AWS Managed Microsoft AD directory.

*/ - ShortName?: string; + Tags?: Tag[]; } export namespace CreateMicrosoftADRequest { @@ -1445,14 +1485,14 @@ export namespace CreateMicrosoftADResult { */ export interface CreateSnapshotRequest { /** - *

The descriptive name to apply to the snapshot.

+ *

The identifier of the directory of which to take a snapshot.

*/ - Name?: string; + DirectoryId: string | undefined; /** - *

The identifier of the directory of which to take a snapshot.

+ *

The descriptive name to apply to the snapshot.

*/ - DirectoryId: string | undefined; + Name?: string; } export namespace CreateSnapshotRequest { @@ -1486,14 +1526,14 @@ export interface SnapshotLimitExceededException extends __SmithyException, $Meta name: "SnapshotLimitExceededException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace SnapshotLimitExceededException { @@ -1524,29 +1564,29 @@ export enum TrustType { */ export interface CreateTrustRequest { /** - *

The direction of the trust relationship.

+ *

The Directory ID of the AWS Managed Microsoft AD directory for which to establish the trust relationship.

*/ - TrustDirection: TrustDirection | string | undefined; + DirectoryId: string | undefined; /** - *

The trust password. The must be the same password that was used when creating the trust relationship on the external domain.

+ *

The Fully Qualified Domain Name (FQDN) of the external domain for which to create the trust relationship.

*/ - TrustPassword: string | undefined; + RemoteDomainName: string | undefined; /** - *

The Directory ID of the AWS Managed Microsoft AD directory for which to establish the trust relationship.

+ *

The trust password. The must be the same password that was used when creating the trust relationship on the external domain.

*/ - DirectoryId: string | undefined; + TrustPassword: string | undefined; /** - *

Optional parameter to enable selective authentication for the trust.

+ *

The direction of the trust relationship.

*/ - SelectiveAuth?: SelectiveAuth | string; + TrustDirection: TrustDirection | string | undefined; /** - *

The Fully Qualified Domain Name (FQDN) of the external domain for which to create the trust relationship.

+ *

The trust relationship type. Forest is the default.

*/ - RemoteDomainName: string | undefined; + TrustType?: TrustType | string; /** *

The IP addresses of the remote DNS server associated with RemoteDomainName.

@@ -1554,9 +1594,9 @@ export interface CreateTrustRequest { ConditionalForwarderIpAddrs?: string[]; /** - *

The trust relationship type. Forest is the default.

+ *

Optional parameter to enable selective authentication for the trust.

*/ - TrustType?: TrustType | string; + SelectiveAuth?: SelectiveAuth | string; } export namespace CreateTrustRequest { @@ -1738,14 +1778,14 @@ export namespace DeleteTrustResult { export interface DeregisterCertificateRequest { /** - *

The identifier of the certificate.

+ *

The identifier of the directory.

*/ - CertificateId: string | undefined; + DirectoryId: string | undefined; /** - *

The identifier of the directory.

+ *

The identifier of the certificate.

*/ - DirectoryId: string | undefined; + CertificateId: string | undefined; } export namespace DeregisterCertificateRequest { @@ -1831,14 +1871,14 @@ export namespace DescribeCertificateResult { */ export interface DescribeConditionalForwardersRequest { /** - *

The fully qualified domain names (FQDN) of the remote domains for which to get the list of associated conditional forwarders. If this member is null, all conditional forwarders are returned.

+ *

The directory ID for which to get the list of associated conditional forwarders.

*/ - RemoteDomainNames?: string[]; + DirectoryId: string | undefined; /** - *

The directory ID for which to get the list of associated conditional forwarders.

+ *

The fully qualified domain names (FQDN) of the remote domains for which to get the list of associated conditional forwarders. If this member is null, all conditional forwarders are returned.

*/ - DirectoryId: string | undefined; + RemoteDomainNames?: string[]; } export namespace DescribeConditionalForwardersRequest { @@ -1867,6 +1907,13 @@ export namespace DescribeConditionalForwardersResult { *

Contains the inputs for the DescribeDirectories operation.

*/ export interface DescribeDirectoriesRequest { + /** + *

A list of identifiers of the directories for which to obtain the information. If this + * member is null, all directories that belong to the current account are returned.

+ *

An empty list results in an InvalidParameterException being thrown.

+ */ + DirectoryIds?: string[]; + /** *

The DescribeDirectoriesResult.NextToken value from a previous call to DescribeDirectories. Pass null if this is the first call.

*/ @@ -1877,13 +1924,6 @@ export interface DescribeDirectoriesRequest { * is specified by the limitations of the operation.

*/ Limit?: number; - - /** - *

A list of identifiers of the directories for which to obtain the information. If this - * member is null, all directories that belong to the current account are returned.

- *

An empty list results in an InvalidParameterException being thrown.

- */ - DirectoryIds?: string[]; } export namespace DescribeDirectoriesRequest { @@ -1896,11 +1936,6 @@ export namespace DescribeDirectoriesRequest { *

Contains information about an AD Connector directory.

*/ export interface DirectoryConnectSettingsDescription { - /** - *

A list of the Availability Zones that the directory is in.

- */ - AvailabilityZones?: string[]; - /** *

The identifier of the VPC that the AD Connector is in.

*/ @@ -1917,14 +1952,19 @@ export interface DirectoryConnectSettingsDescription { CustomerUserName?: string; /** - *

The IP addresses of the AD Connector servers.

+ *

The security group identifier for the AD Connector directory.

*/ - ConnectIps?: string[]; + SecurityGroupId?: string; /** - *

The security group identifier for the AD Connector directory.

+ *

A list of the Availability Zones that the directory is in.

*/ - SecurityGroupId?: string; + AvailabilityZones?: string[]; + + /** + *

The IP addresses of the AD Connector servers.

+ */ + ConnectIps?: string[]; } export namespace DirectoryConnectSettingsDescription { @@ -1945,19 +1985,19 @@ export enum RadiusAuthenticationProtocol { */ export interface RadiusSettings { /** - *

Not currently used.

+ *

An array of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.

*/ - DisplayLabel?: string; + RadiusServers?: string[]; /** - *

Required for enabling RADIUS on the directory.

+ *

The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic over this port from the AWS Directory Service servers.

*/ - SharedSecret?: string; + RadiusPort?: number; /** - *

The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic over this port from the AWS Directory Service servers.

+ *

The amount of time, in seconds, to wait for the RADIUS server to respond.

*/ - RadiusPort?: number; + RadiusTimeout?: number; /** *

The maximum number of times that communication with the RADIUS server is attempted.

@@ -1965,24 +2005,24 @@ export interface RadiusSettings { RadiusRetries?: number; /** - *

The protocol specified for your RADIUS endpoints.

+ *

Required for enabling RADIUS on the directory.

*/ - AuthenticationProtocol?: RadiusAuthenticationProtocol | string; + SharedSecret?: string; /** - *

Not currently used.

+ *

The protocol specified for your RADIUS endpoints.

*/ - UseSameUsername?: boolean; + AuthenticationProtocol?: RadiusAuthenticationProtocol | string; /** - *

The amount of time, in seconds, to wait for the RADIUS server to respond.

+ *

Not currently used.

*/ - RadiusTimeout?: number; + DisplayLabel?: string; /** - *

An array of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.

+ *

Not currently used.

*/ - RadiusServers?: string[]; + UseSameUsername?: boolean; } export namespace RadiusSettings { @@ -2003,24 +2043,24 @@ export enum RadiusStatus { */ export interface DirectoryVpcSettingsDescription { /** - *

The domain controller security group identifier for the directory.

+ *

The identifier of the VPC that the directory is in.

*/ - SecurityGroupId?: string; + VpcId?: string; /** - *

The identifier of the VPC that the directory is in.

+ *

The identifiers of the subnets for the directory servers.

*/ - VpcId?: string; + SubnetIds?: string[]; /** - *

The list of Availability Zones that the directory is in.

+ *

The domain controller security group identifier for the directory.

*/ - AvailabilityZones?: string[]; + SecurityGroupId?: string; /** - *

The identifiers of the subnets for the directory servers.

+ *

The list of Availability Zones that the directory is in.

*/ - SubnetIds?: string[]; + AvailabilityZones?: string[]; } export namespace DirectoryVpcSettingsDescription { @@ -2034,22 +2074,6 @@ export namespace DirectoryVpcSettingsDescription { * consumer account.

*/ export interface OwnerDirectoryDescription { - /** - *

A RadiusSettings object that contains information about the RADIUS - * server.

- */ - RadiusSettings?: RadiusSettings; - - /** - *

IP address of the directory’s domain controllers.

- */ - DnsIpAddrs?: string[]; - - /** - *

Information about the status of the RADIUS server.

- */ - RadiusStatus?: RadiusStatus | string; - /** *

Identifier of the AWS Managed Microsoft AD directory in the directory owner * account.

@@ -2061,10 +2085,26 @@ export interface OwnerDirectoryDescription { */ AccountId?: string; + /** + *

IP address of the directory’s domain controllers.

+ */ + DnsIpAddrs?: string[]; + /** *

Information about the VPC settings for the directory.

*/ VpcSettings?: DirectoryVpcSettingsDescription; + + /** + *

A RadiusSettings object that contains information about the RADIUS + * server.

+ */ + RadiusSettings?: RadiusSettings; + + /** + *

Information about the status of the RADIUS server.

+ */ + RadiusStatus?: RadiusStatus | string; } export namespace OwnerDirectoryDescription { @@ -2079,7 +2119,7 @@ export namespace OwnerDirectoryDescription { */ export interface RegionsInfo { /** - *

The Region from where the AWS Managed Microsoft AD directory was originally created.

+ *

The Region where the AWS Managed Microsoft AD directory was originally created.

*/ PrimaryRegion?: string; @@ -2121,31 +2161,29 @@ export enum DirectoryType { */ export interface DirectoryDescription { /** - *

Current directory status of the shared AWS Managed Microsoft AD directory.

+ *

The directory identifier.

*/ - ShareStatus?: ShareStatus | string; + DirectoryId?: string; /** - *

The current stage of the directory.

+ *

The fully qualified name of the directory.

*/ - Stage?: DirectoryStage | string; + Name?: string; /** - *

A DirectoryConnectSettingsDescription object that contains additional - * information about an AD Connector directory. This member is only present if the directory is - * an AD Connector directory.

+ *

The short name of the directory.

*/ - ConnectSettings?: DirectoryConnectSettingsDescription; + ShortName?: string; /** - *

Specifies when the directory was created.

+ *

The directory size.

*/ - LaunchTime?: Date; + Size?: DirectorySize | string; /** - *

Additional information about the directory stage.

+ *

The edition associated with this directory.

*/ - StageReason?: string; + Edition?: DirectoryEdition | string; /** *

The alias for the directory. If no alias has been created for the directory, the alias is @@ -2153,18 +2191,6 @@ export interface DirectoryDescription { */ Alias?: string; - /** - *

The desired number of domain controllers in the directory if the directory is Microsoft AD.

- */ - DesiredNumberOfDomainControllers?: number; - - /** - *

The method used when sharing a directory to determine whether the directory should be - * shared within your AWS organization (ORGANIZATIONS) or with any AWS account by - * sending a shared directory request (HANDSHAKE).

- */ - ShareMethod?: ShareMethod | string; - /** *

The access URL for the directory, such as * http://.awsapps.com. If no alias has been created for the @@ -2174,47 +2200,46 @@ export interface DirectoryDescription { AccessUrl?: string; /** - *

The directory size.

- */ - Size?: DirectorySize | string; - - /** - *

A directory share request that is sent by the directory owner to the directory consumer. - * The request includes a typed message to help the directory consumer administrator determine - * whether to approve or reject the share invitation.

+ *

The description for the directory.

*/ - ShareNotes?: string; + Description?: string; /** - *

The directory size.

+ *

The IP addresses of the DNS servers for the directory. For a Simple AD or Microsoft AD + * directory, these are the IP addresses of the Simple AD or Microsoft AD directory servers. For + * an AD Connector directory, these are the IP addresses of the DNS servers or domain controllers + * in the on-premises directory to which the AD Connector is connected.

*/ - Type?: DirectoryType | string; + DnsIpAddrs?: string[]; /** - *

The short name of the directory.

+ *

The current stage of the directory.

*/ - ShortName?: string; + Stage?: DirectoryStage | string; /** - *

The status of the RADIUS MFA server connection.

+ *

Current directory status of the shared AWS Managed Microsoft AD directory.

*/ - RadiusStatus?: RadiusStatus | string; + ShareStatus?: ShareStatus | string; /** - *

Describes the AWS Managed Microsoft AD directory in the directory owner account.

+ *

The method used when sharing a directory to determine whether the directory should be + * shared within your AWS organization (ORGANIZATIONS) or with any AWS account by + * sending a shared directory request (HANDSHAKE).

*/ - OwnerDirectoryDescription?: OwnerDirectoryDescription; + ShareMethod?: ShareMethod | string; /** - *

The description for the directory.

+ *

A directory share request that is sent by the directory owner to the directory consumer. + * The request includes a typed message to help the directory consumer administrator determine + * whether to approve or reject the share invitation.

*/ - Description?: string; + ShareNotes?: string; /** - *

A RadiusSettings object that contains information about the RADIUS - * server configured for this directory.

+ *

Specifies when the directory was created.

*/ - RadiusSettings?: RadiusSettings; + LaunchTime?: Date; /** *

The date and time that the stage was last updated.

@@ -2222,9 +2247,9 @@ export interface DirectoryDescription { StageLastUpdatedDateTime?: Date; /** - *

The fully qualified name of the directory.

+ *

The directory size.

*/ - Name?: string; + Type?: DirectoryType | string; /** *

A DirectoryVpcSettingsDescription object that contains additional @@ -2234,14 +2259,27 @@ export interface DirectoryDescription { VpcSettings?: DirectoryVpcSettingsDescription; /** - *

The directory identifier.

+ *

A DirectoryConnectSettingsDescription object that contains additional + * information about an AD Connector directory. This member is only present if the directory is + * an AD Connector directory.

*/ - DirectoryId?: string; + ConnectSettings?: DirectoryConnectSettingsDescription; /** - *

The edition associated with this directory.

+ *

A RadiusSettings object that contains information about the RADIUS + * server configured for this directory.

*/ - Edition?: DirectoryEdition | string; + RadiusSettings?: RadiusSettings; + + /** + *

The status of the RADIUS MFA server connection.

+ */ + RadiusStatus?: RadiusStatus | string; + + /** + *

Additional information about the directory stage.

+ */ + StageReason?: string; /** *

Indicates if single sign-on is enabled for the directory. For more information, see EnableSso and DisableSso.

@@ -2249,27 +2287,29 @@ export interface DirectoryDescription { SsoEnabled?: boolean; /** - *

Lists the Regions where the directory has replicated.

+ *

The desired number of domain controllers in the directory if the directory is Microsoft AD.

*/ - RegionsInfo?: RegionsInfo; + DesiredNumberOfDomainControllers?: number; /** - *

The IP addresses of the DNS servers for the directory. For a Simple AD or Microsoft AD - * directory, these are the IP addresses of the Simple AD or Microsoft AD directory servers. For - * an AD Connector directory, these are the IP addresses of the DNS servers or domain controllers - * in the on-premises directory to which the AD Connector is connected.

+ *

Describes the AWS Managed Microsoft AD directory in the directory owner account.

*/ - DnsIpAddrs?: string[]; + OwnerDirectoryDescription?: OwnerDirectoryDescription; + + /** + *

Lists the Regions where the directory has replicated.

+ */ + RegionsInfo?: RegionsInfo; } export namespace DirectoryDescription { export const filterSensitiveLog = (obj: DirectoryDescription): any => ({ ...obj, ...(obj.ShareNotes && { ShareNotes: SENSITIVE_STRING }), + ...(obj.RadiusSettings && { RadiusSettings: RadiusSettings.filterSensitiveLog(obj.RadiusSettings) }), ...(obj.OwnerDirectoryDescription && { OwnerDirectoryDescription: OwnerDirectoryDescription.filterSensitiveLog(obj.OwnerDirectoryDescription), }), - ...(obj.RadiusSettings && { RadiusSettings: RadiusSettings.filterSensitiveLog(obj.RadiusSettings) }), }); } @@ -2277,13 +2317,6 @@ export namespace DirectoryDescription { *

Contains the results of the DescribeDirectories operation.

*/ export interface DescribeDirectoriesResult { - /** - *

If not null, more results are available. Pass this value for the NextToken - * parameter in a subsequent call to DescribeDirectories to retrieve the next - * set of items.

- */ - NextToken?: string; - /** *

The list of DirectoryDescription objects that were retrieved.

*

It is possible that this list contains less than the number of items specified in the @@ -2292,6 +2325,13 @@ export interface DescribeDirectoriesResult { * exceeded.

*/ DirectoryDescriptions?: DirectoryDescription[]; + + /** + *

If not null, more results are available. Pass this value for the NextToken + * parameter in a subsequent call to DescribeDirectories to retrieve the next + * set of items.

+ */ + NextToken?: string; } export namespace DescribeDirectoriesResult { @@ -2310,14 +2350,14 @@ export interface InvalidNextTokenException extends __SmithyException, $MetadataB name: "InvalidNextTokenException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace InvalidNextTokenException { @@ -2328,9 +2368,9 @@ export namespace InvalidNextTokenException { export interface DescribeDomainControllersRequest { /** - *

The DescribeDomainControllers.NextToken value from a previous call to DescribeDomainControllers. Pass null if this is the first call.

+ *

Identifier of the directory for which to retrieve the domain controller information.

*/ - NextToken?: string; + DirectoryId: string | undefined; /** *

A list of identifiers for the domain controllers whose information will be provided.

@@ -2338,9 +2378,9 @@ export interface DescribeDomainControllersRequest { DomainControllerIds?: string[]; /** - *

Identifier of the directory for which to retrieve the domain controller information.

+ *

The DescribeDomainControllers.NextToken value from a previous call to DescribeDomainControllers. Pass null if this is the first call.

*/ - DirectoryId: string | undefined; + NextToken?: string; /** *

The maximum number of items to return.

@@ -2368,25 +2408,20 @@ export enum DomainControllerStatus { *

Contains information about the domain controllers for a specified directory.

*/ export interface DomainController { - /** - *

The IP address of the domain controller.

- */ - DnsIpAddr?: string; - /** *

Identifier of the directory where the domain controller resides.

*/ DirectoryId?: string; /** - *

Specifies when the domain controller was created.

+ *

Identifies a specific domain controller in the directory.

*/ - LaunchTime?: Date; + DomainControllerId?: string; /** - *

The date and time that the status was last updated.

+ *

The IP address of the domain controller.

*/ - StatusLastUpdatedDateTime?: Date; + DnsIpAddr?: string; /** *

The identifier of the VPC that contains the domain controller.

@@ -2394,14 +2429,14 @@ export interface DomainController { VpcId?: string; /** - *

Identifies a specific domain controller in the directory.

+ *

Identifier of the subnet in the VPC that contains the domain controller.

*/ - DomainControllerId?: string; + SubnetId?: string; /** - *

Identifier of the subnet in the VPC that contains the domain controller.

+ *

The Availability Zone where the domain controller is located.

*/ - SubnetId?: string; + AvailabilityZone?: string; /** *

The status of the domain controller.

@@ -2409,14 +2444,19 @@ export interface DomainController { Status?: DomainControllerStatus | string; /** - *

The Availability Zone where the domain controller is located.

+ *

A description of the domain controller state.

*/ - AvailabilityZone?: string; + StatusReason?: string; /** - *

A description of the domain controller state.

+ *

Specifies when the domain controller was created.

*/ - StatusReason?: string; + LaunchTime?: Date; + + /** + *

The date and time that the status was last updated.

+ */ + StatusLastUpdatedDateTime?: Date; } export namespace DomainController { @@ -2448,15 +2488,15 @@ export namespace DescribeDomainControllersResult { */ export interface DescribeEventTopicsRequest { /** - *

A list of SNS topic names for which to obtain the information. If this member is null, all associations for the specified Directory ID are returned.

- *

An empty list results in an InvalidParameterException being thrown.

+ *

The Directory ID for which to get the list of associated SNS topics. If this member is null, associations for all Directory IDs are returned.

*/ - TopicNames?: string[]; + DirectoryId?: string; /** - *

The Directory ID for which to get the list of associated SNS topics. If this member is null, associations for all Directory IDs are returned.

+ *

A list of SNS topic names for which to obtain the information. If this member is null, all associations for the specified Directory ID are returned.

+ *

An empty list results in an InvalidParameterException being thrown.

*/ - DirectoryId?: string; + TopicNames?: string[]; } export namespace DescribeEventTopicsRequest { @@ -2482,24 +2522,24 @@ export interface EventTopic { DirectoryId?: string; /** - *

The SNS topic ARN (Amazon Resource Name).

+ *

The name of an AWS SNS topic the receives status messages from the directory.

*/ - TopicArn?: string; + TopicName?: string; /** - *

The topic registration status.

+ *

The SNS topic ARN (Amazon Resource Name).

*/ - Status?: TopicStatus | string; + TopicArn?: string; /** - *

The name of an AWS SNS topic the receives status messages from the directory.

+ *

The date and time of when you associated your directory with the SNS topic.

*/ - TopicName?: string; + CreatedDateTime?: Date; /** - *

The date and time of when you associated your directory with the SNS topic.

+ *

The topic registration status.

*/ - CreatedDateTime?: Date; + Status?: TopicStatus | string; } export namespace EventTopic { @@ -2530,9 +2570,9 @@ export enum LDAPSType { export interface DescribeLDAPSSettingsRequest { /** - *

The type of next token used for pagination.

+ *

The identifier of the directory.

*/ - NextToken?: string; + DirectoryId: string | undefined; /** *

The type of LDAP security to enable. Currently only the value Client is @@ -2541,9 +2581,9 @@ export interface DescribeLDAPSSettingsRequest { Type?: LDAPSType | string; /** - *

The identifier of the directory.

+ *

The type of next token used for pagination.

*/ - DirectoryId: string | undefined; + NextToken?: string; /** *

Specifies the number of items that should be displayed on one page.

@@ -2569,9 +2609,9 @@ export enum LDAPSStatus { */ export interface LDAPSSettingInfo { /** - *

The date and time when the LDAPS settings were last updated.

+ *

The state of the LDAPS settings.

*/ - LastUpdatedDateTime?: Date; + LDAPSStatus?: LDAPSStatus | string; /** *

Describes a state change for LDAPS.

@@ -2579,9 +2619,9 @@ export interface LDAPSSettingInfo { LDAPSStatusReason?: string; /** - *

The state of the LDAPS settings.

+ *

The date and time when the LDAPS settings were last updated.

*/ - LDAPSStatus?: LDAPSStatus | string; + LastUpdatedDateTime?: Date; } export namespace LDAPSSettingInfo { @@ -2622,7 +2662,7 @@ export interface DescribeRegionsRequest { RegionName?: string; /** - *

The DescribeRegionsResult.NextToken value from a previous call to + *

The DescribeRegionsResult.NextToken value from a previous call to * DescribeRegions. Pass null if this is the first call.

*/ NextToken?: string; @@ -2640,18 +2680,13 @@ export enum RegionType { } /** - *

The replicated regional information for a directory.

+ *

The replicated Region information for a directory.

*/ export interface RegionDescription { /** - *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

- */ - VpcSettings?: DirectoryVpcSettings; - - /** - *

The date and time that the Region status was last updated.

+ *

The identifier of the directory.

*/ - StatusLastUpdatedDateTime?: Date; + DirectoryId?: string; /** *

The name of the Region. For example, us-east-1.

@@ -2659,7 +2694,7 @@ export interface RegionDescription { RegionName?: string; /** - *

Specifies if the Region is the primary Region or an additional Region.

+ *

Specifies whether the Region is the primary Region or an additional Region.

*/ RegionType?: RegionType | string; @@ -2668,6 +2703,11 @@ export interface RegionDescription { */ Status?: DirectoryStage | string; + /** + *

Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

+ */ + VpcSettings?: DirectoryVpcSettings; + /** *

The desired number of domain controllers in the specified Region for the specified directory.

*/ @@ -2679,9 +2719,9 @@ export interface RegionDescription { LaunchTime?: Date; /** - *

The identifier of the directory.

+ *

The date and time that the Region status was last updated.

*/ - DirectoryId?: string; + StatusLastUpdatedDateTime?: Date; /** *

The date and time that the Region description was last updated.

@@ -2697,12 +2737,12 @@ export namespace RegionDescription { export interface DescribeRegionsResult { /** - *

List of regional information related to the directory per replicated Region.

+ *

List of Region information related to the directory for each replicated Region.

*/ RegionsDescription?: RegionDescription[]; /** - *

If not null, more results are available. Pass this value for the NextToken parameter + *

If not null, more results are available. Pass this value for the NextToken parameter * in a subsequent call to DescribeRegions to retrieve the next set of items.

*/ NextToken?: string; @@ -2716,9 +2756,9 @@ export namespace DescribeRegionsResult { export interface DescribeSharedDirectoriesRequest { /** - *

The number of shared directories to return in the response object.

+ *

Returns the identifier of the directory in the directory owner account.

*/ - Limit?: number; + OwnerDirectoryId: string | undefined; /** *

A list of identifiers of all shared directories in your account.

@@ -2732,9 +2772,9 @@ export interface DescribeSharedDirectoriesRequest { NextToken?: string; /** - *

Returns the identifier of the directory in the directory owner account.

+ *

The number of shared directories to return in the response object.

*/ - OwnerDirectoryId: string | undefined; + Limit?: number; } export namespace DescribeSharedDirectoriesRequest { @@ -2745,15 +2785,15 @@ export namespace DescribeSharedDirectoriesRequest { export interface DescribeSharedDirectoriesResult { /** - *

If not null, token that indicates that more results are available. Pass this value for the - * NextToken parameter in a subsequent call to DescribeSharedDirectories to retrieve the next set of items.

+ *

A list of all shared directories in your account.

*/ - NextToken?: string; + SharedDirectories?: SharedDirectory[]; /** - *

A list of all shared directories in your account.

+ *

If not null, token that indicates that more results are available. Pass this value for the + * NextToken parameter in a subsequent call to DescribeSharedDirectories to retrieve the next set of items.

*/ - SharedDirectories?: SharedDirectory[]; + NextToken?: string; } export namespace DescribeSharedDirectoriesResult { @@ -2769,6 +2809,11 @@ export namespace DescribeSharedDirectoriesResult { *

Contains the inputs for the DescribeSnapshots operation.

*/ export interface DescribeSnapshotsRequest { + /** + *

The identifier of the directory for which to retrieve snapshot information.

+ */ + DirectoryId?: string; + /** *

A list of identifiers of the snapshots to obtain the information for. If this member is * null or empty, all snapshots are returned using the Limit and NextToken @@ -2776,21 +2821,16 @@ export interface DescribeSnapshotsRequest { */ SnapshotIds?: string[]; - /** - *

The maximum number of objects to return.

- */ - Limit?: number; - - /** - *

The identifier of the directory for which to retrieve snapshot information.

- */ - DirectoryId?: string; - /** *

The DescribeSnapshotsResult.NextToken value from a previous call to * DescribeSnapshots. Pass null if this is the first call.

*/ NextToken?: string; + + /** + *

The maximum number of objects to return.

+ */ + Limit?: number; } export namespace DescribeSnapshotsRequest { @@ -2815,24 +2855,24 @@ export enum SnapshotType { */ export interface Snapshot { /** - *

The descriptive name of the snapshot.

+ *

The directory identifier.

*/ - Name?: string; + DirectoryId?: string; /** - *

The snapshot type.

+ *

The snapshot identifier.

*/ - Type?: SnapshotType | string; + SnapshotId?: string; /** - *

The snapshot identifier.

+ *

The snapshot type.

*/ - SnapshotId?: string; + Type?: SnapshotType | string; /** - *

The directory identifier.

+ *

The descriptive name of the snapshot.

*/ - DirectoryId?: string; + Name?: string; /** *

The snapshot status.

@@ -2881,12 +2921,6 @@ export namespace DescribeSnapshotsResult { *

Describes the trust relationships for a particular AWS Managed Microsoft AD directory. If no input parameters are are provided, such as directory ID or trust ID, this request describes all the trust relationships.

*/ export interface DescribeTrustsRequest { - /** - *

The DescribeTrustsResult.NextToken value from a previous call to - * DescribeTrusts. Pass null if this is the first call.

- */ - NextToken?: string; - /** *

The Directory ID of the AWS directory that is a part of the requested trust relationship.

*/ @@ -2898,6 +2932,12 @@ export interface DescribeTrustsRequest { */ TrustIds?: string[]; + /** + *

The DescribeTrustsResult.NextToken value from a previous call to + * DescribeTrusts. Pass null if this is the first call.

+ */ + NextToken?: string; + /** *

The maximum number of objects to return.

*/ @@ -2929,19 +2969,24 @@ export enum TrustState { */ export interface Trust { /** - *

The date and time that the trust relationship was last updated.

+ *

The Directory ID of the AWS directory involved in the trust relationship.

*/ - LastUpdatedDateTime?: Date; + DirectoryId?: string; /** - *

The date and time that the TrustState was last updated.

+ *

The unique ID of the trust relationship.

*/ - StateLastUpdatedDateTime?: Date; + TrustId?: string; /** - *

The reason for the TrustState.

+ *

The Fully Qualified Domain Name (FQDN) of the external domain involved in the trust relationship.

*/ - TrustStateReason?: string; + RemoteDomainName?: string; + + /** + *

The trust relationship type. Forest is the default.

+ */ + TrustType?: TrustType | string; /** *

The trust relationship direction.

@@ -2949,39 +2994,34 @@ export interface Trust { TrustDirection?: TrustDirection | string; /** - *

The Fully Qualified Domain Name (FQDN) of the external domain involved in the trust relationship.

+ *

The trust relationship state.

*/ - RemoteDomainName?: string; + TrustState?: TrustState | string; /** - *

The unique ID of the trust relationship.

+ *

The date and time that the trust relationship was created.

*/ - TrustId?: string; + CreatedDateTime?: Date; /** - *

The trust relationship type. Forest is the default.

+ *

The date and time that the trust relationship was last updated.

*/ - TrustType?: TrustType | string; + LastUpdatedDateTime?: Date; /** - *

The Directory ID of the AWS directory involved in the trust relationship.

+ *

The date and time that the TrustState was last updated.

*/ - DirectoryId?: string; + StateLastUpdatedDateTime?: Date; /** - *

The trust relationship state.

+ *

The reason for the TrustState.

*/ - TrustState?: TrustState | string; + TrustStateReason?: string; /** *

Current state of selective authentication for the trust.

*/ SelectiveAuth?: SelectiveAuth | string; - - /** - *

The date and time that the trust relationship was created.

- */ - CreatedDateTime?: Date; } export namespace Trust { @@ -3021,24 +3061,24 @@ export namespace DescribeTrustsResult { */ export interface DirectoryLimits { /** - *

The maximum number of connected directories allowed in the Region.

+ *

The maximum number of cloud directories allowed in the Region.

*/ - ConnectedDirectoriesLimit?: number; + CloudOnlyDirectoriesLimit?: number; /** - *

The maximum number of AWS Managed Microsoft AD directories allowed in the region.

+ *

The current number of cloud directories in the Region.

*/ - CloudOnlyMicrosoftADLimit?: number; + CloudOnlyDirectoriesCurrentCount?: number; /** - *

The current number of connected directories in the Region.

+ *

Indicates if the cloud directory limit has been reached.

*/ - ConnectedDirectoriesCurrentCount?: number; + CloudOnlyDirectoriesLimitReached?: boolean; /** - *

Indicates if the AWS Managed Microsoft AD directory limit has been reached.

+ *

The maximum number of AWS Managed Microsoft AD directories allowed in the region.

*/ - CloudOnlyMicrosoftADLimitReached?: boolean; + CloudOnlyMicrosoftADLimit?: number; /** *

The current number of AWS Managed Microsoft AD directories in the region.

@@ -3046,24 +3086,24 @@ export interface DirectoryLimits { CloudOnlyMicrosoftADCurrentCount?: number; /** - *

Indicates if the connected directory limit has been reached.

+ *

Indicates if the AWS Managed Microsoft AD directory limit has been reached.

*/ - ConnectedDirectoriesLimitReached?: boolean; + CloudOnlyMicrosoftADLimitReached?: boolean; /** - *

The maximum number of cloud directories allowed in the Region.

+ *

The maximum number of connected directories allowed in the Region.

*/ - CloudOnlyDirectoriesLimit?: number; + ConnectedDirectoriesLimit?: number; /** - *

The current number of cloud directories in the Region.

+ *

The current number of connected directories in the Region.

*/ - CloudOnlyDirectoriesCurrentCount?: number; + ConnectedDirectoriesCurrentCount?: number; /** - *

Indicates if the cloud directory limit has been reached.

+ *

Indicates if the connected directory limit has been reached.

*/ - CloudOnlyDirectoriesLimitReached?: boolean; + ConnectedDirectoriesLimitReached?: boolean; } export namespace DirectoryLimits { @@ -3078,19 +3118,68 @@ export namespace DirectoryLimits { export interface DirectoryNotSharedException extends __SmithyException, $MetadataBearer { name: "DirectoryNotSharedException"; $fault: "client"; + /** + *

The descriptive message for the exception.

+ */ + Message?: string; + /** *

The AWS request identifier.

*/ RequestId?: string; +} + +export namespace DirectoryNotSharedException { + export const filterSensitiveLog = (obj: DirectoryNotSharedException): any => ({ + ...obj, + }); +} + +export interface DisableClientAuthenticationRequest { + /** + *

Disable client authentication in a specified directory for smart cards.

+ */ + DirectoryId: string | undefined; + + /** + *

Disable the type of client authentication request.

+ */ + Type: ClientAuthenticationType | string | undefined; +} + +export namespace DisableClientAuthenticationRequest { + export const filterSensitiveLog = (obj: DisableClientAuthenticationRequest): any => ({ + ...obj, + }); +} + +export interface DisableClientAuthenticationResult {} + +export namespace DisableClientAuthenticationResult { + export const filterSensitiveLog = (obj: DisableClientAuthenticationResult): any => ({ + ...obj, + }); +} +/** + *

The client authorization was invalid.

+ */ +export interface InvalidClientAuthStatusException extends __SmithyException, $MetadataBearer { + name: "InvalidClientAuthStatusException"; + $fault: "client"; /** *

The descriptive message for the exception.

*/ Message?: string; + + /** + *

The AWS request identifier.

+ */ + RequestId?: string; } -export namespace DirectoryNotSharedException { - export const filterSensitiveLog = (obj: DirectoryNotSharedException): any => ({ +export namespace InvalidClientAuthStatusException { + export const filterSensitiveLog = (obj: InvalidClientAuthStatusException): any => ({ ...obj, }); } @@ -3130,14 +3219,14 @@ export interface InvalidLDAPSStatusException extends __SmithyException, $Metadat name: "InvalidLDAPSStatusException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace InvalidLDAPSStatusException { @@ -3177,12 +3266,6 @@ export namespace DisableRadiusResult { *

Contains the inputs for the DisableSso operation.

*/ export interface DisableSsoRequest { - /** - *

The password of an alternate account to use to disable single-sign on. This is only used - * for AD Connector directories. For more information, see the UserName parameter.

- */ - Password?: string; - /** *

The identifier of the directory for which to disable single-sign on.

*/ @@ -3196,6 +3279,12 @@ export interface DisableSsoRequest { * the service. The AD Connector service account is not changed.

*/ UserName?: string; + + /** + *

The password of an alternate account to use to disable single-sign on. This is only used + * for AD Connector directories. For more information, see the UserName parameter.

+ */ + Password?: string; } export namespace DisableSsoRequest { @@ -3216,29 +3305,28 @@ export namespace DisableSsoResult { }); } -export interface EnableLDAPSRequest { +export interface EnableClientAuthenticationRequest { /** - *

The identifier of the directory.

+ *

Enable client authentication in a specified directory for smart cards.

*/ DirectoryId: string | undefined; /** - *

The type of LDAP security to enable. Currently only the value Client is - * supported.

+ *

Enable the type of client authentication request.

*/ - Type: LDAPSType | string | undefined; + Type: ClientAuthenticationType | string | undefined; } -export namespace EnableLDAPSRequest { - export const filterSensitiveLog = (obj: EnableLDAPSRequest): any => ({ +export namespace EnableClientAuthenticationRequest { + export const filterSensitiveLog = (obj: EnableClientAuthenticationRequest): any => ({ ...obj, }); } -export interface EnableLDAPSResult {} +export interface EnableClientAuthenticationResult {} -export namespace EnableLDAPSResult { - export const filterSensitiveLog = (obj: EnableLDAPSResult): any => ({ +export namespace EnableClientAuthenticationResult { + export const filterSensitiveLog = (obj: EnableClientAuthenticationResult): any => ({ ...obj, }); } @@ -3258,11 +3346,38 @@ export interface NoAvailableCertificateException extends __SmithyException, $Met /** *

The AWS request identifier.

*/ - RequestId?: string; + RequestId?: string; +} + +export namespace NoAvailableCertificateException { + export const filterSensitiveLog = (obj: NoAvailableCertificateException): any => ({ + ...obj, + }); +} + +export interface EnableLDAPSRequest { + /** + *

The identifier of the directory.

+ */ + DirectoryId: string | undefined; + + /** + *

The type of LDAP security to enable. Currently only the value Client is + * supported.

+ */ + Type: LDAPSType | string | undefined; } -export namespace NoAvailableCertificateException { - export const filterSensitiveLog = (obj: NoAvailableCertificateException): any => ({ +export namespace EnableLDAPSRequest { + export const filterSensitiveLog = (obj: EnableLDAPSRequest): any => ({ + ...obj, + }); +} + +export interface EnableLDAPSResult {} + +export namespace EnableLDAPSResult { + export const filterSensitiveLog = (obj: EnableLDAPSResult): any => ({ ...obj, }); } @@ -3392,14 +3507,14 @@ export namespace GetSnapshotLimitsRequest { */ export interface SnapshotLimits { /** - *

The current number of manual snapshots of the directory.

+ *

The maximum number of manual snapshots allowed.

*/ - ManualSnapshotsCurrentCount?: number; + ManualSnapshotsLimit?: number; /** - *

The maximum number of manual snapshots allowed.

+ *

The current number of manual snapshots of the directory.

*/ - ManualSnapshotsLimit?: number; + ManualSnapshotsCurrentCount?: number; /** *

Indicates if the manual snapshot limit has been reached.

@@ -3432,9 +3547,9 @@ export namespace GetSnapshotLimitsResult { export interface ListCertificatesRequest { /** - *

The number of items that should show up on one page

+ *

The identifier of the directory.

*/ - Limit?: number; + DirectoryId: string | undefined; /** *

A token for requesting another page of certificates if the NextToken response @@ -3445,9 +3560,9 @@ export interface ListCertificatesRequest { NextToken?: string; /** - *

The identifier of the directory.

+ *

The number of items that should show up on one page

*/ - DirectoryId: string | undefined; + Limit?: number; } export namespace ListCertificatesRequest { @@ -3477,6 +3592,11 @@ export namespace ListCertificatesResult { } export interface ListIpRoutesRequest { + /** + *

Identifier (ID) of the directory for which you want to retrieve the IP addresses.

+ */ + DirectoryId: string | undefined; + /** *

The ListIpRoutes.NextToken value from a previous call to * ListIpRoutes. Pass null if this is the first call.

@@ -3487,11 +3607,6 @@ export interface ListIpRoutesRequest { *

Maximum number of items to return. If this value is zero, the maximum number of items is specified by the limitations of the operation.

*/ Limit?: number; - - /** - *

Identifier (ID) of the directory for which you want to retrieve the IP addresses.

- */ - DirectoryId: string | undefined; } export namespace ListIpRoutesRequest { @@ -3514,14 +3629,14 @@ export enum IpRouteStatusMsg { */ export interface IpRouteInfo { /** - *

IP address block in the IpRoute.

+ *

Identifier (ID) of the directory associated with the IP addresses.

*/ - CidrIp?: string; + DirectoryId?: string; /** - *

The date and time the address block was added to the directory.

+ *

IP address block in the IpRoute.

*/ - AddedDateTime?: Date; + CidrIp?: string; /** *

The status of the IP address block.

@@ -3529,19 +3644,19 @@ export interface IpRouteInfo { IpRouteStatusMsg?: IpRouteStatusMsg | string; /** - *

Description of the IpRouteInfo.

+ *

The date and time the address block was added to the directory.

*/ - Description?: string; + AddedDateTime?: Date; /** - *

Identifier (ID) of the directory associated with the IP addresses.

+ *

The reason for the IpRouteStatusMsg.

*/ - DirectoryId?: string; + IpRouteStatusReason?: string; /** - *

The reason for the IpRouteStatusMsg.

+ *

Description of the IpRouteInfo.

*/ - IpRouteStatusReason?: string; + Description?: string; } export namespace IpRouteInfo { @@ -3570,11 +3685,6 @@ export namespace ListIpRoutesResult { } export interface ListLogSubscriptionsRequest { - /** - *

The token for the next set of items to return.

- */ - NextToken?: string; - /** *

If a DirectoryID is provided, lists only the log subscription * associated with that directory. If no DirectoryId is provided, lists all @@ -3583,6 +3693,11 @@ export interface ListLogSubscriptionsRequest { */ DirectoryId?: string; + /** + *

The token for the next set of items to return.

+ */ + NextToken?: string; + /** *

The maximum number of items returned.

*/ @@ -3600,6 +3715,12 @@ export namespace ListLogSubscriptionsRequest { * specified destination.

*/ export interface LogSubscription { + /** + *

Identifier (ID) of the directory that you want to associate with the log + * subscription.

+ */ + DirectoryId?: string; + /** *

The name of the log group.

*/ @@ -3609,12 +3730,6 @@ export interface LogSubscription { *

The date and time that the log subscription was created.

*/ SubscriptionCreatedDateTime?: Date; - - /** - *

Identifier (ID) of the directory that you want to associate with the log - * subscription.

- */ - DirectoryId?: string; } export namespace LogSubscription { @@ -3643,11 +3758,6 @@ export namespace ListLogSubscriptionsResult { } export interface ListSchemaExtensionsRequest { - /** - *

The maximum number of items to return.

- */ - Limit?: number; - /** *

The identifier of the directory from which to retrieve the schema extension information.

*/ @@ -3657,6 +3767,11 @@ export interface ListSchemaExtensionsRequest { *

The ListSchemaExtensions.NextToken value from a previous call to ListSchemaExtensions. Pass null if this is the first call.

*/ NextToken?: string; + + /** + *

The maximum number of items to return.

+ */ + Limit?: number; } export namespace ListSchemaExtensionsRequest { @@ -3682,9 +3797,9 @@ export enum SchemaExtensionStatus { */ export interface SchemaExtensionInfo { /** - *

The date and time that the schema extension started being applied to the directory.

+ *

The identifier of the directory to which the schema extension is applied.

*/ - StartDateTime?: Date; + DirectoryId?: string; /** *

The identifier of the schema extension.

@@ -3692,9 +3807,9 @@ export interface SchemaExtensionInfo { SchemaExtensionId?: string; /** - *

The identifier of the directory to which the schema extension is applied.

+ *

A description of the schema extension.

*/ - DirectoryId?: string; + Description?: string; /** *

The current status of the schema extension.

@@ -3707,9 +3822,9 @@ export interface SchemaExtensionInfo { SchemaExtensionStatusReason?: string; /** - *

A description of the schema extension.

+ *

The date and time that the schema extension started being applied to the directory.

*/ - Description?: string; + StartDateTime?: Date; /** *

The date and time that the schema extension was completed.

@@ -3725,14 +3840,14 @@ export namespace SchemaExtensionInfo { export interface ListSchemaExtensionsResult { /** - *

If not null, more results are available. Pass this value for the NextToken parameter in a subsequent call to ListSchemaExtensions to retrieve the next set of items.

+ *

Information about the schema extensions applied to the directory.

*/ - NextToken?: string; + SchemaExtensionsInfo?: SchemaExtensionInfo[]; /** - *

Information about the schema extensions applied to the directory.

+ *

If not null, more results are available. Pass this value for the NextToken parameter in a subsequent call to ListSchemaExtensions to retrieve the next set of items.

*/ - SchemaExtensionsInfo?: SchemaExtensionInfo[]; + NextToken?: string; } export namespace ListSchemaExtensionsResult { @@ -3743,19 +3858,19 @@ export namespace ListSchemaExtensionsResult { export interface ListTagsForResourceRequest { /** - *

Reserved for future use.

+ *

Identifier (ID) of the directory for which you want to retrieve tags.

*/ - NextToken?: string; + ResourceId: string | undefined; /** *

Reserved for future use.

*/ - Limit?: number; + NextToken?: string; /** - *

Identifier (ID) of the directory for which you want to retrieve tags.

+ *

Reserved for future use.

*/ - ResourceId: string | undefined; + Limit?: number; } export namespace ListTagsForResourceRequest { @@ -3815,6 +3930,16 @@ export interface RegisterCertificateRequest { *

The certificate PEM string that needs to be registered.

*/ CertificateData: string | undefined; + + /** + *

The certificate type to register for the request.

+ */ + Type?: CertificateType | string; + + /** + *

Contains information about the client certificate authentication settings, such as ClientLDAPS or ClientCertAuth.

+ */ + ClientCertAuthSettings?: ClientCertAuthSettings; } export namespace RegisterCertificateRequest { @@ -3841,14 +3966,14 @@ export namespace RegisterCertificateResult { */ export interface RegisterEventTopicRequest { /** - *

The SNS topic name to which the directory will publish status messages. This SNS topic must be in the same region as the specified Directory ID.

+ *

The Directory ID that will publish status messages to the SNS topic.

*/ - TopicName: string | undefined; + DirectoryId: string | undefined; /** - *

The Directory ID that will publish status messages to the SNS topic.

+ *

The SNS topic name to which the directory will publish status messages. This SNS topic must be in the same region as the specified Directory ID.

*/ - DirectoryId: string | undefined; + TopicName: string | undefined; } export namespace RegisterEventTopicRequest { @@ -3944,14 +4069,14 @@ export namespace RemoveRegionResult { export interface RemoveTagsFromResourceRequest { /** - *

The tag key (name) of the tag to be removed.

+ *

Identifier (ID) of the directory from which to remove the tag.

*/ - TagKeys: string[] | undefined; + ResourceId: string | undefined; /** - *

Identifier (ID) of the directory from which to remove the tag.

+ *

The tag key (name) of the tag to be removed.

*/ - ResourceId: string | undefined; + TagKeys: string[] | undefined; } export namespace RemoveTagsFromResourceRequest { @@ -3999,14 +4124,14 @@ export interface ResetUserPasswordRequest { DirectoryId: string | undefined; /** - *

The new password that will be reset.

+ *

The user name of the user whose password will be reset.

*/ - NewPassword: string | undefined; + UserName: string | undefined; /** - *

The user name of the user whose password will be reset.

+ *

The new password that will be reset.

*/ - UserName: string | undefined; + NewPassword: string | undefined; } export namespace ResetUserPasswordRequest { @@ -4031,14 +4156,14 @@ export interface UserDoesNotExistException extends __SmithyException, $MetadataB name: "UserDoesNotExistException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace UserDoesNotExistException { @@ -4081,14 +4206,14 @@ export interface InvalidTargetException extends __SmithyException, $MetadataBear name: "InvalidTargetException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace InvalidTargetException { @@ -4129,14 +4254,14 @@ export enum TargetType { */ export interface ShareTarget { /** - *

Type of identifier to be used in the Id field.

+ *

Identifier of the directory consumer account.

*/ - Type: TargetType | string | undefined; + Id: string | undefined; /** - *

Identifier of the directory consumer account.

+ *

Type of identifier to be used in the Id field.

*/ - Id: string | undefined; + Type: TargetType | string | undefined; } export namespace ShareTarget { @@ -4152,6 +4277,13 @@ export interface ShareDirectoryRequest { */ DirectoryId: string | undefined; + /** + *

A directory share request that is sent by the directory owner to the directory consumer. + * The request includes a typed message to help the directory consumer administrator determine + * whether to approve or reject the share invitation.

+ */ + ShareNotes?: string; + /** *

Identifier for the directory consumer account with whom the directory is to be * shared.

@@ -4164,13 +4296,6 @@ export interface ShareDirectoryRequest { * sending a directory sharing request (HANDSHAKE).

*/ ShareMethod: ShareMethod | string | undefined; - - /** - *

A directory share request that is sent by the directory owner to the directory consumer. - * The request includes a typed message to help the directory consumer administrator determine - * whether to approve or reject the share invitation.

- */ - ShareNotes?: string; } export namespace ShareDirectoryRequest { @@ -4201,14 +4326,14 @@ export interface ShareLimitExceededException extends __SmithyException, $Metadat name: "ShareLimitExceededException"; $fault: "client"; /** - *

The AWS request identifier.

+ *

The descriptive message for the exception.

*/ - RequestId?: string; + Message?: string; /** - *

The descriptive message for the exception.

+ *

The AWS request identifier.

*/ - Message?: string; + RequestId?: string; } export namespace ShareLimitExceededException { @@ -4219,14 +4344,14 @@ export namespace ShareLimitExceededException { export interface StartSchemaExtensionRequest { /** - *

A description of the schema extension.

+ *

The identifier of the directory for which the schema extension will be applied to.

*/ - Description: string | undefined; + DirectoryId: string | undefined; /** - *

The identifier of the directory for which the schema extension will be applied to.

+ *

If true, creates a snapshot of the directory before applying the schema extension.

*/ - DirectoryId: string | undefined; + CreateSnapshotBeforeSchemaExtension: boolean | undefined; /** *

The LDIF file represented as a string. To construct the LdifContent string, precede each line as it would be formatted in an ldif file with \n. See the example request below for more details. The file size can be no larger than 1MB.

@@ -4234,9 +4359,9 @@ export interface StartSchemaExtensionRequest { LdifContent: string | undefined; /** - *

If true, creates a snapshot of the directory before applying the schema extension.

+ *

A description of the schema extension.

*/ - CreateSnapshotBeforeSchemaExtension: boolean | undefined; + Description: string | undefined; } export namespace StartSchemaExtensionRequest { @@ -4281,17 +4406,17 @@ export namespace UnshareTarget { } export interface UnshareDirectoryRequest { - /** - *

Identifier for the directory consumer account with whom the directory has to be - * unshared.

- */ - UnshareTarget: UnshareTarget | undefined; - /** *

The identifier of the AWS Managed Microsoft AD directory that you want to stop * sharing.

*/ DirectoryId: string | undefined; + + /** + *

Identifier for the directory consumer account with whom the directory has to be + * unshared.

+ */ + UnshareTarget: UnshareTarget | undefined; } export namespace UnshareDirectoryRequest { @@ -4324,14 +4449,14 @@ export interface UpdateConditionalForwarderRequest { DirectoryId: string | undefined; /** - *

The updated IP addresses of the remote DNS server associated with the conditional forwarder.

+ *

The fully qualified domain name (FQDN) of the remote domain with which you will set up a trust relationship.

*/ - DnsIpAddrs: string[] | undefined; + RemoteDomainName: string | undefined; /** - *

The fully qualified domain name (FQDN) of the remote domain with which you will set up a trust relationship.

+ *

The updated IP addresses of the remote DNS server associated with the conditional forwarder.

*/ - RemoteDomainName: string | undefined; + DnsIpAddrs: string[] | undefined; } export namespace UpdateConditionalForwarderRequest { @@ -4376,14 +4501,14 @@ export namespace DomainControllerLimitExceededException { export interface UpdateNumberOfDomainControllersRequest { /** - *

The number of domain controllers desired in the directory.

+ *

Identifier of the directory to which the domain controllers will be added or removed.

*/ - DesiredNumber: number | undefined; + DirectoryId: string | undefined; /** - *

Identifier of the directory to which the domain controllers will be added or removed.

+ *

The number of domain controllers desired in the directory.

*/ - DirectoryId: string | undefined; + DesiredNumber: number | undefined; } export namespace UpdateNumberOfDomainControllersRequest { @@ -4435,14 +4560,14 @@ export namespace UpdateRadiusResult { export interface UpdateTrustRequest { /** - *

Updates selective authentication for the trust.

+ *

Identifier of the trust relationship.

*/ - SelectiveAuth?: SelectiveAuth | string; + TrustId: string | undefined; /** - *

Identifier of the trust relationship.

+ *

Updates selective authentication for the trust.

*/ - TrustId: string | undefined; + SelectiveAuth?: SelectiveAuth | string; } export namespace UpdateTrustRequest { @@ -4453,14 +4578,14 @@ export namespace UpdateTrustRequest { export interface UpdateTrustResult { /** - *

Identifier of the trust relationship.

+ *

The AWS request identifier.

*/ - TrustId?: string; + RequestId?: string; /** - *

The AWS request identifier.

+ *

Identifier of the trust relationship.

*/ - RequestId?: string; + TrustId?: string; } export namespace UpdateTrustResult { diff --git a/clients/client-directory-service/protocols/Aws_json1_1.ts b/clients/client-directory-service/protocols/Aws_json1_1.ts index 0f9ce1044fdb..e3f544e00bfc 100644 --- a/clients/client-directory-service/protocols/Aws_json1_1.ts +++ b/clients/client-directory-service/protocols/Aws_json1_1.ts @@ -74,9 +74,17 @@ import { } from "../commands/DescribeSharedDirectoriesCommand"; import { DescribeSnapshotsCommandInput, DescribeSnapshotsCommandOutput } from "../commands/DescribeSnapshotsCommand"; import { DescribeTrustsCommandInput, DescribeTrustsCommandOutput } from "../commands/DescribeTrustsCommand"; +import { + DisableClientAuthenticationCommandInput, + DisableClientAuthenticationCommandOutput, +} from "../commands/DisableClientAuthenticationCommand"; import { DisableLDAPSCommandInput, DisableLDAPSCommandOutput } from "../commands/DisableLDAPSCommand"; import { DisableRadiusCommandInput, DisableRadiusCommandOutput } from "../commands/DisableRadiusCommand"; import { DisableSsoCommandInput, DisableSsoCommandOutput } from "../commands/DisableSsoCommand"; +import { + EnableClientAuthenticationCommandInput, + EnableClientAuthenticationCommandOutput, +} from "../commands/EnableClientAuthenticationCommand"; import { EnableLDAPSCommandInput, EnableLDAPSCommandOutput } from "../commands/EnableLDAPSCommand"; import { EnableRadiusCommandInput, EnableRadiusCommandOutput } from "../commands/EnableRadiusCommand"; import { EnableSsoCommandInput, EnableSsoCommandOutput } from "../commands/EnableSsoCommand"; @@ -153,6 +161,7 @@ import { CertificateInUseException, CertificateInfo, CertificateLimitExceededException, + ClientCertAuthSettings, ClientException, Computer, ConditionalForwarder, @@ -220,6 +229,8 @@ import { DirectoryUnavailableException, DirectoryVpcSettings, DirectoryVpcSettingsDescription, + DisableClientAuthenticationRequest, + DisableClientAuthenticationResult, DisableLDAPSRequest, 
DisableLDAPSResult, DisableRadiusRequest, @@ -228,6 +239,8 @@ import { DisableSsoResult, DomainController, DomainControllerLimitExceededException, + EnableClientAuthenticationRequest, + EnableClientAuthenticationResult, EnableLDAPSRequest, EnableLDAPSResult, EnableRadiusRequest, @@ -243,6 +256,7 @@ import { GetSnapshotLimitsResult, InsufficientPermissionsException, InvalidCertificateException, + InvalidClientAuthStatusException, InvalidLDAPSStatusException, InvalidNextTokenException, InvalidParameterException, @@ -730,6 +744,19 @@ export const serializeAws_json1_1DescribeTrustsCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DisableClientAuthenticationCommand = async ( + input: DisableClientAuthenticationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "DirectoryService_20150416.DisableClientAuthentication", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DisableClientAuthenticationRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DisableLDAPSCommand = async ( input: DisableLDAPSCommandInput, context: __SerdeContext @@ -769,6 +796,19 @@ export const serializeAws_json1_1DisableSsoCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1EnableClientAuthenticationCommand = async ( + input: EnableClientAuthenticationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "DirectoryService_20150416.EnableClientAuthentication", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1EnableClientAuthenticationRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", 
undefined, body); +}; + export const serializeAws_json1_1EnableLDAPSCommand = async ( input: EnableLDAPSCommandInput, context: __SerdeContext @@ -3932,6 +3972,101 @@ const deserializeAws_json1_1DescribeTrustsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_json1_1DisableClientAuthenticationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DisableClientAuthenticationCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DisableClientAuthenticationResult(data, context); + const response: DisableClientAuthenticationCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DisableClientAuthenticationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.directoryservice#AccessDeniedException": + response = { + ...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ClientException": + case "com.amazonaws.directoryservice#ClientException": + response = { + ...(await deserializeAws_json1_1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "DirectoryDoesNotExistException": + case "com.amazonaws.directoryservice#DirectoryDoesNotExistException": + response = { + ...(await deserializeAws_json1_1DirectoryDoesNotExistExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidClientAuthStatusException": + case "com.amazonaws.directoryservice#InvalidClientAuthStatusException": + response = { + ...(await deserializeAws_json1_1InvalidClientAuthStatusExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceException": + case "com.amazonaws.directoryservice#ServiceException": + response = { + ...(await deserializeAws_json1_1ServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedOperationException": + case "com.amazonaws.directoryservice#UnsupportedOperationException": + response = { + ...(await deserializeAws_json1_1UnsupportedOperationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + 
$fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1DisableLDAPSCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -4193,6 +4328,109 @@ const deserializeAws_json1_1DisableSsoCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_json1_1EnableClientAuthenticationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1EnableClientAuthenticationCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1EnableClientAuthenticationResult(data, context); + const response: EnableClientAuthenticationCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1EnableClientAuthenticationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.directoryservice#AccessDeniedException": + response = { + ...(await deserializeAws_json1_1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ClientException": + case "com.amazonaws.directoryservice#ClientException": + response = { + ...(await deserializeAws_json1_1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "DirectoryDoesNotExistException": + case "com.amazonaws.directoryservice#DirectoryDoesNotExistException": + response = { + ...(await deserializeAws_json1_1DirectoryDoesNotExistExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidClientAuthStatusException": + case "com.amazonaws.directoryservice#InvalidClientAuthStatusException": + response = { + ...(await deserializeAws_json1_1InvalidClientAuthStatusExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "NoAvailableCertificateException": + case "com.amazonaws.directoryservice#NoAvailableCertificateException": + response = { + ...(await deserializeAws_json1_1NoAvailableCertificateExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceException": + case "com.amazonaws.directoryservice#ServiceException": + response = { + ...(await deserializeAws_json1_1ServiceExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedOperationException": + case "com.amazonaws.directoryservice#UnsupportedOperationException": + response = { + ...(await deserializeAws_json1_1UnsupportedOperationExceptionResponse(parsedOutput, context)), + name: 
errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1EnableLDAPSCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -6797,6 +7035,21 @@ const deserializeAws_json1_1InvalidCertificateExceptionResponse = async ( return contents; }; +const deserializeAws_json1_1InvalidClientAuthStatusExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1InvalidClientAuthStatusException(body, context); + const contents: InvalidClientAuthStatusException = { + name: "InvalidClientAuthStatusException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + const deserializeAws_json1_1InvalidLDAPSStatusExceptionResponse = async ( parsedOutput: any, context: __SerdeContext @@ -7086,6 +7339,12 @@ const serializeAws_json1_1CidrIps = (input: string[], context: __SerdeContext): return input.map((entry) => entry); }; +const serializeAws_json1_1ClientCertAuthSettings = (input: ClientCertAuthSettings, context: __SerdeContext): any => { + return { + ...(input.OCSPUrl !== undefined && { OCSPUrl: input.OCSPUrl }), + }; +}; + const serializeAws_json1_1ConnectDirectoryRequest = (input: ConnectDirectoryRequest, context: __SerdeContext): any => { return { ...(input.ConnectSettings !== undefined && { @@ -7395,6 +7654,16 @@ const 
serializeAws_json1_1DirectoryVpcSettings = (input: DirectoryVpcSettings, c }; }; +const serializeAws_json1_1DisableClientAuthenticationRequest = ( + input: DisableClientAuthenticationRequest, + context: __SerdeContext +): any => { + return { + ...(input.DirectoryId !== undefined && { DirectoryId: input.DirectoryId }), + ...(input.Type !== undefined && { Type: input.Type }), + }; +}; + const serializeAws_json1_1DisableLDAPSRequest = (input: DisableLDAPSRequest, context: __SerdeContext): any => { return { ...(input.DirectoryId !== undefined && { DirectoryId: input.DirectoryId }), @@ -7424,6 +7693,16 @@ const serializeAws_json1_1DomainControllerIds = (input: string[], context: __Ser return input.map((entry) => entry); }; +const serializeAws_json1_1EnableClientAuthenticationRequest = ( + input: EnableClientAuthenticationRequest, + context: __SerdeContext +): any => { + return { + ...(input.DirectoryId !== undefined && { DirectoryId: input.DirectoryId }), + ...(input.Type !== undefined && { Type: input.Type }), + }; +}; + const serializeAws_json1_1EnableLDAPSRequest = (input: EnableLDAPSRequest, context: __SerdeContext): any => { return { ...(input.DirectoryId !== undefined && { DirectoryId: input.DirectoryId }), @@ -7545,7 +7824,11 @@ const serializeAws_json1_1RegisterCertificateRequest = ( ): any => { return { ...(input.CertificateData !== undefined && { CertificateData: input.CertificateData }), + ...(input.ClientCertAuthSettings !== undefined && { + ClientCertAuthSettings: serializeAws_json1_1ClientCertAuthSettings(input.ClientCertAuthSettings, context), + }), ...(input.DirectoryId !== undefined && { DirectoryId: input.DirectoryId }), + ...(input.Type !== undefined && { Type: input.Type }), }; }; @@ -7815,6 +8098,10 @@ const deserializeAws_json1_1Certificate = (output: any, context: __SerdeContext) return { CertificateId: output.CertificateId !== undefined && output.CertificateId !== null ? 
output.CertificateId : undefined, + ClientCertAuthSettings: + output.ClientCertAuthSettings !== undefined && output.ClientCertAuthSettings !== null + ? deserializeAws_json1_1ClientCertAuthSettings(output.ClientCertAuthSettings, context) + : undefined, CommonName: output.CommonName !== undefined && output.CommonName !== null ? output.CommonName : undefined, ExpiryDateTime: output.ExpiryDateTime !== undefined && output.ExpiryDateTime !== null @@ -7826,6 +8113,7 @@ const deserializeAws_json1_1Certificate = (output: any, context: __SerdeContext) : undefined, State: output.State !== undefined && output.State !== null ? output.State : undefined, StateReason: output.StateReason !== undefined && output.StateReason !== null ? output.StateReason : undefined, + Type: output.Type !== undefined && output.Type !== null ? output.Type : undefined, } as any; }; @@ -7859,6 +8147,7 @@ const deserializeAws_json1_1CertificateInfo = (output: any, context: __SerdeCont ? new Date(Math.round(output.ExpiryDateTime * 1000)) : undefined, State: output.State !== undefined && output.State !== null ? output.State : undefined, + Type: output.Type !== undefined && output.Type !== null ? output.Type : undefined, } as any; }; @@ -7886,6 +8175,12 @@ const deserializeAws_json1_1CertificatesInfo = (output: any, context: __SerdeCon return (output || []).map((entry: any) => deserializeAws_json1_1CertificateInfo(entry, context)); }; +const deserializeAws_json1_1ClientCertAuthSettings = (output: any, context: __SerdeContext): ClientCertAuthSettings => { + return { + OCSPUrl: output.OCSPUrl !== undefined && output.OCSPUrl !== null ? output.OCSPUrl : undefined, + } as any; +}; + const deserializeAws_json1_1ClientException = (output: any, context: __SerdeContext): ClientException => { return { Message: output.Message !== undefined && output.Message !== null ? 
output.Message : undefined, @@ -8367,6 +8662,13 @@ const deserializeAws_json1_1DirectoryVpcSettingsDescription = ( } as any; }; +const deserializeAws_json1_1DisableClientAuthenticationResult = ( + output: any, + context: __SerdeContext +): DisableClientAuthenticationResult => { + return {} as any; +}; + const deserializeAws_json1_1DisableLDAPSResult = (output: any, context: __SerdeContext): DisableLDAPSResult => { return {} as any; }; @@ -8422,6 +8724,13 @@ const deserializeAws_json1_1DomainControllers = (output: any, context: __SerdeCo return (output || []).map((entry: any) => deserializeAws_json1_1DomainController(entry, context)); }; +const deserializeAws_json1_1EnableClientAuthenticationResult = ( + output: any, + context: __SerdeContext +): EnableClientAuthenticationResult => { + return {} as any; +}; + const deserializeAws_json1_1EnableLDAPSResult = (output: any, context: __SerdeContext): EnableLDAPSResult => { return {} as any; }; @@ -8515,6 +8824,16 @@ const deserializeAws_json1_1InvalidCertificateException = ( } as any; }; +const deserializeAws_json1_1InvalidClientAuthStatusException = ( + output: any, + context: __SerdeContext +): InvalidClientAuthStatusException => { + return { + Message: output.Message !== undefined && output.Message !== null ? output.Message : undefined, + RequestId: output.RequestId !== undefined && output.RequestId !== null ? output.RequestId : undefined, + } as any; +}; + const deserializeAws_json1_1InvalidLDAPSStatusException = ( output: any, context: __SerdeContext diff --git a/clients/client-ec2/EC2.ts b/clients/client-ec2/EC2.ts index 143b94134be1..57470db5bd07 100644 --- a/clients/client-ec2/EC2.ts +++ b/clients/client-ec2/EC2.ts @@ -2987,7 +2987,7 @@ export class EC2 extends EC2Client { *

Attaches an EBS volume to a running or stopped instance and exposes it to the instance * with the specified device name.

*

Encrypted EBS volumes must be attached to instances that support Amazon EBS encryption. For - * more information, see Amazon EBS Encryption in the Amazon Elastic Compute Cloud User Guide.

+ * more information, see Amazon EBS encryption in the Amazon Elastic Compute Cloud User Guide.

*

After you attach an EBS volume, you must make it available. For more information, see * Making an EBS volume available for use.

*

If a volume has an AWS Marketplace product code:

@@ -4871,7 +4871,7 @@ export class EC2 extends EC2Client { * source volume are propagated to the snapshot.

*

You can take a snapshot of an attached volume that is in use. However, snapshots only * capture data that has been written to your EBS volume at the time the snapshot command is - * issued; this may exclude any data that has been cached by any applications or the operating + * issued; this might exclude any data that has been cached by any applications or the operating * system. If you can pause any file systems on the volume long enough to take a snapshot, your * snapshot should be complete. However, if you cannot pause all file writes to the volume, you * should unmount the volume from within the instance, issue the snapshot command, and then @@ -4884,7 +4884,7 @@ export class EC2 extends EC2Client { * and any associated snapshots always remain protected.

*

You can tag your snapshots during creation. For more information, see Tagging your Amazon EC2 * resources in the Amazon Elastic Compute Cloud User Guide.

- *

For more information, see Amazon Elastic Block Store and Amazon EBS Encryption in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information, see Amazon Elastic Block Store and Amazon EBS encryption in the Amazon Elastic Compute Cloud User Guide.

*/ public createSnapshot( args: CreateSnapshotCommandInput, @@ -5460,15 +5460,12 @@ export class EC2 extends EC2Client { } /** - *

Creates an EBS volume that can be attached to an instance in the same Availability Zone. - * The volume is created in the regional endpoint that you send the HTTP request to. For more - * information see Regions and - * Endpoints.

+ *

Creates an EBS volume that can be attached to an instance in the same Availability Zone.

*

You can create a new empty volume or restore a volume from an EBS snapshot. * Any AWS Marketplace product codes from the snapshot are propagated to the volume.

*

You can create encrypted volumes. Encrypted volumes must be attached to instances that * support Amazon EBS encryption. Volumes that are created from encrypted snapshots are also automatically - * encrypted. For more information, see Amazon EBS Encryption + * encrypted. For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*

You can tag your volumes during creation. For more information, see Tagging your Amazon EC2 * resources in the Amazon Elastic Compute Cloud User Guide.

@@ -10445,7 +10442,7 @@ export class EC2 extends EC2Client { /** *

Describes the specified attribute of the specified snapshot. You can specify only one * attribute at a time.

- *

For more information about EBS snapshots, see Amazon EBS Snapshots in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS snapshots, see Amazon EBS snapshots in the Amazon Elastic Compute Cloud User Guide.

*/ public describeSnapshotAttribute( args: DescribeSnapshotAttributeCommandInput, @@ -10523,7 +10520,7 @@ export class EC2 extends EC2Client { * passed to a subsequent DescribeSnapshots request to retrieve the remaining * results.

*

To get the state of fast snapshot restores for a snapshot, use DescribeFastSnapshotRestores.

- *

For more information about EBS snapshots, see Amazon EBS Snapshots in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS snapshots, see Amazon EBS snapshots in the Amazon Elastic Compute Cloud User Guide.

*/ public describeSnapshots( args: DescribeSnapshotsCommandInput, @@ -11165,7 +11162,7 @@ export class EC2 extends EC2Client { /** *

Describes the specified attribute of the specified volume. You can specify only one * attribute at a time.

- *

For more information about EBS volumes, see Amazon EBS Volumes in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS volumes, see Amazon EBS volumes in the Amazon Elastic Compute Cloud User Guide.

*/ public describeVolumeAttribute( args: DescribeVolumeAttributeCommandInput, @@ -11204,7 +11201,7 @@ export class EC2 extends EC2Client { * then that number of results is returned along with a NextToken value that can be * passed to a subsequent DescribeVolumes request to retrieve the remaining * results.

- *

For more information about EBS volumes, see Amazon EBS Volumes in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS volumes, see Amazon EBS volumes in the Amazon Elastic Compute Cloud User Guide.

*/ public describeVolumes( args: DescribeVolumesCommandInput, @@ -11289,18 +11286,18 @@ export class EC2 extends EC2Client { * values are ok, impaired , warning, or * insufficient-data. If all checks pass, the overall status of the volume is * ok. If the check fails, the overall status is impaired. If the - * status is insufficient-data, then the checks may still be taking place on your + * status is insufficient-data, then the checks might still be taking place on your * volume at the time. We recommend that you retry the request. For more information about volume * status, see Monitoring the status of your volumes in the * Amazon Elastic Compute Cloud User Guide.

*

- * Events: Reflect the cause of a volume status and may require you to + * Events: Reflect the cause of a volume status and might require you to * take action. For example, if your volume returns an impaired status, then the * volume event might be potential-data-inconsistency. This means that your volume * has been affected by an issue with the underlying host, has all I/O operations disabled, and - * may have inconsistent data.

+ * might have inconsistent data.

*

- * Actions: Reflect the actions you may have to take in response to an + * Actions: Reflect the actions you might have to take in response to an * event. For example, if the status of the volume is impaired and the volume event * shows potential-data-inconsistency, then the action shows * enable-volume-io. This means that you may want to enable the I/O operations for @@ -11951,7 +11948,7 @@ export class EC2 extends EC2Client { * enabling encryption when you create each volume.

*

Disabling encryption by default does not change the encryption status of your * existing volumes.

- *

For more information, see Amazon EBS Encryption in the + *

For more information, see Amazon EBS encryption in the * Amazon Elastic Compute Cloud User Guide.

*/ public disableEbsEncryptionByDefault( @@ -12481,7 +12478,7 @@ export class EC2 extends EC2Client { *

Enables EBS encryption by default for your account in the current Region.

*

After you enable encryption by default, the EBS volumes that you create are * are always encrypted, either using the default CMK or the CMK that you specified - * when you created each volume. For more information, see Amazon EBS Encryption in the + * when you created each volume. For more information, see Amazon EBS encryption in the * Amazon Elastic Compute Cloud User Guide.

*

You can specify the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId * or ResetEbsDefaultKmsKeyId.

@@ -13116,7 +13113,7 @@ export class EC2 extends EC2Client { *

Describes the default customer master key (CMK) for EBS encryption by default for your account in this Region. * You can change the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId or * ResetEbsDefaultKmsKeyId.

- *

For more information, see Amazon EBS Encryption + *

For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ public getEbsDefaultKmsKeyId( @@ -13151,7 +13148,7 @@ export class EC2 extends EC2Client { /** *

Describes whether EBS encryption by default is enabled for your account in the current * Region.

- *

For more information, see Amazon EBS Encryption + *

For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ public getEbsEncryptionByDefault( @@ -13953,7 +13950,7 @@ export class EC2 extends EC2Client { * managed CMK. To reset the default CMK to the AWS managed CMK for EBS, use ResetEbsDefaultKmsKeyId. Amazon EBS does not support asymmetric CMKs.

*

If you delete or disable the customer managed CMK that you specified for use with * encryption by default, your instances will fail to launch.

- *

For more information, see Amazon EBS Encryption + *

For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ public modifyEbsDefaultKmsKeyId( @@ -14959,10 +14956,10 @@ export class EC2 extends EC2Client { /** *

You can modify several parameters of an existing EBS volume, including volume size, volume * type, and IOPS capacity. If your EBS volume is attached to a current-generation EC2 instance - * type, you may be able to apply these changes without stopping the instance or detaching the + * type, you might be able to apply these changes without stopping the instance or detaching the * volume from it. For more information about modifying an EBS volume running Linux, see Modifying the size, IOPS, or * type of an EBS volume on Linux. For more information about modifying an EBS volume - * running Windows, see Modifying the size, IOPS, or type of an EBS volume on Windows.

+ * running Windows, see Modifying the size, IOPS, or type of an EBS volume on Windows.

*

When you complete a resize operation on your volume, you need to extend the volume's * file-system size to take advantage of the new storage capacity. For information about * extending a Linux file system, see Extending a Linux @@ -14973,11 +14970,10 @@ export class EC2 extends EC2Client { * modification using DescribeVolumesModifications. For information * about tracking status changes using either method, see Monitoring volume * modifications.

- *

With previous-generation instance types, resizing an EBS volume may require detaching and + *

With previous-generation instance types, resizing an EBS volume might require detaching and * reattaching the volume or stopping and restarting the instance. For more information, see - * Modifying the size, - * IOPS, or type of an EBS volume on Linux and Modifying the size, IOPS, or type of an EBS - * volume on Windows.

+ * Amazon EBS Elastic + * Volumes (Linux) or Amazon EBS Elastic Volumes (Windows).

*

If you reach the maximum volume modification rate per volume limit, you will need to wait * at least six hours before applying further modifications to the affected EBS volume.

*/ @@ -16478,7 +16474,7 @@ export class EC2 extends EC2Client { * to the AWS managed CMK for EBS.

*

After resetting the default CMK to the AWS managed CMK, you can continue to encrypt by a * customer managed CMK by specifying it when you create the volume. For more information, see - * Amazon EBS Encryption + * Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ public resetEbsDefaultKmsKeyId( diff --git a/clients/client-ec2/commands/AttachVolumeCommand.ts b/clients/client-ec2/commands/AttachVolumeCommand.ts index 9f98f8032dfd..89e03b3b123a 100644 --- a/clients/client-ec2/commands/AttachVolumeCommand.ts +++ b/clients/client-ec2/commands/AttachVolumeCommand.ts @@ -21,7 +21,7 @@ export type AttachVolumeCommandOutput = VolumeAttachment & __MetadataBearer; *

Attaches an EBS volume to a running or stopped instance and exposes it to the instance * with the specified device name.

*

Encrypted EBS volumes must be attached to instances that support Amazon EBS encryption. For - * more information, see Amazon EBS Encryption in the Amazon Elastic Compute Cloud User Guide.

+ * more information, see Amazon EBS encryption in the Amazon Elastic Compute Cloud User Guide.

*

After you attach an EBS volume, you must make it available. For more information, see * Making an EBS volume available for use.

*

If a volume has an AWS Marketplace product code:

diff --git a/clients/client-ec2/commands/CreateSnapshotCommand.ts b/clients/client-ec2/commands/CreateSnapshotCommand.ts index 7bdfcc334d6a..3de04f423c72 100644 --- a/clients/client-ec2/commands/CreateSnapshotCommand.ts +++ b/clients/client-ec2/commands/CreateSnapshotCommand.ts @@ -25,7 +25,7 @@ export type CreateSnapshotCommandOutput = Snapshot & __MetadataBearer; * source volume are propagated to the snapshot.

*

You can take a snapshot of an attached volume that is in use. However, snapshots only * capture data that has been written to your EBS volume at the time the snapshot command is - * issued; this may exclude any data that has been cached by any applications or the operating + * issued; this might exclude any data that has been cached by any applications or the operating * system. If you can pause any file systems on the volume long enough to take a snapshot, your * snapshot should be complete. However, if you cannot pause all file writes to the volume, you * should unmount the volume from within the instance, issue the snapshot command, and then @@ -38,7 +38,7 @@ export type CreateSnapshotCommandOutput = Snapshot & __MetadataBearer; * and any associated snapshots always remain protected.

*

You can tag your snapshots during creation. For more information, see Tagging your Amazon EC2 * resources in the Amazon Elastic Compute Cloud User Guide.

- *

For more information, see Amazon Elastic Block Store and Amazon EBS Encryption in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information, see Amazon Elastic Block Store and Amazon EBS encryption in the Amazon Elastic Compute Cloud User Guide.

*/ export class CreateSnapshotCommand extends $Command< CreateSnapshotCommandInput, diff --git a/clients/client-ec2/commands/CreateVolumeCommand.ts b/clients/client-ec2/commands/CreateVolumeCommand.ts index 0ad13d901229..c53b94e15068 100644 --- a/clients/client-ec2/commands/CreateVolumeCommand.ts +++ b/clients/client-ec2/commands/CreateVolumeCommand.ts @@ -18,15 +18,12 @@ export type CreateVolumeCommandInput = CreateVolumeRequest; export type CreateVolumeCommandOutput = Volume & __MetadataBearer; /** - *

Creates an EBS volume that can be attached to an instance in the same Availability Zone. - * The volume is created in the regional endpoint that you send the HTTP request to. For more - * information see Regions and - * Endpoints.

+ *

Creates an EBS volume that can be attached to an instance in the same Availability Zone.

*

You can create a new empty volume or restore a volume from an EBS snapshot. * Any AWS Marketplace product codes from the snapshot are propagated to the volume.

*

You can create encrypted volumes. Encrypted volumes must be attached to instances that * support Amazon EBS encryption. Volumes that are created from encrypted snapshots are also automatically - * encrypted. For more information, see Amazon EBS Encryption + * encrypted. For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*

You can tag your volumes during creation. For more information, see Tagging your Amazon EC2 * resources in the Amazon Elastic Compute Cloud User Guide.

diff --git a/clients/client-ec2/commands/DescribeSnapshotAttributeCommand.ts b/clients/client-ec2/commands/DescribeSnapshotAttributeCommand.ts index 05bf43eeeb82..53c04dfaceba 100644 --- a/clients/client-ec2/commands/DescribeSnapshotAttributeCommand.ts +++ b/clients/client-ec2/commands/DescribeSnapshotAttributeCommand.ts @@ -23,7 +23,7 @@ export type DescribeSnapshotAttributeCommandOutput = DescribeSnapshotAttributeRe /** *

Describes the specified attribute of the specified snapshot. You can specify only one * attribute at a time.

- *

For more information about EBS snapshots, see Amazon EBS Snapshots in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS snapshots, see Amazon EBS snapshots in the Amazon Elastic Compute Cloud User Guide.

*/ export class DescribeSnapshotAttributeCommand extends $Command< DescribeSnapshotAttributeCommandInput, diff --git a/clients/client-ec2/commands/DescribeSnapshotsCommand.ts b/clients/client-ec2/commands/DescribeSnapshotsCommand.ts index 977ed0730688..0890c3d017e8 100644 --- a/clients/client-ec2/commands/DescribeSnapshotsCommand.ts +++ b/clients/client-ec2/commands/DescribeSnapshotsCommand.ts @@ -67,7 +67,7 @@ export type DescribeSnapshotsCommandOutput = DescribeSnapshotsResult & __Metadat * passed to a subsequent DescribeSnapshots request to retrieve the remaining * results.

*

To get the state of fast snapshot restores for a snapshot, use DescribeFastSnapshotRestores.

- *

For more information about EBS snapshots, see Amazon EBS Snapshots in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS snapshots, see Amazon EBS snapshots in the Amazon Elastic Compute Cloud User Guide.

*/ export class DescribeSnapshotsCommand extends $Command< DescribeSnapshotsCommandInput, diff --git a/clients/client-ec2/commands/DescribeVolumeAttributeCommand.ts b/clients/client-ec2/commands/DescribeVolumeAttributeCommand.ts index dbc332947fea..a70b98b0eb59 100644 --- a/clients/client-ec2/commands/DescribeVolumeAttributeCommand.ts +++ b/clients/client-ec2/commands/DescribeVolumeAttributeCommand.ts @@ -23,7 +23,7 @@ export type DescribeVolumeAttributeCommandOutput = DescribeVolumeAttributeResult /** *

Describes the specified attribute of the specified volume. You can specify only one * attribute at a time.

- *

For more information about EBS volumes, see Amazon EBS Volumes in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS volumes, see Amazon EBS volumes in the Amazon Elastic Compute Cloud User Guide.

*/ export class DescribeVolumeAttributeCommand extends $Command< DescribeVolumeAttributeCommandInput, diff --git a/clients/client-ec2/commands/DescribeVolumeStatusCommand.ts b/clients/client-ec2/commands/DescribeVolumeStatusCommand.ts index f2b1bdd8f366..c5e036cb16a6 100644 --- a/clients/client-ec2/commands/DescribeVolumeStatusCommand.ts +++ b/clients/client-ec2/commands/DescribeVolumeStatusCommand.ts @@ -35,18 +35,18 @@ export type DescribeVolumeStatusCommandOutput = DescribeVolumeStatusResult & __M * values are ok, impaired , warning, or * insufficient-data. If all checks pass, the overall status of the volume is * ok. If the check fails, the overall status is impaired. If the - * status is insufficient-data, then the checks may still be taking place on your + * status is insufficient-data, then the checks might still be taking place on your * volume at the time. We recommend that you retry the request. For more information about volume * status, see Monitoring the status of your volumes in the * Amazon Elastic Compute Cloud User Guide.

*

- * Events: Reflect the cause of a volume status and may require you to + * Events: Reflect the cause of a volume status and might require you to * take action. For example, if your volume returns an impaired status, then the * volume event might be potential-data-inconsistency. This means that your volume * has been affected by an issue with the underlying host, has all I/O operations disabled, and - * may have inconsistent data.

+ * might have inconsistent data.

*

- * Actions: Reflect the actions you may have to take in response to an + * Actions: Reflect the actions you might have to take in response to an * event. For example, if the status of the volume is impaired and the volume event * shows potential-data-inconsistency, then the action shows * enable-volume-io. This means that you may want to enable the I/O operations for diff --git a/clients/client-ec2/commands/DescribeVolumesCommand.ts b/clients/client-ec2/commands/DescribeVolumesCommand.ts index d4c6ddf07345..18d1ce7d6a08 100644 --- a/clients/client-ec2/commands/DescribeVolumesCommand.ts +++ b/clients/client-ec2/commands/DescribeVolumesCommand.ts @@ -25,7 +25,7 @@ export type DescribeVolumesCommandOutput = DescribeVolumesResult & __MetadataBea * then that number of results is returned along with a NextToken value that can be * passed to a subsequent DescribeVolumes request to retrieve the remaining * results.

- *

For more information about EBS volumes, see Amazon EBS Volumes in the Amazon Elastic Compute Cloud User Guide.

+ *

For more information about EBS volumes, see Amazon EBS volumes in the Amazon Elastic Compute Cloud User Guide.

*/ export class DescribeVolumesCommand extends $Command< DescribeVolumesCommandInput, diff --git a/clients/client-ec2/commands/DisableEbsEncryptionByDefaultCommand.ts b/clients/client-ec2/commands/DisableEbsEncryptionByDefaultCommand.ts index e72a6b56b13c..8e0f21d713f7 100644 --- a/clients/client-ec2/commands/DisableEbsEncryptionByDefaultCommand.ts +++ b/clients/client-ec2/commands/DisableEbsEncryptionByDefaultCommand.ts @@ -26,7 +26,7 @@ export type DisableEbsEncryptionByDefaultCommandOutput = DisableEbsEncryptionByD * enabling encryption when you create each volume.

*

Disabling encryption by default does not change the encryption status of your * existing volumes.

- *

For more information, see Amazon EBS Encryption in the + *

For more information, see Amazon EBS encryption in the * Amazon Elastic Compute Cloud User Guide.

*/ export class DisableEbsEncryptionByDefaultCommand extends $Command< diff --git a/clients/client-ec2/commands/EnableEbsEncryptionByDefaultCommand.ts b/clients/client-ec2/commands/EnableEbsEncryptionByDefaultCommand.ts index 8195ca4f07e4..226d7239cecc 100644 --- a/clients/client-ec2/commands/EnableEbsEncryptionByDefaultCommand.ts +++ b/clients/client-ec2/commands/EnableEbsEncryptionByDefaultCommand.ts @@ -24,7 +24,7 @@ export type EnableEbsEncryptionByDefaultCommandOutput = EnableEbsEncryptionByDef *

Enables EBS encryption by default for your account in the current Region.

*

After you enable encryption by default, the EBS volumes that you create are * are always encrypted, either using the default CMK or the CMK that you specified - * when you created each volume. For more information, see Amazon EBS Encryption in the + * when you created each volume. For more information, see Amazon EBS encryption in the * Amazon Elastic Compute Cloud User Guide.

*

You can specify the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId * or ResetEbsDefaultKmsKeyId.

diff --git a/clients/client-ec2/commands/GetEbsDefaultKmsKeyIdCommand.ts b/clients/client-ec2/commands/GetEbsDefaultKmsKeyIdCommand.ts index 51ee20192920..61a87a161b03 100644 --- a/clients/client-ec2/commands/GetEbsDefaultKmsKeyIdCommand.ts +++ b/clients/client-ec2/commands/GetEbsDefaultKmsKeyIdCommand.ts @@ -24,7 +24,7 @@ export type GetEbsDefaultKmsKeyIdCommandOutput = GetEbsDefaultKmsKeyIdResult & _ *

Describes the default customer master key (CMK) for EBS encryption by default for your account in this Region. * You can change the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId or * ResetEbsDefaultKmsKeyId.

- *

For more information, see Amazon EBS Encryption + *

For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ export class GetEbsDefaultKmsKeyIdCommand extends $Command< diff --git a/clients/client-ec2/commands/GetEbsEncryptionByDefaultCommand.ts b/clients/client-ec2/commands/GetEbsEncryptionByDefaultCommand.ts index 33c28a518983..eee47651f2cb 100644 --- a/clients/client-ec2/commands/GetEbsEncryptionByDefaultCommand.ts +++ b/clients/client-ec2/commands/GetEbsEncryptionByDefaultCommand.ts @@ -23,7 +23,7 @@ export type GetEbsEncryptionByDefaultCommandOutput = GetEbsEncryptionByDefaultRe /** *

Describes whether EBS encryption by default is enabled for your account in the current * Region.

- *

For more information, see Amazon EBS Encryption + *

For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ export class GetEbsEncryptionByDefaultCommand extends $Command< diff --git a/clients/client-ec2/commands/ModifyEbsDefaultKmsKeyIdCommand.ts b/clients/client-ec2/commands/ModifyEbsDefaultKmsKeyIdCommand.ts index 6c80d8e9b02c..e67d56f436c1 100644 --- a/clients/client-ec2/commands/ModifyEbsDefaultKmsKeyIdCommand.ts +++ b/clients/client-ec2/commands/ModifyEbsDefaultKmsKeyIdCommand.ts @@ -27,7 +27,7 @@ export type ModifyEbsDefaultKmsKeyIdCommandOutput = ModifyEbsDefaultKmsKeyIdResu * managed CMK. To reset the default CMK to the AWS managed CMK for EBS, use ResetEbsDefaultKmsKeyId. Amazon EBS does not support asymmetric CMKs.

*

If you delete or disable the customer managed CMK that you specified for use with * encryption by default, your instances will fail to launch.

- *

For more information, see Amazon EBS Encryption + *

For more information, see Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ export class ModifyEbsDefaultKmsKeyIdCommand extends $Command< diff --git a/clients/client-ec2/commands/ModifyVolumeCommand.ts b/clients/client-ec2/commands/ModifyVolumeCommand.ts index 85ca3b162bfc..ede6c022af9e 100644 --- a/clients/client-ec2/commands/ModifyVolumeCommand.ts +++ b/clients/client-ec2/commands/ModifyVolumeCommand.ts @@ -20,10 +20,10 @@ export type ModifyVolumeCommandOutput = ModifyVolumeResult & __MetadataBearer; /** *

You can modify several parameters of an existing EBS volume, including volume size, volume * type, and IOPS capacity. If your EBS volume is attached to a current-generation EC2 instance - * type, you may be able to apply these changes without stopping the instance or detaching the + * type, you might be able to apply these changes without stopping the instance or detaching the * volume from it. For more information about modifying an EBS volume running Linux, see Modifying the size, IOPS, or * type of an EBS volume on Linux. For more information about modifying an EBS volume - * running Windows, see Modifying the size, IOPS, or type of an EBS volume on Windows.

+ * running Windows, see Modifying the size, IOPS, or type of an EBS volume on Windows.

*

When you complete a resize operation on your volume, you need to extend the volume's * file-system size to take advantage of the new storage capacity. For information about * extending a Linux file system, see Extending a Linux @@ -34,11 +34,10 @@ export type ModifyVolumeCommandOutput = ModifyVolumeResult & __MetadataBearer; * modification using DescribeVolumesModifications. For information * about tracking status changes using either method, see Monitoring volume * modifications.

- *

With previous-generation instance types, resizing an EBS volume may require detaching and + *

With previous-generation instance types, resizing an EBS volume might require detaching and * reattaching the volume or stopping and restarting the instance. For more information, see - * Modifying the size, - * IOPS, or type of an EBS volume on Linux and Modifying the size, IOPS, or type of an EBS - * volume on Windows.

+ * Amazon EBS Elastic + * Volumes (Linux) or Amazon EBS Elastic Volumes (Windows).

*

If you reach the maximum volume modification rate per volume limit, you will need to wait * at least six hours before applying further modifications to the affected EBS volume.

*/ diff --git a/clients/client-ec2/commands/ResetEbsDefaultKmsKeyIdCommand.ts b/clients/client-ec2/commands/ResetEbsDefaultKmsKeyIdCommand.ts index b26d3dcef473..2ae1f0bed5d8 100644 --- a/clients/client-ec2/commands/ResetEbsDefaultKmsKeyIdCommand.ts +++ b/clients/client-ec2/commands/ResetEbsDefaultKmsKeyIdCommand.ts @@ -25,7 +25,7 @@ export type ResetEbsDefaultKmsKeyIdCommandOutput = ResetEbsDefaultKmsKeyIdResult * to the AWS managed CMK for EBS.

*

After resetting the default CMK to the AWS managed CMK, you can continue to encrypt by a * customer managed CMK by specifying it when you create the volume. For more information, see - * Amazon EBS Encryption + * Amazon EBS encryption * in the Amazon Elastic Compute Cloud User Guide.

*/ export class ResetEbsDefaultKmsKeyIdCommand extends $Command< diff --git a/clients/client-ec2/models/models_0.ts b/clients/client-ec2/models/models_0.ts index 5284e753c43c..221913a13c66 100644 --- a/clients/client-ec2/models/models_0.ts +++ b/clients/client-ec2/models/models_0.ts @@ -3814,7 +3814,7 @@ export interface CopySnapshotRequest { *

To encrypt a copy of an unencrypted snapshot if encryption by default is not enabled, * enable encryption using this parameter. Otherwise, omit this parameter. Encrypted snapshots * are encrypted, even if you omit this parameter and encryption by default is not enabled. You - * cannot set this parameter to false. For more information, see Amazon EBS Encryption in the + * cannot set this parameter to false. For more information, see Amazon EBS encryption in the * Amazon Elastic Compute Cloud User Guide.

*/ Encrypted?: boolean; @@ -3847,13 +3847,13 @@ export interface CopySnapshotRequest { *

When you copy an encrypted source snapshot using the Amazon EC2 Query API, you must supply a * pre-signed URL. This parameter is optional for unencrypted snapshots. For more information, * see Query - * Requests.

+ * requests.

*

The PresignedUrl should use the snapshot source endpoint, the * CopySnapshot action, and include the SourceRegion, * SourceSnapshotId, and DestinationRegion parameters. The * PresignedUrl must be signed using AWS Signature Version 4. Because EBS * snapshots are stored in Amazon S3, the signing algorithm for this parameter uses the same logic - * that is described in Authenticating Requests by Using Query + * that is described in Authenticating Requests: Using Query * Parameters (AWS Signature Version 4) in the Amazon Simple Storage Service API Reference. An * invalid or improperly signed PresignedUrl will cause the copy operation to fail * asynchronously, and the snapshot will move to an error state.

@@ -5443,6 +5443,16 @@ export type _InstanceType = | "d2.4xlarge" | "d2.8xlarge" | "d2.xlarge" + | "d3.2xlarge" + | "d3.4xlarge" + | "d3.8xlarge" + | "d3.xlarge" + | "d3en.12xlarge" + | "d3en.2xlarge" + | "d3en.4xlarge" + | "d3en.6xlarge" + | "d3en.8xlarge" + | "d3en.xlarge" | "f1.16xlarge" | "f1.2xlarge" | "f1.4xlarge" @@ -5555,6 +5565,13 @@ export type _InstanceType = | "m5n.8xlarge" | "m5n.large" | "m5n.xlarge" + | "m5zn.12xlarge" + | "m5zn.2xlarge" + | "m5zn.3xlarge" + | "m5zn.6xlarge" + | "m5zn.large" + | "m5zn.metal" + | "m5zn.xlarge" | "m6g.12xlarge" | "m6g.16xlarge" | "m6g.2xlarge" @@ -5618,6 +5635,15 @@ export type _InstanceType = | "r5ad.8xlarge" | "r5ad.large" | "r5ad.xlarge" + | "r5b.12xlarge" + | "r5b.16xlarge" + | "r5b.24xlarge" + | "r5b.2xlarge" + | "r5b.4xlarge" + | "r5b.8xlarge" + | "r5b.large" + | "r5b.metal" + | "r5b.xlarge" | "r5d.12xlarge" | "r5d.16xlarge" | "r5d.24xlarge" @@ -6669,7 +6695,7 @@ export namespace CreateFpgaImageResult { }); } -export type VolumeType = "gp2" | "io1" | "io2" | "sc1" | "st1" | "standard"; +export type VolumeType = "gp2" | "gp3" | "io1" | "io2" | "sc1" | "st1" | "standard"; /** *

Describes a block device for an EBS volume.

@@ -6683,22 +6709,31 @@ export interface EbsBlockDevice { DeleteOnTermination?: boolean; /** - *

The number of I/O operations per second (IOPS) that the volume supports. For - * io1 and io2 volumes, this represents the number of IOPS that are provisioned - * for the volume. For gp2 volumes, this represents the baseline performance - * of the volume and the rate at which the volume accumulates I/O credits for bursting. For - * more information, see Amazon EBS volume types in the - * Amazon Elastic Compute Cloud User Guide.

- *

Constraints: Range is 100-16,000 IOPS for gp2 volumes and - * 100 to 64,000 IOPS for io1 and io2 volumes in - * most Regions. Maximum io1 and io2 IOPS of 64,000 is guaranteed - * only on Nitro-based - * instances. Other instance families guarantee performance up to - * 32,000 IOPS. For more information, see Amazon EBS Volume - * Types in the Amazon Elastic Compute Cloud User Guide.

- *

Condition: This parameter is required for requests to create io1 and io2 volumes; - * it is not used in requests to create gp2, st1, - * sc1, or standard volumes.

+ *

The number of I/O operations per second (IOPS). For gp3, io1, and io2 volumes, this + * represents the number of IOPS that are provisioned for the volume. For gp2 volumes, this + * represents the baseline performance of the volume and the rate at which the volume accumulates + * I/O credits for bursting.

+ *

The following are the supported values for each volume type:

+ *
    + *
  • + *

    + * gp3: 3,000-16,000 IOPS

    + *
  • + *
  • + *

    + * io1: 100-64,000 IOPS

    + *
  • + *
  • + *

    + * io2: 100-64,000 IOPS

    + *
  • + *
+ *

For io1 and io2 volumes, we guarantee 64,000 IOPS only for + * Instances built on the Nitro System. Other instance families guarantee performance + * up to 32,000 IOPS.

+ *

This parameter is required for io1 and io2 volumes. + * The default for gp3 volumes is 3,000 IOPS. + * This parameter is not supported for gp2, st1, sc1, or standard volumes.

*/ Iops?: number; @@ -6708,24 +6743,39 @@ export interface EbsBlockDevice { SnapshotId?: string; /** - *

The size of the volume, in GiB.

- *

Default: If you're creating the volume from a snapshot and don't specify a volume - * size, the default is the snapshot size.

- *

Constraints: 1-16384 for General Purpose SSD (gp2), 4-16384 for - * Provisioned IOPS SSD (io1 and io2), 500-16384 for Throughput Optimized HDD - * (st1), 500-16384 for Cold HDD (sc1), and 1-1024 for - * Magnetic (standard) volumes. If you specify a snapshot, the volume size - * must be equal to or larger than the snapshot size.

+ *

The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size. + * If you specify a snapshot, the default is the snapshot size. You can specify a volume + * size that is equal to or larger than the snapshot size.

+ *

The following are the supported volume sizes for each volume type:¶

+ *
    + *
  • + *

    + * gp2 and gp3: 1-16,384¶

    + *
  • + *
  • + *

    + * io1 and io2: 4-16,384

    + *
  • + *
  • + *

    + * st1: 500-16,384

    + *
  • + *
  • + *

    + * sc1: 500-16,384

    + *
  • + *
  • + *

    + * standard: 1-1,024

    + *
  • + *
*/ VolumeSize?: number; /** - *

The volume type. If you set the type to io1 or io2, you must also specify - * the Iops parameter. If you set the type to gp2, - * st1, sc1, or standard, you must omit - * the Iops parameter.

- *

Default: gp2 - *

+ *

The volume type. For more information, see Amazon EBS volume types in the + * Amazon Elastic Compute Cloud User Guide. If the volume type is io1 or io2, + * you must specify the IOPS that the volume supports.

*/ VolumeType?: VolumeType | string; @@ -6738,6 +6788,13 @@ export interface EbsBlockDevice { */ KmsKeyId?: string; + /** + *

The throughput that the volume supports, in MiB/s.

+ *

This parameter is valid only for gp3 volumes.

+ *

Valid Range: Minimum value of 125. Maximum value of 1000.

+ */ + Throughput?: number; + /** *

Indicates whether the encryption state of an EBS volume is changed while being * restored from a backing snapshot. @@ -7177,7 +7234,8 @@ export namespace KeyPair { */ export interface LaunchTemplateEbsBlockDeviceRequest { /** - *

Indicates whether the EBS volume is encrypted. Encrypted volumes can only be attached to instances that support Amazon EBS encryption. If you are creating a volume from a snapshot, you can't specify an encryption value.

+ *

Indicates whether the EBS volume is encrypted. Encrypted volumes can only be attached to instances that support Amazon EBS encryption. + * If you are creating a volume from a snapshot, you can't specify an encryption value.

*/ Encrypted?: boolean; @@ -7187,13 +7245,31 @@ export interface LaunchTemplateEbsBlockDeviceRequest { DeleteOnTermination?: boolean; /** - *

The number of I/O operations per second (IOPS) to provision for an io1 or io2 volume, with a maximum - * ratio of 50 IOPS/GiB for io1, and 500 IOPS/GiB for io2. Range is 100 to 64,000 IOPS for - * volumes in most Regions. Maximum IOPS of 64,000 is guaranteed only on - * Nitro-based instances. Other instance families guarantee performance up to - * 32,000 IOPS. For more information, see Amazon EBS Volume Types in the - * Amazon Elastic Compute Cloud User Guide.

- *

This parameter is valid only for Provisioned IOPS SSD (io1 and io2) volumes.

+ *

The number of I/O operations per second (IOPS). For gp3, io1, and io2 + * volumes, this represents the number of IOPS that are provisioned for the volume. For gp2 volumes, + * this represents the baseline performance of the volume and the rate at which the volume accumulates I/O credits + * for bursting.

+ *

The following are the supported values for each volume type:

+ *
    + *
  • + *

    + * gp3: 3,000-16,000 IOPS

    + *
  • + *
  • + *

    + * io1: 100-64,000 IOPS

    + *
  • + *
  • + *

    + * io2: 100-64,000 IOPS

    + *
  • + *
+ *

For io1 and io2 volumes, we guarantee 64,000 IOPS + * only for Instances built on the Nitro System. Other instance families guarantee performance up + * to 32,000 IOPS.

+ *

This parameter is required for io1 and io2 volumes. + * The default for gp3 volumes is 3,000 IOPS. + * This parameter is not supported for gp2, st1, sc1, or standard volumes.

*/ Iops?: number; @@ -7209,15 +7285,42 @@ export interface LaunchTemplateEbsBlockDeviceRequest { SnapshotId?: string; /** - *

The size of the volume, in GiB.

- *

Default: If you're creating the volume from a snapshot and don't specify a volume size, the default is the snapshot size.

+ *

The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size. + * If you specify a snapshot, the default is the snapshot size. You can specify a volume + * size that is equal to or larger than the snapshot size.

+ *

The following are the supported volume sizes for each volume type:¶

+ *
    + *
  • + *

    + * gp2 and gp3: 1-16,384

    + *
  • + *
  • + *

    + * io1 and io2: 4-16,384

    + *
  • + *
  • + *

    + * st1 and sc1: 125-16,384

    + *
  • + *
  • + *

    + * standard: 1-1,024

    + *
  • + *
*/ VolumeSize?: number; /** - *

The volume type.

+ *

The volume type. The default is gp2. For more information, see Amazon EBS volume types in the + * Amazon Elastic Compute Cloud User Guide.

*/ VolumeType?: VolumeType | string; + + /** + *

The throughput to provision for a gp3 volume, with a maximum of 1,000 MiB/s.

+ *

Valid Range: Minimum value of 125. Maximum value of 1000.

+ */ + Throughput?: number; } export namespace LaunchTemplateEbsBlockDeviceRequest { diff --git a/clients/client-ec2/models/models_1.ts b/clients/client-ec2/models/models_1.ts index 326bc1e2e50b..ba9303ed838b 100644 --- a/clients/client-ec2/models/models_1.ts +++ b/clients/client-ec2/models/models_1.ts @@ -713,6 +713,11 @@ export interface LaunchTemplateEbsBlockDevice { *

The volume type.

*/ VolumeType?: VolumeType | string; + + /** + *

The throughput that the volume supports, in MiB/s.

+ */ + Throughput?: number; } export namespace LaunchTemplateEbsBlockDevice { @@ -5174,7 +5179,7 @@ export interface CreateVolumeRequest { AvailabilityZone: string | undefined; /** - *

Specifies whether the volume should be encrypted. + *

Indicates whether the volume should be encrypted. * The effect of setting the encryption state to true depends on * the volume origin (new or from a snapshot), starting encryption state, ownership, and whether encryption by default is enabled. * For more information, see Encryption by default @@ -5186,13 +5191,30 @@ export interface CreateVolumeRequest { Encrypted?: boolean; /** - *

The number of I/O operations per second (IOPS) to provision for an io1 or io2 volume, with a maximum - * ratio of 50 IOPS/GiB for io1, and 500 IOPS/GiB for io2. Range is 100 to 64,000 IOPS for - * volumes in most Regions. Maximum IOPS of 64,000 is guaranteed only on - * Nitro-based instances. Other instance families guarantee performance up to - * 32,000 IOPS. For more information, see Amazon EBS volume types in the - * Amazon Elastic Compute Cloud User Guide.

- *

This parameter is valid only for Provisioned IOPS SSD (io1 and io2) volumes.

+ *

The number of I/O operations per second (IOPS). For gp3, io1, and io2 volumes, this represents + * the number of IOPS that are provisioned for the volume. For gp2 volumes, this represents the baseline + * performance of the volume and the rate at which the volume accumulates I/O credits for bursting.

+ *

The following are the supported values for each volume type:

+ *
    + *
  • + *

    + * gp3: 3,000-16,000 IOPS

    + *
  • + *
  • + *

    + * io1: 100-64,000 IOPS

    + *
  • + *
  • + *

    + * io2: 100-64,000 IOPS

    + *
  • + *
+ *

For io1 and io2 volumes, we guarantee 64,000 IOPS only for + * Instances built on the Nitro System. Other instance families guarantee performance + * up to 32,000 IOPS.

+ *

This parameter is required for io1 and io2 volumes. + * The default for gp3 volumes is 3,000 IOPS. + * This parameter is not supported for gp2, st1, sc1, or standard volumes.

*/ Iops?: number; @@ -5226,14 +5248,28 @@ export interface CreateVolumeRequest { OutpostArn?: string; /** - *

The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size.

- *

Constraints: 1-16,384 for gp2, - * 4-16,384 for io1 and io2, 500-16,384 for - * st1, 500-16,384 for sc1, and - * 1-1,024 for standard. If you specify a - * snapshot, the volume size must be equal to or larger than the snapshot size.

- *

Default: If you're creating the volume from a snapshot and don't specify a volume size, - * the default is the snapshot size.

+ *

The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size. + * If you specify a snapshot, the default is the snapshot size. You can specify a volume + * size that is equal to or larger than the snapshot size.

+ *

The following are the supported volume sizes for each volume type:¶

+ *
    + *
  • + *

    + * gp2 and gp3: 1-16,384

    + *
  • + *
  • + *

    + * io1 and io2: 4-16,384

    + *
  • + *
  • + *

    + * st1 and sc1: 125-16,384

    + *
  • + *
  • + *

    + * standard: 1-1,024

    + *
  • + *
*/ Size?: number; @@ -5243,9 +5279,31 @@ export interface CreateVolumeRequest { SnapshotId?: string; /** - *

The volume type. This can be gp2 for General Purpose SSD, io1 or io2 for Provisioned IOPS SSD, - * st1 for Throughput Optimized HDD, sc1 for Cold HDD, or - * standard for Magnetic volumes.

+ *

The volume type. This parameter can be one of the following values:

+ *
    + *
  • + *

    General Purpose SSD: gp2 | gp3 + *

    + *
  • + *
  • + *

    Provisioned IOPS SSD: io1 | io2 + *

    + *
  • + *
  • + *

    Throughput Optimized HDD: st1 + *

    + *
  • + *
  • + *

    Cold HDD: sc1 + *

    + *
  • + *
  • + *

    Magnetic: standard + *

    + *
  • + *
+ *

For more information, see Amazon EBS volume types in the + * Amazon Elastic Compute Cloud User Guide.

*

Default: gp2 *

*/ @@ -5264,12 +5322,20 @@ export interface CreateVolumeRequest { TagSpecifications?: TagSpecification[]; /** - *

Specifies whether to enable Amazon EBS Multi-Attach. If you enable Multi-Attach, you can attach the - * volume to up to 16 Nitro-based instances in the same Availability Zone. For more information, + *

Indicates whether to enable Amazon EBS Multi-Attach. If you enable Multi-Attach, you can attach the + * volume to up to 16 Instances built on the Nitro System in the same Availability Zone. This parameter is + * supported with io1 volumes only. For more information, * see * Amazon EBS Multi-Attach in the Amazon Elastic Compute Cloud User Guide.

*/ MultiAttachEnabled?: boolean; + + /** + *

The throughput to provision for a volume, with a maximum of 1,000 MiB/s.

+ *

This parameter is valid only for gp3 volumes.

+ *

Valid Range: Minimum value of 125. Maximum value of 1000.

+ */ + Throughput?: number; } export namespace CreateVolumeRequest { @@ -5336,19 +5402,9 @@ export interface Volume { VolumeId?: string; /** - *

The number of I/O operations per second (IOPS) that the volume supports. For Provisioned IOPS SSD - * volumes, this represents the number of IOPS that are provisioned for the volume. For General Purpose SSD - * volumes, this represents the baseline performance of the volume and the rate at which the - * volume accumulates I/O credits for bursting. For more information, see Amazon EBS volume types in - * the Amazon Elastic Compute Cloud User Guide.

- *

Constraints: Range is 100-16,000 IOPS for gp2 volumes and 100 - * to 64,000 IOPS for io1 and io2 volumes, in most Regions. The maximum - * IOPS for io1 and io2 of 64,000 is guaranteed only on Nitro-based - * instances. Other instance families guarantee performance up to - * 32,000 IOPS.

- *

Condition: This parameter is required for requests to create io1 and io2 volumes; - * it is not used in requests to create gp2, st1, - * sc1, or standard volumes.

+ *

The number of I/O operations per second (IOPS). For gp3, io1, and io2 volumes, this represents + * the number of IOPS that are provisioned for the volume. For gp2 volumes, this represents the baseline + * performance of the volume and the rate at which the volume accumulates I/O credits for bursting.

*/ Iops?: number; @@ -5358,9 +5414,7 @@ export interface Volume { Tags?: Tag[]; /** - *

The volume type. This can be gp2 for General Purpose SSD, io1 or io2 for Provisioned IOPS SSD, - * st1 for Throughput Optimized HDD, sc1 for Cold HDD, or - * standard for Magnetic volumes.

+ *

The volume type.

*/ VolumeType?: VolumeType | string; @@ -5373,6 +5427,11 @@ export interface Volume { *

Indicates whether Amazon EBS Multi-Attach is enabled.

*/ MultiAttachEnabled?: boolean; + + /** + *

The throughput that the volume supports, in MiB/s.

+ */ + Throughput?: number; } export namespace Volume { diff --git a/clients/client-ec2/models/models_3.ts b/clients/client-ec2/models/models_3.ts index baeb794dabd7..0dfe05df2416 100644 --- a/clients/client-ec2/models/models_3.ts +++ b/clients/client-ec2/models/models_3.ts @@ -3765,8 +3765,8 @@ export interface DescribeSnapshotsRequest { * MaxResults results in a single page along with a NextToken * response element. The remaining results of the initial request can be seen by sending another * DescribeSnapshots request with the returned NextToken value. This - * value can be between 5 and 1000; if MaxResults is given a value larger than 1000, - * only 1000 results are returned. If this parameter is not used, then + * value can be between 5 and 1,000; if MaxResults is given a value larger than 1,000, + * only 1,000 results are returned. If this parameter is not used, then * DescribeSnapshots returns all results. You cannot specify this parameter and * the snapshot IDs parameter in the same request.

*/ @@ -6846,9 +6846,8 @@ export interface DescribeVolumesRequest { * *
  • *

    - * volume-type - The Amazon EBS volume type. This can be gp2 - * for General Purpose SSD, io1 or io2 for Provisioned IOPS SSD, st1 for Throughput Optimized HDD, - * sc1 for Cold HDD, or standard for Magnetic volumes.

    + * volume-type - The Amazon EBS volume type (gp2 | gp3 | io1 | io2 | + * st1 | sc1 | standard)¶

    *
  • * */ @@ -7039,6 +7038,11 @@ export interface VolumeModification { */ TargetVolumeType?: VolumeType | string; + /** + *

    The target throughput of the volume, in MiB/s.

    + */ + TargetThroughput?: number; + /** *

    The original size of the volume, in GiB.

    */ @@ -7054,6 +7058,11 @@ export interface VolumeModification { */ OriginalVolumeType?: VolumeType | string; + /** + *

    The original throughput of the volume, in MiB/s.

    + */ + OriginalThroughput?: number; + /** *

    The modification progress, from 0 to 100 percent complete.

    */ @@ -7166,8 +7175,8 @@ export interface DescribeVolumeStatusRequest { * paginated output. When this parameter is used, the request only returns * MaxResults results in a single page along with a NextToken * response element. The remaining results of the initial request can be seen by sending another - * request with the returned NextToken value. This value can be between 5 and 1000; - * if MaxResults is given a value larger than 1000, only 1000 results are returned. + * request with the returned NextToken value. This value can be between 5 and 1,000; + * if MaxResults is given a value larger than 1,000, only 1,000 results are returned. * If this parameter is not used, then DescribeVolumeStatus returns all results. You * cannot specify this parameter and the volume IDs parameter in the same request.

    */ diff --git a/clients/client-ec2/models/models_4.ts b/clients/client-ec2/models/models_4.ts index b48ea87ad2a4..4fea82fbc6e9 100644 --- a/clients/client-ec2/models/models_4.ts +++ b/clients/client-ec2/models/models_4.ts @@ -4121,27 +4121,64 @@ export interface ModifyVolumeRequest { /** *

    The target size of the volume, in GiB. The target volume size must be greater than or - * equal to than the existing size of the volume. For information about available EBS volume - * sizes, see Amazon EBS Volume - * Types.

    + * equal to the existing size of the volume.

    + *

    The following are the supported volume sizes for each volume type:¶

    + *
      + *
    • + *

      + * gp2 and gp3: 1-16,384

      + *
    • + *
    • + *

      + * io1 and io2: 4-16,384

      + *
    • + *
    • + *

      + * st1 and sc1: 125-16,384

      + *
    • + *
    • + *

      + * standard: 1-1,024

      + *
    • + *
    *

    Default: If no size is specified, the existing size is retained.

    */ Size?: number; /** - *

    The target EBS volume type of the volume.

    + *

    The target EBS volume type of the volume. For more information, see Amazon EBS volume types in the Amazon Elastic Compute Cloud User Guide.

    *

    Default: If no type is specified, the existing type is retained.

    */ VolumeType?: VolumeType | string; /** - *

    The target IOPS rate of the volume.

    - *

    This is only valid for Provisioned IOPS SSD (io1 and io2) volumes. For moreinformation, see - * - * Provisioned IOPS SSD (io1 and io2) volumes.

    + *

    The target IOPS rate of the volume. This parameter is valid only for gp3, io1, and io2 volumes.

    + *

    The following are the supported values for each volume type:

    + *
      + *
    • + *

      + * gp3: 3,000-16,000 IOPS

      + *
    • + *
    • + *

      + * io1: 100-64,000 IOPS

      + *
    • + *
    • + *

      + * io2: 100-64,000 IOPS

      + *
    • + *
    *

    Default: If no IOPS value is specified, the existing value is retained.

    */ Iops?: number; + + /** + *

    The target throughput of the volume, in MiB/s. This parameter is valid only for gp3 volumes. + * The maximum value is 1,000.

    + *

    Default: If no throughput value is specified, the existing value is retained.

    + *

    Valid Range: Minimum value of 125. Maximum value of 1000.

    + */ + Throughput?: number; } export namespace ModifyVolumeRequest { diff --git a/clients/client-ec2/protocols/Aws_ec2.ts b/clients/client-ec2/protocols/Aws_ec2.ts index ad2c49799beb..f39ea0f621c5 100644 --- a/clients/client-ec2/protocols/Aws_ec2.ts +++ b/clients/client-ec2/protocols/Aws_ec2.ts @@ -31638,6 +31638,9 @@ const serializeAws_ec2CreateVolumeRequest = (input: CreateVolumeRequest, context if (input.MultiAttachEnabled !== undefined) { entries["MultiAttachEnabled"] = input.MultiAttachEnabled; } + if (input.Throughput !== undefined) { + entries["Throughput"] = input.Throughput; + } return entries; }; @@ -36464,6 +36467,9 @@ const serializeAws_ec2EbsBlockDevice = (input: EbsBlockDevice, context: __SerdeC if (input.KmsKeyId !== undefined) { entries["KmsKeyId"] = input.KmsKeyId; } + if (input.Throughput !== undefined) { + entries["Throughput"] = input.Throughput; + } if (input.Encrypted !== undefined) { entries["Encrypted"] = input.Encrypted; } @@ -38580,6 +38586,9 @@ const serializeAws_ec2LaunchTemplateEbsBlockDeviceRequest = ( if (input.VolumeType !== undefined) { entries["VolumeType"] = input.VolumeType; } + if (input.Throughput !== undefined) { + entries["Throughput"] = input.Throughput; + } return entries; }; @@ -40192,6 +40201,9 @@ const serializeAws_ec2ModifyVolumeRequest = (input: ModifyVolumeRequest, context if (input.Iops !== undefined) { entries["Iops"] = input.Iops; } + if (input.Throughput !== undefined) { + entries["Throughput"] = input.Throughput; + } return entries; }; @@ -51396,6 +51408,7 @@ const deserializeAws_ec2EbsBlockDevice = (output: any, context: __SerdeContext): VolumeSize: undefined, VolumeType: undefined, KmsKeyId: undefined, + Throughput: undefined, Encrypted: undefined, }; if (output["deleteOnTermination"] !== undefined) { @@ -51416,6 +51429,9 @@ const deserializeAws_ec2EbsBlockDevice = (output: any, context: __SerdeContext): if (output["KmsKeyId"] !== undefined) { contents.KmsKeyId = output["KmsKeyId"]; } + if 
(output["throughput"] !== undefined) { + contents.Throughput = parseInt(output["throughput"]); + } if (output["encrypted"] !== undefined) { contents.Encrypted = output["encrypted"] == "true"; } @@ -56027,6 +56043,7 @@ const deserializeAws_ec2LaunchTemplateEbsBlockDevice = ( SnapshotId: undefined, VolumeSize: undefined, VolumeType: undefined, + Throughput: undefined, }; if (output["encrypted"] !== undefined) { contents.Encrypted = output["encrypted"] == "true"; @@ -56049,6 +56066,9 @@ const deserializeAws_ec2LaunchTemplateEbsBlockDevice = ( if (output["volumeType"] !== undefined) { contents.VolumeType = output["volumeType"]; } + if (output["throughput"] !== undefined) { + contents.Throughput = parseInt(output["throughput"]); + } return contents; }; @@ -63748,6 +63768,7 @@ const deserializeAws_ec2Volume = (output: any, context: __SerdeContext): Volume VolumeType: undefined, FastRestored: undefined, MultiAttachEnabled: undefined, + Throughput: undefined, }; if (output.attachmentSet === "") { contents.Attachments = []; @@ -63803,6 +63824,9 @@ const deserializeAws_ec2Volume = (output: any, context: __SerdeContext): Volume if (output["multiAttachEnabled"] !== undefined) { contents.MultiAttachEnabled = output["multiAttachEnabled"] == "true"; } + if (output["throughput"] !== undefined) { + contents.Throughput = parseInt(output["throughput"]); + } return contents; }; @@ -63852,9 +63876,11 @@ const deserializeAws_ec2VolumeModification = (output: any, context: __SerdeConte TargetSize: undefined, TargetIops: undefined, TargetVolumeType: undefined, + TargetThroughput: undefined, OriginalSize: undefined, OriginalIops: undefined, OriginalVolumeType: undefined, + OriginalThroughput: undefined, Progress: undefined, StartTime: undefined, EndTime: undefined, @@ -63877,6 +63903,9 @@ const deserializeAws_ec2VolumeModification = (output: any, context: __SerdeConte if (output["targetVolumeType"] !== undefined) { contents.TargetVolumeType = output["targetVolumeType"]; } + if 
(output["targetThroughput"] !== undefined) { + contents.TargetThroughput = parseInt(output["targetThroughput"]); + } if (output["originalSize"] !== undefined) { contents.OriginalSize = parseInt(output["originalSize"]); } @@ -63886,6 +63915,9 @@ const deserializeAws_ec2VolumeModification = (output: any, context: __SerdeConte if (output["originalVolumeType"] !== undefined) { contents.OriginalVolumeType = output["originalVolumeType"]; } + if (output["originalThroughput"] !== undefined) { + contents.OriginalThroughput = parseInt(output["originalThroughput"]); + } if (output["progress"] !== undefined) { contents.Progress = parseInt(output["progress"]); } diff --git a/clients/client-ecr-public/.gitignore b/clients/client-ecr-public/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-ecr-public/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-ecr-public/.npmignore b/clients/client-ecr-public/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-ecr-public/.npmignore @@ -0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-ecr-public/ECRPUBLIC.ts b/clients/client-ecr-public/ECRPUBLIC.ts new file mode 100644 index 000000000000..7d07b0f42ad6 --- /dev/null +++ b/clients/client-ecr-public/ECRPUBLIC.ts @@ -0,0 +1,796 @@ +import { ECRPUBLICClient } from "./ECRPUBLICClient"; +import { + BatchCheckLayerAvailabilityCommand, + BatchCheckLayerAvailabilityCommandInput, + BatchCheckLayerAvailabilityCommandOutput, +} from "./commands/BatchCheckLayerAvailabilityCommand"; +import { + BatchDeleteImageCommand, + BatchDeleteImageCommandInput, + BatchDeleteImageCommandOutput, +} from "./commands/BatchDeleteImageCommand"; +import { + CompleteLayerUploadCommand, + CompleteLayerUploadCommandInput, + 
CompleteLayerUploadCommandOutput, +} from "./commands/CompleteLayerUploadCommand"; +import { + CreateRepositoryCommand, + CreateRepositoryCommandInput, + CreateRepositoryCommandOutput, +} from "./commands/CreateRepositoryCommand"; +import { + DeleteRepositoryCommand, + DeleteRepositoryCommandInput, + DeleteRepositoryCommandOutput, +} from "./commands/DeleteRepositoryCommand"; +import { + DeleteRepositoryPolicyCommand, + DeleteRepositoryPolicyCommandInput, + DeleteRepositoryPolicyCommandOutput, +} from "./commands/DeleteRepositoryPolicyCommand"; +import { + DescribeImageTagsCommand, + DescribeImageTagsCommandInput, + DescribeImageTagsCommandOutput, +} from "./commands/DescribeImageTagsCommand"; +import { + DescribeImagesCommand, + DescribeImagesCommandInput, + DescribeImagesCommandOutput, +} from "./commands/DescribeImagesCommand"; +import { + DescribeRegistriesCommand, + DescribeRegistriesCommandInput, + DescribeRegistriesCommandOutput, +} from "./commands/DescribeRegistriesCommand"; +import { + DescribeRepositoriesCommand, + DescribeRepositoriesCommandInput, + DescribeRepositoriesCommandOutput, +} from "./commands/DescribeRepositoriesCommand"; +import { + GetAuthorizationTokenCommand, + GetAuthorizationTokenCommandInput, + GetAuthorizationTokenCommandOutput, +} from "./commands/GetAuthorizationTokenCommand"; +import { + GetRegistryCatalogDataCommand, + GetRegistryCatalogDataCommandInput, + GetRegistryCatalogDataCommandOutput, +} from "./commands/GetRegistryCatalogDataCommand"; +import { + GetRepositoryCatalogDataCommand, + GetRepositoryCatalogDataCommandInput, + GetRepositoryCatalogDataCommandOutput, +} from "./commands/GetRepositoryCatalogDataCommand"; +import { + GetRepositoryPolicyCommand, + GetRepositoryPolicyCommandInput, + GetRepositoryPolicyCommandOutput, +} from "./commands/GetRepositoryPolicyCommand"; +import { + InitiateLayerUploadCommand, + InitiateLayerUploadCommandInput, + InitiateLayerUploadCommandOutput, +} from 
"./commands/InitiateLayerUploadCommand"; +import { PutImageCommand, PutImageCommandInput, PutImageCommandOutput } from "./commands/PutImageCommand"; +import { + PutRegistryCatalogDataCommand, + PutRegistryCatalogDataCommandInput, + PutRegistryCatalogDataCommandOutput, +} from "./commands/PutRegistryCatalogDataCommand"; +import { + PutRepositoryCatalogDataCommand, + PutRepositoryCatalogDataCommandInput, + PutRepositoryCatalogDataCommandOutput, +} from "./commands/PutRepositoryCatalogDataCommand"; +import { + SetRepositoryPolicyCommand, + SetRepositoryPolicyCommandInput, + SetRepositoryPolicyCommandOutput, +} from "./commands/SetRepositoryPolicyCommand"; +import { + UploadLayerPartCommand, + UploadLayerPartCommandInput, + UploadLayerPartCommandOutput, +} from "./commands/UploadLayerPartCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + * Amazon Elastic Container Registry Public + *

    Amazon Elastic Container Registry (Amazon ECR) is a managed container image registry service. Amazon ECR provides both + * public and private registries to host your container images. You can use the familiar + * Docker CLI, or their preferred client, to push, pull, and manage images. Amazon ECR provides a + * secure, scalable, and reliable registry for your Docker or Open Container Initiative (OCI) + * images. Amazon ECR supports public repositories with this API. For information about the Amazon ECR + * API for private repositories, see Amazon Elastic Container Registry API Reference.

    + */ +export class ECRPUBLIC extends ECRPUBLICClient { + /** + *

    Checks the availability of one or more image layers within a repository in a public + * registry. When an image is pushed to a repository, each image layer is checked to verify if + * it has been uploaded before. If it has been uploaded, then the image layer is + * skipped.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ + public batchCheckLayerAvailability( + args: BatchCheckLayerAvailabilityCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public batchCheckLayerAvailability( + args: BatchCheckLayerAvailabilityCommandInput, + cb: (err: any, data?: BatchCheckLayerAvailabilityCommandOutput) => void + ): void; + public batchCheckLayerAvailability( + args: BatchCheckLayerAvailabilityCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchCheckLayerAvailabilityCommandOutput) => void + ): void; + public batchCheckLayerAvailability( + args: BatchCheckLayerAvailabilityCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: BatchCheckLayerAvailabilityCommandOutput) => void), + cb?: (err: any, data?: BatchCheckLayerAvailabilityCommandOutput) => void + ): Promise | void { + const command = new BatchCheckLayerAvailabilityCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Deletes a list of specified images within a repository in a public registry. Images are + * specified with either an imageTag or imageDigest.

    + *

    You can remove a tag from an image by specifying the image's tag in your request. When + * you remove the last tag from an image, the image is deleted from your repository.

    + *

    You can completely delete an image (and all of its tags) by specifying the image's + * digest in your request.

    + */ + public batchDeleteImage( + args: BatchDeleteImageCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public batchDeleteImage( + args: BatchDeleteImageCommandInput, + cb: (err: any, data?: BatchDeleteImageCommandOutput) => void + ): void; + public batchDeleteImage( + args: BatchDeleteImageCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchDeleteImageCommandOutput) => void + ): void; + public batchDeleteImage( + args: BatchDeleteImageCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: BatchDeleteImageCommandOutput) => void), + cb?: (err: any, data?: BatchDeleteImageCommandOutput) => void + ): Promise | void { + const command = new BatchDeleteImageCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Informs Amazon ECR that the image layer upload has completed for a specified public registry, + * repository name, and upload ID. You can optionally provide a sha256 digest of + * the image layer for data validation purposes.

    + *

    When an image is pushed, the CompleteLayerUpload API is called once per each new image + * layer to verify that the upload has completed.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ + public completeLayerUpload( + args: CompleteLayerUploadCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public completeLayerUpload( + args: CompleteLayerUploadCommandInput, + cb: (err: any, data?: CompleteLayerUploadCommandOutput) => void + ): void; + public completeLayerUpload( + args: CompleteLayerUploadCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CompleteLayerUploadCommandOutput) => void + ): void; + public completeLayerUpload( + args: CompleteLayerUploadCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CompleteLayerUploadCommandOutput) => void), + cb?: (err: any, data?: CompleteLayerUploadCommandOutput) => void + ): Promise | void { + const command = new CompleteLayerUploadCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Creates a repository in a public registry. For more information, see Amazon ECR + * repositories in the Amazon Elastic Container Registry User Guide.

    + */ + public createRepository( + args: CreateRepositoryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createRepository( + args: CreateRepositoryCommandInput, + cb: (err: any, data?: CreateRepositoryCommandOutput) => void + ): void; + public createRepository( + args: CreateRepositoryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateRepositoryCommandOutput) => void + ): void; + public createRepository( + args: CreateRepositoryCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateRepositoryCommandOutput) => void), + cb?: (err: any, data?: CreateRepositoryCommandOutput) => void + ): Promise | void { + const command = new CreateRepositoryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Deletes a repository in a public registry. If the repository contains images, you must + * either delete all images in the repository or use the force option which + * deletes all images on your behalf before deleting the repository.

    + */ + public deleteRepository( + args: DeleteRepositoryCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteRepository( + args: DeleteRepositoryCommandInput, + cb: (err: any, data?: DeleteRepositoryCommandOutput) => void + ): void; + public deleteRepository( + args: DeleteRepositoryCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteRepositoryCommandOutput) => void + ): void; + public deleteRepository( + args: DeleteRepositoryCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteRepositoryCommandOutput) => void), + cb?: (err: any, data?: DeleteRepositoryCommandOutput) => void + ): Promise | void { + const command = new DeleteRepositoryCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Deletes the repository policy associated with the specified repository.

    + */ + public deleteRepositoryPolicy( + args: DeleteRepositoryPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteRepositoryPolicy( + args: DeleteRepositoryPolicyCommandInput, + cb: (err: any, data?: DeleteRepositoryPolicyCommandOutput) => void + ): void; + public deleteRepositoryPolicy( + args: DeleteRepositoryPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteRepositoryPolicyCommandOutput) => void + ): void; + public deleteRepositoryPolicy( + args: DeleteRepositoryPolicyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteRepositoryPolicyCommandOutput) => void), + cb?: (err: any, data?: DeleteRepositoryPolicyCommandOutput) => void + ): Promise | void { + const command = new DeleteRepositoryPolicyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Returns metadata about the images in a repository in a public registry.

    + * + *

    Beginning with Docker version 1.9, the Docker client compresses image layers before + * pushing them to a V2 Docker registry. The output of the docker images + * command shows the uncompressed image size, so it may return a larger image size than the + * image sizes returned by DescribeImages.

    + *
    + */ + public describeImages( + args: DescribeImagesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeImages( + args: DescribeImagesCommandInput, + cb: (err: any, data?: DescribeImagesCommandOutput) => void + ): void; + public describeImages( + args: DescribeImagesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeImagesCommandOutput) => void + ): void; + public describeImages( + args: DescribeImagesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeImagesCommandOutput) => void), + cb?: (err: any, data?: DescribeImagesCommandOutput) => void + ): Promise | void { + const command = new DescribeImagesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Returns the image tag details for a repository in a public registry.

    + */ + public describeImageTags( + args: DescribeImageTagsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeImageTags( + args: DescribeImageTagsCommandInput, + cb: (err: any, data?: DescribeImageTagsCommandOutput) => void + ): void; + public describeImageTags( + args: DescribeImageTagsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeImageTagsCommandOutput) => void + ): void; + public describeImageTags( + args: DescribeImageTagsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeImageTagsCommandOutput) => void), + cb?: (err: any, data?: DescribeImageTagsCommandOutput) => void + ): Promise | void { + const command = new DescribeImageTagsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Returns details for a public registry.

    + */ + public describeRegistries( + args: DescribeRegistriesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeRegistries( + args: DescribeRegistriesCommandInput, + cb: (err: any, data?: DescribeRegistriesCommandOutput) => void + ): void; + public describeRegistries( + args: DescribeRegistriesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeRegistriesCommandOutput) => void + ): void; + public describeRegistries( + args: DescribeRegistriesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeRegistriesCommandOutput) => void), + cb?: (err: any, data?: DescribeRegistriesCommandOutput) => void + ): Promise | void { + const command = new DescribeRegistriesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Describes repositories in a public registry.

    + */ + public describeRepositories( + args: DescribeRepositoriesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeRepositories( + args: DescribeRepositoriesCommandInput, + cb: (err: any, data?: DescribeRepositoriesCommandOutput) => void + ): void; + public describeRepositories( + args: DescribeRepositoriesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeRepositoriesCommandOutput) => void + ): void; + public describeRepositories( + args: DescribeRepositoriesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeRepositoriesCommandOutput) => void), + cb?: (err: any, data?: DescribeRepositoriesCommandOutput) => void + ): Promise | void { + const command = new DescribeRepositoriesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Retrieves an authorization token. An authorization token represents your IAM + * authentication credentials and can be used to access any Amazon ECR registry that your IAM + * principal has access to. The authorization token is valid for 12 hours. This API requires + * the ecr-public:GetAuthorizationToken and + * sts:GetServiceBearerToken permissions.

    + */ + public getAuthorizationToken( + args: GetAuthorizationTokenCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getAuthorizationToken( + args: GetAuthorizationTokenCommandInput, + cb: (err: any, data?: GetAuthorizationTokenCommandOutput) => void + ): void; + public getAuthorizationToken( + args: GetAuthorizationTokenCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetAuthorizationTokenCommandOutput) => void + ): void; + public getAuthorizationToken( + args: GetAuthorizationTokenCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetAuthorizationTokenCommandOutput) => void), + cb?: (err: any, data?: GetAuthorizationTokenCommandOutput) => void + ): Promise | void { + const command = new GetAuthorizationTokenCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Retrieves catalog metadata for a public registry.

    + */ + public getRegistryCatalogData( + args: GetRegistryCatalogDataCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getRegistryCatalogData( + args: GetRegistryCatalogDataCommandInput, + cb: (err: any, data?: GetRegistryCatalogDataCommandOutput) => void + ): void; + public getRegistryCatalogData( + args: GetRegistryCatalogDataCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRegistryCatalogDataCommandOutput) => void + ): void; + public getRegistryCatalogData( + args: GetRegistryCatalogDataCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetRegistryCatalogDataCommandOutput) => void), + cb?: (err: any, data?: GetRegistryCatalogDataCommandOutput) => void + ): Promise | void { + const command = new GetRegistryCatalogDataCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Retrieve catalog metadata for a repository in a public registry. This metadata is + * displayed publicly in the Amazon ECR Public Gallery.

    + */ + public getRepositoryCatalogData( + args: GetRepositoryCatalogDataCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getRepositoryCatalogData( + args: GetRepositoryCatalogDataCommandInput, + cb: (err: any, data?: GetRepositoryCatalogDataCommandOutput) => void + ): void; + public getRepositoryCatalogData( + args: GetRepositoryCatalogDataCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRepositoryCatalogDataCommandOutput) => void + ): void; + public getRepositoryCatalogData( + args: GetRepositoryCatalogDataCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetRepositoryCatalogDataCommandOutput) => void), + cb?: (err: any, data?: GetRepositoryCatalogDataCommandOutput) => void + ): Promise | void { + const command = new GetRepositoryCatalogDataCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Retrieves the repository policy for the specified repository.

    + */ + public getRepositoryPolicy( + args: GetRepositoryPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getRepositoryPolicy( + args: GetRepositoryPolicyCommandInput, + cb: (err: any, data?: GetRepositoryPolicyCommandOutput) => void + ): void; + public getRepositoryPolicy( + args: GetRepositoryPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRepositoryPolicyCommandOutput) => void + ): void; + public getRepositoryPolicy( + args: GetRepositoryPolicyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetRepositoryPolicyCommandOutput) => void), + cb?: (err: any, data?: GetRepositoryPolicyCommandOutput) => void + ): Promise | void { + const command = new GetRepositoryPolicyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Notifies Amazon ECR that you intend to upload an image layer.

    + *

    When an image is pushed, the InitiateLayerUpload API is called once per image layer that + * has not already been uploaded. Whether or not an image layer has been uploaded is + * determined by the BatchCheckLayerAvailability API action.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ + public initiateLayerUpload( + args: InitiateLayerUploadCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public initiateLayerUpload( + args: InitiateLayerUploadCommandInput, + cb: (err: any, data?: InitiateLayerUploadCommandOutput) => void + ): void; + public initiateLayerUpload( + args: InitiateLayerUploadCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: InitiateLayerUploadCommandOutput) => void + ): void; + public initiateLayerUpload( + args: InitiateLayerUploadCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: InitiateLayerUploadCommandOutput) => void), + cb?: (err: any, data?: InitiateLayerUploadCommandOutput) => void + ): Promise | void { + const command = new InitiateLayerUploadCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Creates or updates the image manifest and tags associated with an image.

    + *

    When an image is pushed and all new image layers have been uploaded, the PutImage API is + * called once to create or update the image manifest and the tags associated with the + * image.

    + * + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ + public putImage(args: PutImageCommandInput, options?: __HttpHandlerOptions): Promise; + public putImage(args: PutImageCommandInput, cb: (err: any, data?: PutImageCommandOutput) => void): void; + public putImage( + args: PutImageCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutImageCommandOutput) => void + ): void; + public putImage( + args: PutImageCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: PutImageCommandOutput) => void), + cb?: (err: any, data?: PutImageCommandOutput) => void + ): Promise | void { + const command = new PutImageCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Create or updates the catalog data for a public registry.

    + */ + public putRegistryCatalogData( + args: PutRegistryCatalogDataCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public putRegistryCatalogData( + args: PutRegistryCatalogDataCommandInput, + cb: (err: any, data?: PutRegistryCatalogDataCommandOutput) => void + ): void; + public putRegistryCatalogData( + args: PutRegistryCatalogDataCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutRegistryCatalogDataCommandOutput) => void + ): void; + public putRegistryCatalogData( + args: PutRegistryCatalogDataCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: PutRegistryCatalogDataCommandOutput) => void), + cb?: (err: any, data?: PutRegistryCatalogDataCommandOutput) => void + ): Promise | void { + const command = new PutRegistryCatalogDataCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Creates or updates the catalog data for a repository in a public registry.

    + */ + public putRepositoryCatalogData( + args: PutRepositoryCatalogDataCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public putRepositoryCatalogData( + args: PutRepositoryCatalogDataCommandInput, + cb: (err: any, data?: PutRepositoryCatalogDataCommandOutput) => void + ): void; + public putRepositoryCatalogData( + args: PutRepositoryCatalogDataCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutRepositoryCatalogDataCommandOutput) => void + ): void; + public putRepositoryCatalogData( + args: PutRepositoryCatalogDataCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: PutRepositoryCatalogDataCommandOutput) => void), + cb?: (err: any, data?: PutRepositoryCatalogDataCommandOutput) => void + ): Promise | void { + const command = new PutRepositoryCatalogDataCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Applies a repository policy to the specified public repository to control access + * permissions. For more information, see Amazon ECR Repository + * Policies in the Amazon Elastic Container Registry User Guide.

    + */ + public setRepositoryPolicy( + args: SetRepositoryPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public setRepositoryPolicy( + args: SetRepositoryPolicyCommandInput, + cb: (err: any, data?: SetRepositoryPolicyCommandOutput) => void + ): void; + public setRepositoryPolicy( + args: SetRepositoryPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: SetRepositoryPolicyCommandOutput) => void + ): void; + public setRepositoryPolicy( + args: SetRepositoryPolicyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: SetRepositoryPolicyCommandOutput) => void), + cb?: (err: any, data?: SetRepositoryPolicyCommandOutput) => void + ): Promise | void { + const command = new SetRepositoryPolicyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Uploads an image layer part to Amazon ECR.

    + *

    When an image is pushed, each new image layer is uploaded in parts. The maximum size of + * each image layer part can be 20971520 bytes (or about 20MB). The UploadLayerPart API is + * called once per each new image layer part.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ + public uploadLayerPart( + args: UploadLayerPartCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public uploadLayerPart( + args: UploadLayerPartCommandInput, + cb: (err: any, data?: UploadLayerPartCommandOutput) => void + ): void; + public uploadLayerPart( + args: UploadLayerPartCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UploadLayerPartCommandOutput) => void + ): void; + public uploadLayerPart( + args: UploadLayerPartCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UploadLayerPartCommandOutput) => void), + cb?: (err: any, data?: UploadLayerPartCommandOutput) => void + ): Promise | void { + const command = new UploadLayerPartCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-ecr-public/ECRPUBLICClient.ts b/clients/client-ecr-public/ECRPUBLICClient.ts new file mode 100644 index 000000000000..f15c762e04fd --- /dev/null +++ b/clients/client-ecr-public/ECRPUBLICClient.ts @@ -0,0 +1,303 @@ +import { + BatchCheckLayerAvailabilityCommandInput, + BatchCheckLayerAvailabilityCommandOutput, +} from "./commands/BatchCheckLayerAvailabilityCommand"; +import { BatchDeleteImageCommandInput, BatchDeleteImageCommandOutput } from "./commands/BatchDeleteImageCommand"; +import { + CompleteLayerUploadCommandInput, + CompleteLayerUploadCommandOutput, +} from "./commands/CompleteLayerUploadCommand"; +import { CreateRepositoryCommandInput, CreateRepositoryCommandOutput } from "./commands/CreateRepositoryCommand"; +import { DeleteRepositoryCommandInput, DeleteRepositoryCommandOutput } from "./commands/DeleteRepositoryCommand"; +import { + DeleteRepositoryPolicyCommandInput, + 
DeleteRepositoryPolicyCommandOutput, +} from "./commands/DeleteRepositoryPolicyCommand"; +import { DescribeImageTagsCommandInput, DescribeImageTagsCommandOutput } from "./commands/DescribeImageTagsCommand"; +import { DescribeImagesCommandInput, DescribeImagesCommandOutput } from "./commands/DescribeImagesCommand"; +import { DescribeRegistriesCommandInput, DescribeRegistriesCommandOutput } from "./commands/DescribeRegistriesCommand"; +import { + DescribeRepositoriesCommandInput, + DescribeRepositoriesCommandOutput, +} from "./commands/DescribeRepositoriesCommand"; +import { + GetAuthorizationTokenCommandInput, + GetAuthorizationTokenCommandOutput, +} from "./commands/GetAuthorizationTokenCommand"; +import { + GetRegistryCatalogDataCommandInput, + GetRegistryCatalogDataCommandOutput, +} from "./commands/GetRegistryCatalogDataCommand"; +import { + GetRepositoryCatalogDataCommandInput, + GetRepositoryCatalogDataCommandOutput, +} from "./commands/GetRepositoryCatalogDataCommand"; +import { + GetRepositoryPolicyCommandInput, + GetRepositoryPolicyCommandOutput, +} from "./commands/GetRepositoryPolicyCommand"; +import { + InitiateLayerUploadCommandInput, + InitiateLayerUploadCommandOutput, +} from "./commands/InitiateLayerUploadCommand"; +import { PutImageCommandInput, PutImageCommandOutput } from "./commands/PutImageCommand"; +import { + PutRegistryCatalogDataCommandInput, + PutRegistryCatalogDataCommandOutput, +} from "./commands/PutRegistryCatalogDataCommand"; +import { + PutRepositoryCatalogDataCommandInput, + PutRepositoryCatalogDataCommandOutput, +} from "./commands/PutRepositoryCatalogDataCommand"; +import { + SetRepositoryPolicyCommandInput, + SetRepositoryPolicyCommandOutput, +} from "./commands/SetRepositoryPolicyCommand"; +import { UploadLayerPartCommandInput, UploadLayerPartCommandOutput } from "./commands/UploadLayerPartCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + 
EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = + | BatchCheckLayerAvailabilityCommandInput + | BatchDeleteImageCommandInput + | CompleteLayerUploadCommandInput + | CreateRepositoryCommandInput + | DeleteRepositoryCommandInput + | DeleteRepositoryPolicyCommandInput + | DescribeImageTagsCommandInput + | DescribeImagesCommandInput + | DescribeRegistriesCommandInput + | DescribeRepositoriesCommandInput + | GetAuthorizationTokenCommandInput + | GetRegistryCatalogDataCommandInput + | GetRepositoryCatalogDataCommandInput + | GetRepositoryPolicyCommandInput + 
| InitiateLayerUploadCommandInput + | PutImageCommandInput + | PutRegistryCatalogDataCommandInput + | PutRepositoryCatalogDataCommandInput + | SetRepositoryPolicyCommandInput + | UploadLayerPartCommandInput; + +export type ServiceOutputTypes = + | BatchCheckLayerAvailabilityCommandOutput + | BatchDeleteImageCommandOutput + | CompleteLayerUploadCommandOutput + | CreateRepositoryCommandOutput + | DeleteRepositoryCommandOutput + | DeleteRepositoryPolicyCommandOutput + | DescribeImageTagsCommandOutput + | DescribeImagesCommandOutput + | DescribeRegistriesCommandOutput + | DescribeRepositoriesCommandOutput + | GetAuthorizationTokenCommandOutput + | GetRegistryCatalogDataCommandOutput + | GetRepositoryCatalogDataCommandOutput + | GetRepositoryPolicyCommandOutput + | InitiateLayerUploadCommandOutput + | PutImageCommandOutput + | PutRegistryCatalogDataCommandOutput + | PutRepositoryCatalogDataCommandOutput + | SetRepositoryPolicyCommandOutput + | UploadLayerPartCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. + */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. + */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. 
+ */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dyanamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region. 
+ */ + regionInfoProvider?: RegionInfoProvider; +} + +export type ECRPUBLICClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type ECRPUBLICClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + * Amazon Elastic Container Registry Public + *

    Amazon Elastic Container Registry (Amazon ECR) is a managed container image registry service. Amazon ECR provides both + * public and private registries to host your container images. You can use the familiar + * Docker CLI, or their preferred client, to push, pull, and manage images. Amazon ECR provides a + * secure, scalable, and reliable registry for your Docker or Open Container Initiative (OCI) + * images. Amazon ECR supports public repositories with this API. For information about the Amazon ECR + * API for private repositories, see Amazon Elastic Container Registry API Reference.

    + */ +export class ECRPUBLICClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + ECRPUBLICClientResolvedConfig +> { + readonly config: ECRPUBLICClientResolvedConfig; + + constructor(configuration: ECRPUBLICClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-ecr-public/LICENSE b/clients/client-ecr-public/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-ecr-public/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-ecr-public/README.md b/clients/client-ecr-public/README.md new file mode 100644 index 000000000000..3699b5256baa --- /dev/null +++ b/clients/client-ecr-public/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-ecr-public + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-ecr-public/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-ecr-public) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-ecr-public.svg)](https://www.npmjs.com/package/@aws-sdk/client-ecr-public) + +For SDK usage, please step to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-ecr-public/commands/BatchCheckLayerAvailabilityCommand.ts b/clients/client-ecr-public/commands/BatchCheckLayerAvailabilityCommand.ts new file mode 100644 index 000000000000..e1c5008b4e06 --- /dev/null +++ b/clients/client-ecr-public/commands/BatchCheckLayerAvailabilityCommand.ts @@ -0,0 +1,97 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { BatchCheckLayerAvailabilityRequest, BatchCheckLayerAvailabilityResponse } from "../models/models_0"; +import { + deserializeAws_json1_1BatchCheckLayerAvailabilityCommand, + serializeAws_json1_1BatchCheckLayerAvailabilityCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type BatchCheckLayerAvailabilityCommandInput = BatchCheckLayerAvailabilityRequest; +export type BatchCheckLayerAvailabilityCommandOutput = BatchCheckLayerAvailabilityResponse & __MetadataBearer; + +/** + *

    Checks the availability of one or more image layers within a repository in a public + * registry. When an image is pushed to a repository, each image layer is checked to verify if + * it has been uploaded before. If it has been uploaded, then the image layer is + * skipped.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ +export class BatchCheckLayerAvailabilityCommand extends $Command< + BatchCheckLayerAvailabilityCommandInput, + BatchCheckLayerAvailabilityCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: BatchCheckLayerAvailabilityCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "BatchCheckLayerAvailabilityCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: BatchCheckLayerAvailabilityRequest.filterSensitiveLog, + outputFilterSensitiveLog: BatchCheckLayerAvailabilityResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: BatchCheckLayerAvailabilityCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1BatchCheckLayerAvailabilityCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1BatchCheckLayerAvailabilityCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-ecr-public/commands/BatchDeleteImageCommand.ts b/clients/client-ecr-public/commands/BatchDeleteImageCommand.ts new file mode 100644 index 000000000000..b83482e4e67b --- /dev/null +++ b/clients/client-ecr-public/commands/BatchDeleteImageCommand.ts @@ -0,0 +1,93 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { BatchDeleteImageRequest, BatchDeleteImageResponse } from "../models/models_0"; +import { + deserializeAws_json1_1BatchDeleteImageCommand, + serializeAws_json1_1BatchDeleteImageCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type BatchDeleteImageCommandInput = BatchDeleteImageRequest; +export type BatchDeleteImageCommandOutput = BatchDeleteImageResponse & __MetadataBearer; + +/** + *

    Deletes a list of specified images within a repository in a public registry. Images are + * specified with either an imageTag or imageDigest.

    + *

    You can remove a tag from an image by specifying the image's tag in your request. When + * you remove the last tag from an image, the image is deleted from your repository.

    + *

    You can completely delete an image (and all of its tags) by specifying the image's + * digest in your request.

    + */ +export class BatchDeleteImageCommand extends $Command< + BatchDeleteImageCommandInput, + BatchDeleteImageCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: BatchDeleteImageCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "BatchDeleteImageCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: BatchDeleteImageRequest.filterSensitiveLog, + outputFilterSensitiveLog: BatchDeleteImageResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: BatchDeleteImageCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1BatchDeleteImageCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1BatchDeleteImageCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/CompleteLayerUploadCommand.ts b/clients/client-ecr-public/commands/CompleteLayerUploadCommand.ts new 
file mode 100644 index 000000000000..c30d89f4080c --- /dev/null +++ b/clients/client-ecr-public/commands/CompleteLayerUploadCommand.ts @@ -0,0 +1,95 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { CompleteLayerUploadRequest, CompleteLayerUploadResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CompleteLayerUploadCommand, + serializeAws_json1_1CompleteLayerUploadCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CompleteLayerUploadCommandInput = CompleteLayerUploadRequest; +export type CompleteLayerUploadCommandOutput = CompleteLayerUploadResponse & __MetadataBearer; + +/** + *

    Informs Amazon ECR that the image layer upload has completed for a specified public registry, + * repository name, and upload ID. You can optionally provide a sha256 digest of + * the image layer for data validation purposes.

    + *

    When an image is pushed, the CompleteLayerUpload API is called once per each new image + * layer to verify that the upload has completed.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ +export class CompleteLayerUploadCommand extends $Command< + CompleteLayerUploadCommandInput, + CompleteLayerUploadCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CompleteLayerUploadCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "CompleteLayerUploadCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CompleteLayerUploadRequest.filterSensitiveLog, + outputFilterSensitiveLog: CompleteLayerUploadResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CompleteLayerUploadCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CompleteLayerUploadCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CompleteLayerUploadCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/CreateRepositoryCommand.ts 
b/clients/client-ecr-public/commands/CreateRepositoryCommand.ts new file mode 100644 index 000000000000..2e570973eb73 --- /dev/null +++ b/clients/client-ecr-public/commands/CreateRepositoryCommand.ts @@ -0,0 +1,89 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { CreateRepositoryRequest, CreateRepositoryResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CreateRepositoryCommand, + serializeAws_json1_1CreateRepositoryCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateRepositoryCommandInput = CreateRepositoryRequest; +export type CreateRepositoryCommandOutput = CreateRepositoryResponse & __MetadataBearer; + +/** + *

    Creates a repository in a public registry. For more information, see Amazon ECR + * repositories in the Amazon Elastic Container Registry User Guide.

    + */ +export class CreateRepositoryCommand extends $Command< + CreateRepositoryCommandInput, + CreateRepositoryCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateRepositoryCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "CreateRepositoryCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateRepositoryRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateRepositoryResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateRepositoryCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateRepositoryCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateRepositoryCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/DeleteRepositoryCommand.ts b/clients/client-ecr-public/commands/DeleteRepositoryCommand.ts new file 
mode 100644 index 000000000000..b6a02766d289 --- /dev/null +++ b/clients/client-ecr-public/commands/DeleteRepositoryCommand.ts @@ -0,0 +1,90 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { DeleteRepositoryRequest, DeleteRepositoryResponse } from "../models/models_0"; +import { + deserializeAws_json1_1DeleteRepositoryCommand, + serializeAws_json1_1DeleteRepositoryCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteRepositoryCommandInput = DeleteRepositoryRequest; +export type DeleteRepositoryCommandOutput = DeleteRepositoryResponse & __MetadataBearer; + +/** + *

    Deletes a repository in a public registry. If the repository contains images, you must + * either delete all images in the repository or use the force option which + * deletes all images on your behalf before deleting the repository.

    + */ +export class DeleteRepositoryCommand extends $Command< + DeleteRepositoryCommandInput, + DeleteRepositoryCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteRepositoryCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "DeleteRepositoryCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteRepositoryRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteRepositoryResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteRepositoryCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteRepositoryCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteRepositoryCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/DeleteRepositoryPolicyCommand.ts 
b/clients/client-ecr-public/commands/DeleteRepositoryPolicyCommand.ts new file mode 100644 index 000000000000..a591e873654b --- /dev/null +++ b/clients/client-ecr-public/commands/DeleteRepositoryPolicyCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { DeleteRepositoryPolicyRequest, DeleteRepositoryPolicyResponse } from "../models/models_0"; +import { + deserializeAws_json1_1DeleteRepositoryPolicyCommand, + serializeAws_json1_1DeleteRepositoryPolicyCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteRepositoryPolicyCommandInput = DeleteRepositoryPolicyRequest; +export type DeleteRepositoryPolicyCommandOutput = DeleteRepositoryPolicyResponse & __MetadataBearer; + +/** + *

    Deletes the repository policy associated with the specified repository.

    + */ +export class DeleteRepositoryPolicyCommand extends $Command< + DeleteRepositoryPolicyCommandInput, + DeleteRepositoryPolicyCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteRepositoryPolicyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "DeleteRepositoryPolicyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteRepositoryPolicyRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteRepositoryPolicyResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteRepositoryPolicyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteRepositoryPolicyCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteRepositoryPolicyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/DescribeImageTagsCommand.ts 
b/clients/client-ecr-public/commands/DescribeImageTagsCommand.ts new file mode 100644 index 000000000000..135bfab6ea74 --- /dev/null +++ b/clients/client-ecr-public/commands/DescribeImageTagsCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { DescribeImageTagsRequest, DescribeImageTagsResponse } from "../models/models_0"; +import { + deserializeAws_json1_1DescribeImageTagsCommand, + serializeAws_json1_1DescribeImageTagsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeImageTagsCommandInput = DescribeImageTagsRequest; +export type DescribeImageTagsCommandOutput = DescribeImageTagsResponse & __MetadataBearer; + +/** + *

    Returns the image tag details for a repository in a public registry.

    + */ +export class DescribeImageTagsCommand extends $Command< + DescribeImageTagsCommandInput, + DescribeImageTagsCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeImageTagsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "DescribeImageTagsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeImageTagsRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeImageTagsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeImageTagsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeImageTagsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeImageTagsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/DescribeImagesCommand.ts b/clients/client-ecr-public/commands/DescribeImagesCommand.ts new 
file mode 100644 index 000000000000..3a90f4095139 --- /dev/null +++ b/clients/client-ecr-public/commands/DescribeImagesCommand.ts @@ -0,0 +1,94 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { DescribeImagesRequest, DescribeImagesResponse } from "../models/models_0"; +import { + deserializeAws_json1_1DescribeImagesCommand, + serializeAws_json1_1DescribeImagesCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeImagesCommandInput = DescribeImagesRequest; +export type DescribeImagesCommandOutput = DescribeImagesResponse & __MetadataBearer; + +/** + *

    Returns metadata about the images in a repository in a public registry.

    + * + *

    Beginning with Docker version 1.9, the Docker client compresses image layers before + * pushing them to a V2 Docker registry. The output of the docker images + * command shows the uncompressed image size, so it may return a larger image size than the + * image sizes returned by DescribeImages.

    + *
    + */ +export class DescribeImagesCommand extends $Command< + DescribeImagesCommandInput, + DescribeImagesCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeImagesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "DescribeImagesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeImagesRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeImagesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeImagesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeImagesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeImagesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/DescribeRegistriesCommand.ts b/clients/client-ecr-public/commands/DescribeRegistriesCommand.ts new file mode 100644 
index 000000000000..0d664b07440a --- /dev/null +++ b/clients/client-ecr-public/commands/DescribeRegistriesCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { DescribeRegistriesRequest, DescribeRegistriesResponse } from "../models/models_0"; +import { + deserializeAws_json1_1DescribeRegistriesCommand, + serializeAws_json1_1DescribeRegistriesCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeRegistriesCommandInput = DescribeRegistriesRequest; +export type DescribeRegistriesCommandOutput = DescribeRegistriesResponse & __MetadataBearer; + +/** + *

    Returns details for a public registry.

    + */ +export class DescribeRegistriesCommand extends $Command< + DescribeRegistriesCommandInput, + DescribeRegistriesCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeRegistriesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "DescribeRegistriesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeRegistriesRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeRegistriesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeRegistriesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeRegistriesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeRegistriesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/DescribeRepositoriesCommand.ts 
b/clients/client-ecr-public/commands/DescribeRepositoriesCommand.ts new file mode 100644 index 000000000000..2efbd6cdd1cf --- /dev/null +++ b/clients/client-ecr-public/commands/DescribeRepositoriesCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { DescribeRepositoriesRequest, DescribeRepositoriesResponse } from "../models/models_0"; +import { + deserializeAws_json1_1DescribeRepositoriesCommand, + serializeAws_json1_1DescribeRepositoriesCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeRepositoriesCommandInput = DescribeRepositoriesRequest; +export type DescribeRepositoriesCommandOutput = DescribeRepositoriesResponse & __MetadataBearer; + +/** + *

    Describes repositories in a public registry.

    + */ +export class DescribeRepositoriesCommand extends $Command< + DescribeRepositoriesCommandInput, + DescribeRepositoriesCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeRepositoriesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "DescribeRepositoriesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeRepositoriesRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeRepositoriesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeRepositoriesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeRepositoriesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeRepositoriesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/GetAuthorizationTokenCommand.ts 
b/clients/client-ecr-public/commands/GetAuthorizationTokenCommand.ts new file mode 100644 index 000000000000..45d2d42522b3 --- /dev/null +++ b/clients/client-ecr-public/commands/GetAuthorizationTokenCommand.ts @@ -0,0 +1,92 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { GetAuthorizationTokenRequest, GetAuthorizationTokenResponse } from "../models/models_0"; +import { + deserializeAws_json1_1GetAuthorizationTokenCommand, + serializeAws_json1_1GetAuthorizationTokenCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetAuthorizationTokenCommandInput = GetAuthorizationTokenRequest; +export type GetAuthorizationTokenCommandOutput = GetAuthorizationTokenResponse & __MetadataBearer; + +/** + *

    Retrieves an authorization token. An authorization token represents your IAM + * authentication credentials and can be used to access any Amazon ECR registry that your IAM + * principal has access to. The authorization token is valid for 12 hours. This API requires + * the ecr-public:GetAuthorizationToken and + * sts:GetServiceBearerToken permissions.

    + */ +export class GetAuthorizationTokenCommand extends $Command< + GetAuthorizationTokenCommandInput, + GetAuthorizationTokenCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetAuthorizationTokenCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "GetAuthorizationTokenCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetAuthorizationTokenRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetAuthorizationTokenResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetAuthorizationTokenCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1GetAuthorizationTokenCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1GetAuthorizationTokenCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/GetRegistryCatalogDataCommand.ts 
b/clients/client-ecr-public/commands/GetRegistryCatalogDataCommand.ts new file mode 100644 index 000000000000..755e0103e1b0 --- /dev/null +++ b/clients/client-ecr-public/commands/GetRegistryCatalogDataCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { GetRegistryCatalogDataRequest, GetRegistryCatalogDataResponse } from "../models/models_0"; +import { + deserializeAws_json1_1GetRegistryCatalogDataCommand, + serializeAws_json1_1GetRegistryCatalogDataCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetRegistryCatalogDataCommandInput = GetRegistryCatalogDataRequest; +export type GetRegistryCatalogDataCommandOutput = GetRegistryCatalogDataResponse & __MetadataBearer; + +/** + *

    Retrieves catalog metadata for a public registry.

    + */ +export class GetRegistryCatalogDataCommand extends $Command< + GetRegistryCatalogDataCommandInput, + GetRegistryCatalogDataCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetRegistryCatalogDataCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "GetRegistryCatalogDataCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetRegistryCatalogDataRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetRegistryCatalogDataResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetRegistryCatalogDataCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1GetRegistryCatalogDataCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1GetRegistryCatalogDataCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/GetRepositoryCatalogDataCommand.ts 
b/clients/client-ecr-public/commands/GetRepositoryCatalogDataCommand.ts new file mode 100644 index 000000000000..741c00eb2f8a --- /dev/null +++ b/clients/client-ecr-public/commands/GetRepositoryCatalogDataCommand.ts @@ -0,0 +1,89 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { GetRepositoryCatalogDataRequest, GetRepositoryCatalogDataResponse } from "../models/models_0"; +import { + deserializeAws_json1_1GetRepositoryCatalogDataCommand, + serializeAws_json1_1GetRepositoryCatalogDataCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetRepositoryCatalogDataCommandInput = GetRepositoryCatalogDataRequest; +export type GetRepositoryCatalogDataCommandOutput = GetRepositoryCatalogDataResponse & __MetadataBearer; + +/** + *

    Retrieve catalog metadata for a repository in a public registry. This metadata is + * displayed publicly in the Amazon ECR Public Gallery.

    + */ +export class GetRepositoryCatalogDataCommand extends $Command< + GetRepositoryCatalogDataCommandInput, + GetRepositoryCatalogDataCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetRepositoryCatalogDataCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "GetRepositoryCatalogDataCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetRepositoryCatalogDataRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetRepositoryCatalogDataResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetRepositoryCatalogDataCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1GetRepositoryCatalogDataCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1GetRepositoryCatalogDataCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-ecr-public/commands/GetRepositoryPolicyCommand.ts b/clients/client-ecr-public/commands/GetRepositoryPolicyCommand.ts new file mode 100644 index 000000000000..83eb54090748 --- /dev/null +++ b/clients/client-ecr-public/commands/GetRepositoryPolicyCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { GetRepositoryPolicyRequest, GetRepositoryPolicyResponse } from "../models/models_0"; +import { + deserializeAws_json1_1GetRepositoryPolicyCommand, + serializeAws_json1_1GetRepositoryPolicyCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetRepositoryPolicyCommandInput = GetRepositoryPolicyRequest; +export type GetRepositoryPolicyCommandOutput = GetRepositoryPolicyResponse & __MetadataBearer; + +/** + *

    Retrieves the repository policy for the specified repository.

    + */ +export class GetRepositoryPolicyCommand extends $Command< + GetRepositoryPolicyCommandInput, + GetRepositoryPolicyCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetRepositoryPolicyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "GetRepositoryPolicyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetRepositoryPolicyRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetRepositoryPolicyResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetRepositoryPolicyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1GetRepositoryPolicyCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1GetRepositoryPolicyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/InitiateLayerUploadCommand.ts 
b/clients/client-ecr-public/commands/InitiateLayerUploadCommand.ts new file mode 100644 index 000000000000..75d0611f719e --- /dev/null +++ b/clients/client-ecr-public/commands/InitiateLayerUploadCommand.ts @@ -0,0 +1,94 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { InitiateLayerUploadRequest, InitiateLayerUploadResponse } from "../models/models_0"; +import { + deserializeAws_json1_1InitiateLayerUploadCommand, + serializeAws_json1_1InitiateLayerUploadCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type InitiateLayerUploadCommandInput = InitiateLayerUploadRequest; +export type InitiateLayerUploadCommandOutput = InitiateLayerUploadResponse & __MetadataBearer; + +/** + *

    Notifies Amazon ECR that you intend to upload an image layer.

    + *

    When an image is pushed, the InitiateLayerUpload API is called once per image layer that + * has not already been uploaded. Whether or not an image layer has been uploaded is + * determined by the BatchCheckLayerAvailability API action.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ +export class InitiateLayerUploadCommand extends $Command< + InitiateLayerUploadCommandInput, + InitiateLayerUploadCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: InitiateLayerUploadCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "InitiateLayerUploadCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: InitiateLayerUploadRequest.filterSensitiveLog, + outputFilterSensitiveLog: InitiateLayerUploadResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: InitiateLayerUploadCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1InitiateLayerUploadCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1InitiateLayerUploadCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/PutImageCommand.ts 
b/clients/client-ecr-public/commands/PutImageCommand.ts new file mode 100644 index 000000000000..a800ce030fe9 --- /dev/null +++ b/clients/client-ecr-public/commands/PutImageCommand.ts @@ -0,0 +1,92 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { PutImageRequest, PutImageResponse } from "../models/models_0"; +import { deserializeAws_json1_1PutImageCommand, serializeAws_json1_1PutImageCommand } from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type PutImageCommandInput = PutImageRequest; +export type PutImageCommandOutput = PutImageResponse & __MetadataBearer; + +/** + *

    Creates or updates the image manifest and tags associated with an image.

    + *

    When an image is pushed and all new image layers have been uploaded, the PutImage API is + * called once to create or update the image manifest and the tags associated with the + * image.

    + * + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ +export class PutImageCommand extends $Command< + PutImageCommandInput, + PutImageCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: PutImageCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "PutImageCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: PutImageRequest.filterSensitiveLog, + outputFilterSensitiveLog: PutImageResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: PutImageCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1PutImageCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1PutImageCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/PutRegistryCatalogDataCommand.ts b/clients/client-ecr-public/commands/PutRegistryCatalogDataCommand.ts new file mode 100644 index 000000000000..7f7aefe887d3 --- /dev/null +++ 
b/clients/client-ecr-public/commands/PutRegistryCatalogDataCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { PutRegistryCatalogDataRequest, PutRegistryCatalogDataResponse } from "../models/models_0"; +import { + deserializeAws_json1_1PutRegistryCatalogDataCommand, + serializeAws_json1_1PutRegistryCatalogDataCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type PutRegistryCatalogDataCommandInput = PutRegistryCatalogDataRequest; +export type PutRegistryCatalogDataCommandOutput = PutRegistryCatalogDataResponse & __MetadataBearer; + +/** + *

    Create or updates the catalog data for a public registry.

    + */ +export class PutRegistryCatalogDataCommand extends $Command< + PutRegistryCatalogDataCommandInput, + PutRegistryCatalogDataCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: PutRegistryCatalogDataCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "PutRegistryCatalogDataCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: PutRegistryCatalogDataRequest.filterSensitiveLog, + outputFilterSensitiveLog: PutRegistryCatalogDataResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: PutRegistryCatalogDataCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1PutRegistryCatalogDataCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1PutRegistryCatalogDataCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/PutRepositoryCatalogDataCommand.ts 
b/clients/client-ecr-public/commands/PutRepositoryCatalogDataCommand.ts new file mode 100644 index 000000000000..ea72854865d1 --- /dev/null +++ b/clients/client-ecr-public/commands/PutRepositoryCatalogDataCommand.ts @@ -0,0 +1,88 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { PutRepositoryCatalogDataRequest, PutRepositoryCatalogDataResponse } from "../models/models_0"; +import { + deserializeAws_json1_1PutRepositoryCatalogDataCommand, + serializeAws_json1_1PutRepositoryCatalogDataCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type PutRepositoryCatalogDataCommandInput = PutRepositoryCatalogDataRequest; +export type PutRepositoryCatalogDataCommandOutput = PutRepositoryCatalogDataResponse & __MetadataBearer; + +/** + *

    Creates or updates the catalog data for a repository in a public registry.

    + */ +export class PutRepositoryCatalogDataCommand extends $Command< + PutRepositoryCatalogDataCommandInput, + PutRepositoryCatalogDataCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: PutRepositoryCatalogDataCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "PutRepositoryCatalogDataCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: PutRepositoryCatalogDataRequest.filterSensitiveLog, + outputFilterSensitiveLog: PutRepositoryCatalogDataResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: PutRepositoryCatalogDataCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1PutRepositoryCatalogDataCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1PutRepositoryCatalogDataCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-ecr-public/commands/SetRepositoryPolicyCommand.ts b/clients/client-ecr-public/commands/SetRepositoryPolicyCommand.ts new file mode 100644 index 000000000000..6aa54abebbb6 --- /dev/null +++ b/clients/client-ecr-public/commands/SetRepositoryPolicyCommand.ts @@ -0,0 +1,90 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { SetRepositoryPolicyRequest, SetRepositoryPolicyResponse } from "../models/models_0"; +import { + deserializeAws_json1_1SetRepositoryPolicyCommand, + serializeAws_json1_1SetRepositoryPolicyCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type SetRepositoryPolicyCommandInput = SetRepositoryPolicyRequest; +export type SetRepositoryPolicyCommandOutput = SetRepositoryPolicyResponse & __MetadataBearer; + +/** + *

    Applies a repository policy to the specified public repository to control access + * permissions. For more information, see Amazon ECR Repository + * Policies in the Amazon Elastic Container Registry User Guide.

    + */ +export class SetRepositoryPolicyCommand extends $Command< + SetRepositoryPolicyCommandInput, + SetRepositoryPolicyCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: SetRepositoryPolicyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "SetRepositoryPolicyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: SetRepositoryPolicyRequest.filterSensitiveLog, + outputFilterSensitiveLog: SetRepositoryPolicyResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: SetRepositoryPolicyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1SetRepositoryPolicyCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1SetRepositoryPolicyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/commands/UploadLayerPartCommand.ts 
b/clients/client-ecr-public/commands/UploadLayerPartCommand.ts new file mode 100644 index 000000000000..a22a2465ae0b --- /dev/null +++ b/clients/client-ecr-public/commands/UploadLayerPartCommand.ts @@ -0,0 +1,94 @@ +import { ECRPUBLICClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../ECRPUBLICClient"; +import { UploadLayerPartRequest, UploadLayerPartResponse } from "../models/models_0"; +import { + deserializeAws_json1_1UploadLayerPartCommand, + serializeAws_json1_1UploadLayerPartCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UploadLayerPartCommandInput = UploadLayerPartRequest; +export type UploadLayerPartCommandOutput = UploadLayerPartResponse & __MetadataBearer; + +/** + *

    Uploads an image layer part to Amazon ECR.

    + *

    When an image is pushed, each new image layer is uploaded in parts. The maximum size of + * each image layer part can be 20971520 bytes (or about 20MB). The UploadLayerPart API is + * called once per each new image layer part.

    + * + *

    This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

    + *
    + */ +export class UploadLayerPartCommand extends $Command< + UploadLayerPartCommandInput, + UploadLayerPartCommandOutput, + ECRPUBLICClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UploadLayerPartCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: ECRPUBLICClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "ECRPUBLICClient"; + const commandName = "UploadLayerPartCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UploadLayerPartRequest.filterSensitiveLog, + outputFilterSensitiveLog: UploadLayerPartResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UploadLayerPartCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UploadLayerPartCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UploadLayerPartCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-ecr-public/endpoints.ts b/clients/client-ecr-public/endpoints.ts new file mode 100644 index 000000000000..18f9703cb549 --- 
/dev/null +++ b/clients/client-ecr-public/endpoints.ts @@ -0,0 +1,81 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "ecr-public.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "ecr-public.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "ecr-public.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "ecr-public.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "ecr-public.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. + // Next, try to match partition endpoints. 
+ default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. + if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-ecr-public/index.ts b/clients/client-ecr-public/index.ts new file mode 100644 index 000000000000..69c688ebffe2 --- /dev/null +++ b/clients/client-ecr-public/index.ts @@ -0,0 +1,28 @@ +export * from "./ECRPUBLICClient"; +export * from "./ECRPUBLIC"; +export * from "./commands/BatchCheckLayerAvailabilityCommand"; +export * from "./commands/BatchDeleteImageCommand"; +export * from "./commands/CompleteLayerUploadCommand"; +export * from "./commands/CreateRepositoryCommand"; +export * from "./commands/DeleteRepositoryCommand"; +export * from "./commands/DeleteRepositoryPolicyCommand"; +export * from "./commands/DescribeImagesCommand"; +export * from "./pagination/DescribeImagesPaginator"; +export * from "./commands/DescribeImageTagsCommand"; +export * from "./pagination/DescribeImageTagsPaginator"; +export * from "./commands/DescribeRegistriesCommand"; +export * from "./pagination/DescribeRegistriesPaginator"; +export * from "./commands/DescribeRepositoriesCommand"; +export * from 
"./pagination/DescribeRepositoriesPaginator"; +export * from "./commands/GetAuthorizationTokenCommand"; +export * from "./commands/GetRegistryCatalogDataCommand"; +export * from "./commands/GetRepositoryCatalogDataCommand"; +export * from "./commands/GetRepositoryPolicyCommand"; +export * from "./commands/InitiateLayerUploadCommand"; +export * from "./commands/PutImageCommand"; +export * from "./commands/PutRegistryCatalogDataCommand"; +export * from "./commands/PutRepositoryCatalogDataCommand"; +export * from "./commands/SetRepositoryPolicyCommand"; +export * from "./commands/UploadLayerPartCommand"; +export * from "./pagination/Interfaces"; +export * from "./models/index"; diff --git a/clients/client-ecr-public/models/index.ts b/clients/client-ecr-public/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null +++ b/clients/client-ecr-public/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-ecr-public/models/models_0.ts b/clients/client-ecr-public/models/models_0.ts new file mode 100644 index 000000000000..166d5e80e850 --- /dev/null +++ b/clients/client-ecr-public/models/models_0.ts @@ -0,0 +1,1894 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; + +/** + *

    An authorization token data object that corresponds to a public registry.

    + */ +export interface AuthorizationData { + /** + *

    A base64-encoded string that contains authorization data for a public Amazon ECR registry. + * When the string is decoded, it is presented in the format user:password for + * public registry authentication using docker login.

    + */ + authorizationToken?: string; + + /** + *

    The Unix time in seconds and milliseconds when the authorization token expires. + * Authorization tokens are valid for 12 hours.

    + */ + expiresAt?: Date; +} + +export namespace AuthorizationData { + export const filterSensitiveLog = (obj: AuthorizationData): any => ({ + ...obj, + }); +} + +export interface BatchCheckLayerAvailabilityRequest { + /** + *

    The AWS account ID associated with the public registry that contains the image layers to + * check. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository that is associated with the image layers to check.

    + */ + repositoryName: string | undefined; + + /** + *

    The digests of the image layers to check.

    + */ + layerDigests: string[] | undefined; +} + +export namespace BatchCheckLayerAvailabilityRequest { + export const filterSensitiveLog = (obj: BatchCheckLayerAvailabilityRequest): any => ({ + ...obj, + }); +} + +export enum LayerFailureCode { + InvalidLayerDigest = "InvalidLayerDigest", + MissingLayerDigest = "MissingLayerDigest", +} + +/** + *

    An object representing an Amazon ECR image layer failure.

    + */ +export interface LayerFailure { + /** + *

    The layer digest associated with the failure.

    + */ + layerDigest?: string; + + /** + *

    The failure code associated with the failure.

    + */ + failureCode?: LayerFailureCode | string; + + /** + *

    The reason for the failure.

    + */ + failureReason?: string; +} + +export namespace LayerFailure { + export const filterSensitiveLog = (obj: LayerFailure): any => ({ + ...obj, + }); +} + +export enum LayerAvailability { + AVAILABLE = "AVAILABLE", + UNAVAILABLE = "UNAVAILABLE", +} + +/** + *

    An object representing an Amazon ECR image layer.

    + */ +export interface Layer { + /** + *

    The sha256 digest of the image layer.

    + */ + layerDigest?: string; + + /** + *

    The availability status of the image layer.

    + */ + layerAvailability?: LayerAvailability | string; + + /** + *

    The size, in bytes, of the image layer.

    + */ + layerSize?: number; + + /** + *

    The media type of the layer, such as + * application/vnd.docker.image.rootfs.diff.tar.gzip or + * application/vnd.oci.image.layer.v1.tar+gzip.

    + */ + mediaType?: string; +} + +export namespace Layer { + export const filterSensitiveLog = (obj: Layer): any => ({ + ...obj, + }); +} + +export interface BatchCheckLayerAvailabilityResponse { + /** + *

    A list of image layer objects corresponding to the image layer references in the + * request.

    + */ + layers?: Layer[]; + + /** + *

    Any failures associated with the call.

    + */ + failures?: LayerFailure[]; +} + +export namespace BatchCheckLayerAvailabilityResponse { + export const filterSensitiveLog = (obj: BatchCheckLayerAvailabilityResponse): any => ({ + ...obj, + }); +} + +/** + *

    The specified parameter is invalid. Review the available parameters for the API + * request.

    + */ +export interface InvalidParameterException extends __SmithyException, $MetadataBearer { + name: "InvalidParameterException"; + $fault: "client"; + message?: string; +} + +export namespace InvalidParameterException { + export const filterSensitiveLog = (obj: InvalidParameterException): any => ({ + ...obj, + }); +} + +/** + *

    The registry does not exist.

    + */ +export interface RegistryNotFoundException extends __SmithyException, $MetadataBearer { + name: "RegistryNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace RegistryNotFoundException { + export const filterSensitiveLog = (obj: RegistryNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

    The specified repository could not be found. Check the spelling of the specified + * repository and ensure that you are performing operations on the correct registry.

    + */ +export interface RepositoryNotFoundException extends __SmithyException, $MetadataBearer { + name: "RepositoryNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace RepositoryNotFoundException { + export const filterSensitiveLog = (obj: RepositoryNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

    These errors are usually caused by a server-side issue.

    + */ +export interface ServerException extends __SmithyException, $MetadataBearer { + name: "ServerException"; + $fault: "server"; + message?: string; +} + +export namespace ServerException { + export const filterSensitiveLog = (obj: ServerException): any => ({ + ...obj, + }); +} + +/** + *

    An object with identifying information for an Amazon ECR image.

    + */ +export interface ImageIdentifier { + /** + *

    The sha256 digest of the image manifest.

    + */ + imageDigest?: string; + + /** + *

    The tag used for the image.

    + */ + imageTag?: string; +} + +export namespace ImageIdentifier { + export const filterSensitiveLog = (obj: ImageIdentifier): any => ({ + ...obj, + }); +} + +export interface BatchDeleteImageRequest { + /** + *

    The AWS account ID associated with the registry that contains the image to delete. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The repository in a public registry that contains the image to delete.

    + */ + repositoryName: string | undefined; + + /** + *

    A list of image ID references that correspond to images to delete. The format of the + * imageIds reference is imageTag=tag or + * imageDigest=digest.

    + */ + imageIds: ImageIdentifier[] | undefined; +} + +export namespace BatchDeleteImageRequest { + export const filterSensitiveLog = (obj: BatchDeleteImageRequest): any => ({ + ...obj, + }); +} + +export enum ImageFailureCode { + ImageNotFound = "ImageNotFound", + ImageReferencedByManifestList = "ImageReferencedByManifestList", + ImageTagDoesNotMatchDigest = "ImageTagDoesNotMatchDigest", + InvalidImageDigest = "InvalidImageDigest", + InvalidImageTag = "InvalidImageTag", + KmsError = "KmsError", + MissingDigestAndTag = "MissingDigestAndTag", +} + +/** + *

    An object representing an Amazon ECR image failure.

    + */ +export interface ImageFailure { + /** + *

    The image ID associated with the failure.

    + */ + imageId?: ImageIdentifier; + + /** + *

    The code associated with the failure.

    + */ + failureCode?: ImageFailureCode | string; + + /** + *

    The reason for the failure.

    + */ + failureReason?: string; +} + +export namespace ImageFailure { + export const filterSensitiveLog = (obj: ImageFailure): any => ({ + ...obj, + }); +} + +export interface BatchDeleteImageResponse { + /** + *

    The image IDs of the deleted images.

    + */ + imageIds?: ImageIdentifier[]; + + /** + *

    Any failures associated with the call.

    + */ + failures?: ImageFailure[]; +} + +export namespace BatchDeleteImageResponse { + export const filterSensitiveLog = (obj: BatchDeleteImageResponse): any => ({ + ...obj, + }); +} + +export interface CompleteLayerUploadRequest { + /** + *

    The AWS account ID associated with the registry to which to upload layers. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository in a public registry to associate with the image + * layer.

    + */ + repositoryName: string | undefined; + + /** + *

    The upload ID from a previous InitiateLayerUpload operation to + * associate with the image layer.

    + */ + uploadId: string | undefined; + + /** + *

    The sha256 digest of the image layer.

    + */ + layerDigests: string[] | undefined; +} + +export namespace CompleteLayerUploadRequest { + export const filterSensitiveLog = (obj: CompleteLayerUploadRequest): any => ({ + ...obj, + }); +} + +export interface CompleteLayerUploadResponse { + /** + *

    The public registry ID associated with the request.

    + */ + registryId?: string; + + /** + *

    The repository name associated with the request.

    + */ + repositoryName?: string; + + /** + *

    The upload ID associated with the layer.

    + */ + uploadId?: string; + + /** + *

    The sha256 digest of the image layer.

    + */ + layerDigest?: string; +} + +export namespace CompleteLayerUploadResponse { + export const filterSensitiveLog = (obj: CompleteLayerUploadResponse): any => ({ + ...obj, + }); +} + +/** + *

    The specified layer upload does not contain any layer parts.

    + */ +export interface EmptyUploadException extends __SmithyException, $MetadataBearer { + name: "EmptyUploadException"; + $fault: "client"; + message?: string; +} + +export namespace EmptyUploadException { + export const filterSensitiveLog = (obj: EmptyUploadException): any => ({ + ...obj, + }); +} + +/** + *

    The layer digest calculation performed by Amazon ECR upon receipt of the image layer does not + * match the digest specified.

    + */ +export interface InvalidLayerException extends __SmithyException, $MetadataBearer { + name: "InvalidLayerException"; + $fault: "client"; + message?: string; +} + +export namespace InvalidLayerException { + export const filterSensitiveLog = (obj: InvalidLayerException): any => ({ + ...obj, + }); +} + +/** + *

    The image layer already exists in the associated repository.

    + */ +export interface LayerAlreadyExistsException extends __SmithyException, $MetadataBearer { + name: "LayerAlreadyExistsException"; + $fault: "client"; + message?: string; +} + +export namespace LayerAlreadyExistsException { + export const filterSensitiveLog = (obj: LayerAlreadyExistsException): any => ({ + ...obj, + }); +} + +/** + *

    Layer parts must be at least 5 MiB in size.

    + */ +export interface LayerPartTooSmallException extends __SmithyException, $MetadataBearer { + name: "LayerPartTooSmallException"; + $fault: "client"; + message?: string; +} + +export namespace LayerPartTooSmallException { + export const filterSensitiveLog = (obj: LayerPartTooSmallException): any => ({ + ...obj, + }); +} + +/** + *

    The action is not supported in this Region.

    + */ +export interface UnsupportedCommandException extends __SmithyException, $MetadataBearer { + name: "UnsupportedCommandException"; + $fault: "client"; + message?: string; +} + +export namespace UnsupportedCommandException { + export const filterSensitiveLog = (obj: UnsupportedCommandException): any => ({ + ...obj, + }); +} + +/** + *

    The upload could not be found, or the specified upload ID is not valid for this + * repository.

    + */ +export interface UploadNotFoundException extends __SmithyException, $MetadataBearer { + name: "UploadNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace UploadNotFoundException { + export const filterSensitiveLog = (obj: UploadNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

    An object containing the catalog data for a repository. This data is publicly visible in + * the Amazon ECR Public Gallery.

    + */ +export interface RepositoryCatalogDataInput { + /** + *

    A short description of the contents of the repository. This text appears in both the + * image details and also when searching for repositories on the Amazon ECR Public Gallery.

    + */ + description?: string; + + /** + *

    The system architecture that the images in the repository are compatible with. On the + * Amazon ECR Public Gallery, the following supported architectures will appear as badges on the + * repository and are used as search filters.

    + *
      + *
    • + *

      + * Linux + *

      + *
    • + *
    • + *

      + * Windows + *

      + *
    • + *
    + * + *

    If an unsupported tag is added to your repository catalog data, it will be associated + * with the repository and can be retrieved using the API but will not be discoverable in + * the Amazon ECR Public Gallery.

    + *
    + */ + architectures?: string[]; + + /** + *

    The operating systems that the images in the repository are compatible with. On the + * Amazon ECR Public Gallery, the following supported operating systems will appear as badges on + * the repository and are used as search filters.

    + *
      + *
    • + *

      + * ARM + *

      + *
    • + *
    • + *

      + * ARM 64 + *

      + *
    • + *
    • + *

      + * x86 + *

      + *
    • + *
    • + *

      + * x86-64 + *

      + *
    • + *
    + * + *

    If an unsupported tag is added to your repository catalog data, it will be associated + * with the repository and can be retrieved using the API but will not be discoverable in + * the Amazon ECR Public Gallery.

    + *
    + */ + operatingSystems?: string[]; + + /** + *

    The base64-encoded repository logo payload.

    + * + *

    The repository logo is only publicly visible in the Amazon ECR Public Gallery for verified + * accounts.

    + *
    + */ + logoImageBlob?: Uint8Array; + + /** + *

    A detailed description of the contents of the repository. It is publicly visible in the + * Amazon ECR Public Gallery. The text must be in markdown format.

    + */ + aboutText?: string; + + /** + *

    Detailed information on how to use the contents of the repository. It is publicly + * visible in the Amazon ECR Public Gallery. The usage text provides context, support information, + * and additional usage details for users of the repository. The text must be in markdown + * format.

    + */ + usageText?: string; +} + +export namespace RepositoryCatalogDataInput { + export const filterSensitiveLog = (obj: RepositoryCatalogDataInput): any => ({ + ...obj, + }); +} + +export interface CreateRepositoryRequest { + /** + *

    The name to use for the repository. This appears publicly in the Amazon ECR Public Gallery. + * The repository name may be specified on its own (such as nginx-web-app) or it + * can be prepended with a namespace to group the repository into a category (such as + * project-a/nginx-web-app).

    + */ + repositoryName: string | undefined; + + /** + *

    The details about the repository that are publicly visible in the + * Amazon ECR Public Gallery.

    + */ + catalogData?: RepositoryCatalogDataInput; +} + +export namespace CreateRepositoryRequest { + export const filterSensitiveLog = (obj: CreateRepositoryRequest): any => ({ + ...obj, + }); +} + +/** + *

    The catalog data for a repository. This data is publicly visible in the + * Amazon ECR Public Gallery.

    + */ +export interface RepositoryCatalogData { + /** + *

    The short description of the repository.

    + */ + description?: string; + + /** + *

    The architecture tags that are associated with the repository.

    + * + *

    Only supported operating system tags appear publicly in the Amazon ECR Public Gallery. For + * more information, see RepositoryCatalogDataInput.

    + *
    + */ + architectures?: string[]; + + /** + *

    The operating system tags that are associated with the repository.

    + * + *

    Only supported operating system tags appear publicly in the Amazon ECR Public Gallery. For + * more information, see RepositoryCatalogDataInput.

    + *
    + */ + operatingSystems?: string[]; + + /** + *

    The URL containing the logo associated with the repository.

    + */ + logoUrl?: string; + + /** + *

    The longform description of the contents of the repository. This text appears in the + * repository details on the Amazon ECR Public Gallery.

    + */ + aboutText?: string; + + /** + *

    The longform usage details of the contents of the repository. The usage text provides + * context for users of the repository.

    + */ + usageText?: string; + + /** + *

    Whether or not the repository is certified by AWS Marketplace.

    + */ + marketplaceCertified?: boolean; +} + +export namespace RepositoryCatalogData { + export const filterSensitiveLog = (obj: RepositoryCatalogData): any => ({ + ...obj, + }); +} + +/** + *

    An object representing a repository.

    + */ +export interface Repository { + /** + *

    The Amazon Resource Name (ARN) that identifies the repository. The ARN contains the arn:aws:ecr namespace, followed by the region of the repository, AWS account ID of the repository owner, repository namespace, and repository name. For example, arn:aws:ecr:region:012345678910:repository/test.

    + */ + repositoryArn?: string; + + /** + *

    The AWS account ID associated with the public registry that contains the + * repository.

    + */ + registryId?: string; + + /** + *

    The name of the repository.

    + */ + repositoryName?: string; + + /** + *

    The URI for the repository. You can use this URI for container image push + * and pull operations.

    + */ + repositoryUri?: string; + + /** + *

    The date and time, in JavaScript date format, when the repository was created.

    + */ + createdAt?: Date; +} + +export namespace Repository { + export const filterSensitiveLog = (obj: Repository): any => ({ + ...obj, + }); +} + +export interface CreateRepositoryResponse { + /** + *

    The repository that was created.

    + */ + repository?: Repository; + + /** + *

    The catalog data for a repository. This data is publicly visible in the + * Amazon ECR Public Gallery.

    + */ + catalogData?: RepositoryCatalogData; +} + +export namespace CreateRepositoryResponse { + export const filterSensitiveLog = (obj: CreateRepositoryResponse): any => ({ + ...obj, + }); +} + +/** + *

    The operation did not succeed because it would have exceeded a service limit for your + * account. For more information, see Amazon ECR Service Quotas in the + * Amazon Elastic Container Registry User Guide.

    + */ +export interface LimitExceededException extends __SmithyException, $MetadataBearer { + name: "LimitExceededException"; + $fault: "client"; + message?: string; +} + +export namespace LimitExceededException { + export const filterSensitiveLog = (obj: LimitExceededException): any => ({ + ...obj, + }); +} + +/** + *

    The specified repository already exists in the specified registry.

    + */ +export interface RepositoryAlreadyExistsException extends __SmithyException, $MetadataBearer { + name: "RepositoryAlreadyExistsException"; + $fault: "client"; + message?: string; +} + +export namespace RepositoryAlreadyExistsException { + export const filterSensitiveLog = (obj: RepositoryAlreadyExistsException): any => ({ + ...obj, + }); +} + +export interface DeleteRepositoryRequest { + /** + *

    The AWS account ID associated with the public registry that contains the repository to + * delete. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository to delete.

    + */ + repositoryName: string | undefined; + + /** + *

    If a repository contains images, forces the deletion.

    + */ + force?: boolean; +} + +export namespace DeleteRepositoryRequest { + export const filterSensitiveLog = (obj: DeleteRepositoryRequest): any => ({ + ...obj, + }); +} + +export interface DeleteRepositoryResponse { + /** + *

    The repository that was deleted.

    + */ + repository?: Repository; +} + +export namespace DeleteRepositoryResponse { + export const filterSensitiveLog = (obj: DeleteRepositoryResponse): any => ({ + ...obj, + }); +} + +/** + *

    The specified repository contains images. To delete a repository that contains images, + * you must force the deletion with the force parameter.

    + */ +export interface RepositoryNotEmptyException extends __SmithyException, $MetadataBearer { + name: "RepositoryNotEmptyException"; + $fault: "client"; + message?: string; +} + +export namespace RepositoryNotEmptyException { + export const filterSensitiveLog = (obj: RepositoryNotEmptyException): any => ({ + ...obj, + }); +} + +export interface DeleteRepositoryPolicyRequest { + /** + *

    The AWS account ID associated with the public registry that contains the repository + * policy to delete. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository that is associated with the repository policy to + * delete.

    + */ + repositoryName: string | undefined; +} + +export namespace DeleteRepositoryPolicyRequest { + export const filterSensitiveLog = (obj: DeleteRepositoryPolicyRequest): any => ({ + ...obj, + }); +} + +export interface DeleteRepositoryPolicyResponse { + /** + *

    The registry ID associated with the request.

    + */ + registryId?: string; + + /** + *

    The repository name associated with the request.

    + */ + repositoryName?: string; + + /** + *

    The JSON repository policy that was deleted from the repository.

    + */ + policyText?: string; +} + +export namespace DeleteRepositoryPolicyResponse { + export const filterSensitiveLog = (obj: DeleteRepositoryPolicyResponse): any => ({ + ...obj, + }); +} + +/** + *

    The specified repository and registry combination does not have an associated repository + * policy.

    + */ +export interface RepositoryPolicyNotFoundException extends __SmithyException, $MetadataBearer { + name: "RepositoryPolicyNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace RepositoryPolicyNotFoundException { + export const filterSensitiveLog = (obj: RepositoryPolicyNotFoundException): any => ({ + ...obj, + }); +} + +export interface DescribeImagesRequest { + /** + *

    The AWS account ID associated with the public registry that contains the repository in + * which to describe images. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The repository that contains the images to describe.

    + */ + repositoryName: string | undefined; + + /** + *

    The list of image IDs for the requested repository.

    + */ + imageIds?: ImageIdentifier[]; + + /** + *

    The nextToken value returned from a previous paginated + * DescribeImages request where maxResults was used and the + * results exceeded the value of that parameter. Pagination continues from the end of the + * previous results that returned the nextToken value. This value is + * null when there are no more results to return. This option cannot be used + * when you specify images with imageIds.

    + */ + nextToken?: string; + + /** + *

    The maximum number of repository results returned by DescribeImages in + * paginated output. When this parameter is used, DescribeImages only returns + * maxResults results in a single page along with a nextToken + * response element. The remaining results of the initial request can be seen by sending + * another DescribeImages request with the returned nextToken value. + * This value can be between 1 and 1000. If this parameter is not + * used, then DescribeImages returns up to 100 results and a + * nextToken value, if applicable. This option cannot be used when you specify + * images with imageIds.

    + */ + maxResults?: number; +} + +export namespace DescribeImagesRequest { + export const filterSensitiveLog = (obj: DescribeImagesRequest): any => ({ + ...obj, + }); +} + +/** + *

    An object that describes an image returned by a DescribeImages + * operation.

    + */ +export interface ImageDetail { + /** + *

    The AWS account ID associated with the public registry to which this image + * belongs.

    + */ + registryId?: string; + + /** + *

    The name of the repository to which this image belongs.

    + */ + repositoryName?: string; + + /** + *

    The sha256 digest of the image manifest.

    + */ + imageDigest?: string; + + /** + *

    The list of tags associated with this image.

    + */ + imageTags?: string[]; + + /** + *

    The size, in bytes, of the image in the repository.

    + *

    If the image is a manifest list, this will be the max size of all manifests in the + * list.

    + * + *

    Beginning with Docker version 1.9, the Docker client compresses image layers before + * pushing them to a V2 Docker registry. The output of the docker images + * command shows the uncompressed image size, so it may return a larger image size than the + * image sizes returned by DescribeImages.

    + *
    + */ + imageSizeInBytes?: number; + + /** + *

    The date and time, expressed in standard JavaScript date format, at which the current + * image was pushed to the repository.

    + */ + imagePushedAt?: Date; + + /** + *

    The media type of the image manifest.

    + */ + imageManifestMediaType?: string; + + /** + *

    The artifact media type of the image.

    + */ + artifactMediaType?: string; +} + +export namespace ImageDetail { + export const filterSensitiveLog = (obj: ImageDetail): any => ({ + ...obj, + }); +} + +export interface DescribeImagesResponse { + /** + *

    A list of ImageDetail objects that contain data about the + * image.

    + */ + imageDetails?: ImageDetail[]; + + /** + *

    The nextToken value to include in a future DescribeImages + * request. When the results of a DescribeImages request exceed + * maxResults, this value can be used to retrieve the next page of results. + * This value is null when there are no more results to return.

    + */ + nextToken?: string; +} + +export namespace DescribeImagesResponse { + export const filterSensitiveLog = (obj: DescribeImagesResponse): any => ({ + ...obj, + }); +} + +/** + *

    The image requested does not exist in the specified repository.

    + */ +export interface ImageNotFoundException extends __SmithyException, $MetadataBearer { + name: "ImageNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace ImageNotFoundException { + export const filterSensitiveLog = (obj: ImageNotFoundException): any => ({ + ...obj, + }); +} + +export interface DescribeImageTagsRequest { + /** + *

    The AWS account ID associated with the public registry that contains the repository in + * which to describe images. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository that contains the image tag details to describe.

    + */ + repositoryName: string | undefined; + + /** + *

    The nextToken value returned from a previous paginated + * DescribeImageTags request where maxResults was used and the + * results exceeded the value of that parameter. Pagination continues from the end of the + * previous results that returned the nextToken value. This value is + * null when there are no more results to return. This option cannot be used + * when you specify images with imageIds.

    + */ + nextToken?: string; + + /** + *

    The maximum number of repository results returned by DescribeImageTags in + * paginated output. When this parameter is used, DescribeImageTags only returns + * maxResults results in a single page along with a nextToken + * response element. The remaining results of the initial request can be seen by sending + * another DescribeImageTags request with the returned nextToken + * value. This value can be between 1 and 1000. If this parameter + * is not used, then DescribeImageTags returns up to 100 + * results and a nextToken value, if applicable. This option cannot be used when + * you specify images with imageIds.

    + */ + maxResults?: number; +} + +export namespace DescribeImageTagsRequest { + export const filterSensitiveLog = (obj: DescribeImageTagsRequest): any => ({ + ...obj, + }); +} + +/** + *

    An object that describes the image tag details returned by a DescribeImageTags action.

    + */ +export interface ReferencedImageDetail { + /** + *

    The sha256 digest of the image manifest.

    + */ + imageDigest?: string; + + /** + *

    The size, in bytes, of the image in the repository.

    + *

    If the image is a manifest list, this will be the max size of all manifests in the + * list.

    + * + *

    Beginning with Docker version 1.9, the Docker client compresses image layers before + * pushing them to a V2 Docker registry. The output of the docker images + * command shows the uncompressed image size, so it may return a larger image size than the + * image sizes returned by DescribeImages.

    + *
    + */ + imageSizeInBytes?: number; + + /** + *

    The date and time, expressed in standard JavaScript date format, at which the current + * image tag was pushed to the repository.

    + */ + imagePushedAt?: Date; + + /** + *

    The media type of the image manifest.

    + */ + imageManifestMediaType?: string; + + /** + *

    The artifact media type of the image.

    + */ + artifactMediaType?: string; +} + +export namespace ReferencedImageDetail { + export const filterSensitiveLog = (obj: ReferencedImageDetail): any => ({ + ...obj, + }); +} + +/** + *

    An object representing the image tag details for an image.

    + */ +export interface ImageTagDetail { + /** + *

    The tag associated with the image.

    + */ + imageTag?: string; + + /** + *

    The time stamp indicating when the image tag was created.

    + */ + createdAt?: Date; + + /** + *

    An object that describes the details of an image.

    + */ + imageDetail?: ReferencedImageDetail; +} + +export namespace ImageTagDetail { + export const filterSensitiveLog = (obj: ImageTagDetail): any => ({ + ...obj, + }); +} + +export interface DescribeImageTagsResponse { + /** + *

    The image tag details for the images in the requested repository.

    + */ + imageTagDetails?: ImageTagDetail[]; + + /** + *

    The nextToken value to include in a future DescribeImageTags + * request. When the results of a DescribeImageTags request exceed + * maxResults, this value can be used to retrieve the next page of results. + * This value is null when there are no more results to return.

    + */ + nextToken?: string; +} + +export namespace DescribeImageTagsResponse { + export const filterSensitiveLog = (obj: DescribeImageTagsResponse): any => ({ + ...obj, + }); +} + +export interface DescribeRegistriesRequest { + /** + *

    The nextToken value returned from a previous paginated + * DescribeRegistries request where maxResults was used and the + * results exceeded the value of that parameter. Pagination continues from the end of the + * previous results that returned the nextToken value. This value is + * null when there are no more results to return.

    + * + *

    This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and not for other programmatic purposes.

    + *
    + */ + nextToken?: string; + + /** + *

    The maximum number of repository results returned by DescribeRegistries in + * paginated output. When this parameter is used, DescribeRegistries only returns + * maxResults results in a single page along with a nextToken + * response element. The remaining results of the initial request can be seen by sending + * another DescribeRegistries request with the returned nextToken + * value. This value can be between 1 and 1000. If this parameter + * is not used, then DescribeRegistries returns up to 100 + * results and a nextToken value, if applicable.

    + */ + maxResults?: number; +} + +export namespace DescribeRegistriesRequest { + export const filterSensitiveLog = (obj: DescribeRegistriesRequest): any => ({ + ...obj, + }); +} + +export enum RegistryAliasStatus { + ACTIVE = "ACTIVE", + PENDING = "PENDING", + REJECTED = "REJECTED", +} + +/** + *

    An object representing the aliases for a public registry. A public registry is given an + * alias upon creation but a custom alias can be set using the Amazon ECR console. For more + * information, see Registries in the + * Amazon Elastic Container Registry User Guide.

    + */ +export interface RegistryAlias { + /** + *

    The name of the registry alias.

    + */ + name: string | undefined; + + /** + *

    The status of the registry alias.

    + */ + status: RegistryAliasStatus | string | undefined; + + /** + *

    Whether or not the registry alias is the primary alias for the registry. If true, the + * alias is the primary registry alias and is displayed in both the repository URL and the + * image URI used in the docker pull commands on the Amazon ECR Public Gallery.

    + * + *

    A registry alias that is not the primary registry alias can be used in the repository + * URI in a docker pull command.

    + *
    + */ + primaryRegistryAlias: boolean | undefined; + + /** + *

    Whether or not the registry alias is the default alias for the registry. When the first + * public repository is created, your public registry is assigned a default registry + * alias.

    + */ + defaultRegistryAlias: boolean | undefined; +} + +export namespace RegistryAlias { + export const filterSensitiveLog = (obj: RegistryAlias): any => ({ + ...obj, + }); +} + +/** + *

    The details of a public registry.

    + */ +export interface Registry { + /** + *

    The AWS account ID associated with the registry. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId: string | undefined; + + /** + *

    The Amazon Resource Name (ARN) of the public registry.

    + */ + registryArn: string | undefined; + + /** + *

    The URI of a public registry. The URI contains a universal prefix and the registry + * alias.

    + */ + registryUri: string | undefined; + + /** + *

Whether the account is verified. This indicates whether the account is an AWS + * Marketplace vendor. If an account is verified, each public repository will receive a + * verified account badge on the Amazon ECR Public Gallery.

    + */ + verified: boolean | undefined; + + /** + *

    An array of objects representing the aliases for a public registry.

    + */ + aliases: RegistryAlias[] | undefined; +} + +export namespace Registry { + export const filterSensitiveLog = (obj: Registry): any => ({ + ...obj, + }); +} + +export interface DescribeRegistriesResponse { + /** + *

    An object containing the details for a public registry.

    + */ + registries: Registry[] | undefined; + + /** + *

    The nextToken value to include in a future + * DescribeRepositories request. When the results of a + * DescribeRepositories request exceed maxResults, this value can + * be used to retrieve the next page of results. This value is null when there + * are no more results to return.

    + */ + nextToken?: string; +} + +export namespace DescribeRegistriesResponse { + export const filterSensitiveLog = (obj: DescribeRegistriesResponse): any => ({ + ...obj, + }); +} + +export interface DescribeRepositoriesRequest { + /** + *

    The AWS account ID associated with the registry that contains the repositories to be + * described. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    A list of repositories to describe. If this parameter is omitted, then all repositories + * in a registry are described.

    + */ + repositoryNames?: string[]; + + /** + *

    The nextToken value returned from a previous paginated + * DescribeRepositories request where maxResults was used and the + * results exceeded the value of that parameter. Pagination continues from the end of the + * previous results that returned the nextToken value. This value is + * null when there are no more results to return. This option cannot be used + * when you specify repositories with repositoryNames.

    + * + *

    This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and not for other programmatic purposes.

    + *
    + */ + nextToken?: string; + + /** + *

    The maximum number of repository results returned by DescribeRepositories + * in paginated output. When this parameter is used, DescribeRepositories only + * returns maxResults results in a single page along with a + * nextToken response element. The remaining results of the initial request + * can be seen by sending another DescribeRepositories request with the returned + * nextToken value. This value can be between 1 and + * 1000. If this parameter is not used, then DescribeRepositories + * returns up to 100 results and a nextToken value, if + * applicable. This option cannot be used when you specify repositories with + * repositoryNames.

    + */ + maxResults?: number; +} + +export namespace DescribeRepositoriesRequest { + export const filterSensitiveLog = (obj: DescribeRepositoriesRequest): any => ({ + ...obj, + }); +} + +export interface DescribeRepositoriesResponse { + /** + *

    A list of repository objects corresponding to valid repositories.

    + */ + repositories?: Repository[]; + + /** + *

    The nextToken value to include in a future + * DescribeRepositories request. When the results of a + * DescribeRepositories request exceed maxResults, this value can + * be used to retrieve the next page of results. This value is null when there + * are no more results to return.

    + */ + nextToken?: string; +} + +export namespace DescribeRepositoriesResponse { + export const filterSensitiveLog = (obj: DescribeRepositoriesResponse): any => ({ + ...obj, + }); +} + +export interface GetAuthorizationTokenRequest {} + +export namespace GetAuthorizationTokenRequest { + export const filterSensitiveLog = (obj: GetAuthorizationTokenRequest): any => ({ + ...obj, + }); +} + +export interface GetAuthorizationTokenResponse { + /** + *

    An authorization token data object that corresponds to a public registry.

    + */ + authorizationData?: AuthorizationData; +} + +export namespace GetAuthorizationTokenResponse { + export const filterSensitiveLog = (obj: GetAuthorizationTokenResponse): any => ({ + ...obj, + }); +} + +export interface GetRegistryCatalogDataRequest {} + +export namespace GetRegistryCatalogDataRequest { + export const filterSensitiveLog = (obj: GetRegistryCatalogDataRequest): any => ({ + ...obj, + }); +} + +/** + *

    The metadata for a public registry.

    + */ +export interface RegistryCatalogData { + /** + *

    The display name for a public registry. This appears on the Amazon ECR Public Gallery.

    + * + *

    Only accounts that have the verified account badge can have a registry display + * name.

    + *
    + */ + displayName?: string; +} + +export namespace RegistryCatalogData { + export const filterSensitiveLog = (obj: RegistryCatalogData): any => ({ + ...obj, + }); +} + +export interface GetRegistryCatalogDataResponse { + /** + *

    The catalog metadata for the public registry.

    + */ + registryCatalogData: RegistryCatalogData | undefined; +} + +export namespace GetRegistryCatalogDataResponse { + export const filterSensitiveLog = (obj: GetRegistryCatalogDataResponse): any => ({ + ...obj, + }); +} + +export interface GetRepositoryCatalogDataRequest { + /** + *

    The AWS account ID associated with the registry that contains the repositories to be + * described. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository to retrieve the catalog metadata for.

    + */ + repositoryName: string | undefined; +} + +export namespace GetRepositoryCatalogDataRequest { + export const filterSensitiveLog = (obj: GetRepositoryCatalogDataRequest): any => ({ + ...obj, + }); +} + +export interface GetRepositoryCatalogDataResponse { + /** + *

    The catalog metadata for the repository.

    + */ + catalogData?: RepositoryCatalogData; +} + +export namespace GetRepositoryCatalogDataResponse { + export const filterSensitiveLog = (obj: GetRepositoryCatalogDataResponse): any => ({ + ...obj, + }); +} + +export interface GetRepositoryPolicyRequest { + /** + *

    The AWS account ID associated with the public registry that contains the repository. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository with the policy to retrieve.

    + */ + repositoryName: string | undefined; +} + +export namespace GetRepositoryPolicyRequest { + export const filterSensitiveLog = (obj: GetRepositoryPolicyRequest): any => ({ + ...obj, + }); +} + +export interface GetRepositoryPolicyResponse { + /** + *

    The registry ID associated with the request.

    + */ + registryId?: string; + + /** + *

    The repository name associated with the request.

    + */ + repositoryName?: string; + + /** + *

    The repository policy text associated with the repository. The policy text will be in + * JSON format.

    + */ + policyText?: string; +} + +export namespace GetRepositoryPolicyResponse { + export const filterSensitiveLog = (obj: GetRepositoryPolicyResponse): any => ({ + ...obj, + }); +} + +/** + *

    An object representing an Amazon ECR image.

    + */ +export interface Image { + /** + *

    The AWS account ID associated with the registry containing the image.

    + */ + registryId?: string; + + /** + *

    The name of the repository associated with the image.

    + */ + repositoryName?: string; + + /** + *

    An object containing the image tag and image digest associated with an image.

    + */ + imageId?: ImageIdentifier; + + /** + *

    The image manifest associated with the image.

    + */ + imageManifest?: string; + + /** + *

    The manifest media type of the image.

    + */ + imageManifestMediaType?: string; +} + +export namespace Image { + export const filterSensitiveLog = (obj: Image): any => ({ + ...obj, + }); +} + +/** + *

    The specified image has already been pushed, and there were no changes to the manifest + * or image tag after the last push.

    + */ +export interface ImageAlreadyExistsException extends __SmithyException, $MetadataBearer { + name: "ImageAlreadyExistsException"; + $fault: "client"; + message?: string; +} + +export namespace ImageAlreadyExistsException { + export const filterSensitiveLog = (obj: ImageAlreadyExistsException): any => ({ + ...obj, + }); +} + +/** + *

    The specified image digest does not match the digest that Amazon ECR calculated for the + * image.

    + */ +export interface ImageDigestDoesNotMatchException extends __SmithyException, $MetadataBearer { + name: "ImageDigestDoesNotMatchException"; + $fault: "client"; + message?: string; +} + +export namespace ImageDigestDoesNotMatchException { + export const filterSensitiveLog = (obj: ImageDigestDoesNotMatchException): any => ({ + ...obj, + }); +} + +/** + *

    The specified image is tagged with a tag that already exists. The repository is + * configured for tag immutability.

    + */ +export interface ImageTagAlreadyExistsException extends __SmithyException, $MetadataBearer { + name: "ImageTagAlreadyExistsException"; + $fault: "client"; + message?: string; +} + +export namespace ImageTagAlreadyExistsException { + export const filterSensitiveLog = (obj: ImageTagAlreadyExistsException): any => ({ + ...obj, + }); +} + +export interface InitiateLayerUploadRequest { + /** + *

    The AWS account ID associated with the registry to which you intend to upload layers. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository to which you intend to upload layers.

    + */ + repositoryName: string | undefined; +} + +export namespace InitiateLayerUploadRequest { + export const filterSensitiveLog = (obj: InitiateLayerUploadRequest): any => ({ + ...obj, + }); +} + +export interface InitiateLayerUploadResponse { + /** + *

    The upload ID for the layer upload. This parameter is passed to further UploadLayerPart and CompleteLayerUpload operations.

    + */ + uploadId?: string; + + /** + *

    The size, in bytes, that Amazon ECR expects future layer part uploads to be.

    + */ + partSize?: number; +} + +export namespace InitiateLayerUploadResponse { + export const filterSensitiveLog = (obj: InitiateLayerUploadResponse): any => ({ + ...obj, + }); +} + +/** + *

    The layer part size is not valid, or the first byte specified is not consecutive to the + * last byte of a previous layer part upload.

    + */ +export interface InvalidLayerPartException extends __SmithyException, $MetadataBearer { + name: "InvalidLayerPartException"; + $fault: "client"; + /** + *

    The AWS account ID associated with the layer part.

    + */ + registryId?: string; + + /** + *

    The name of the repository.

    + */ + repositoryName?: string; + + /** + *

    The upload ID associated with the layer part.

    + */ + uploadId?: string; + + /** + *

    The position of the last byte of the layer part.

    + */ + lastValidByteReceived?: number; + + message?: string; +} + +export namespace InvalidLayerPartException { + export const filterSensitiveLog = (obj: InvalidLayerPartException): any => ({ + ...obj, + }); +} + +/** + *

    The specified layers could not be found, or the specified layer is not valid for this + * repository.

    + */ +export interface LayersNotFoundException extends __SmithyException, $MetadataBearer { + name: "LayersNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace LayersNotFoundException { + export const filterSensitiveLog = (obj: LayersNotFoundException): any => ({ + ...obj, + }); +} + +export interface PutImageRequest { + /** + *

    The AWS account ID associated with the public registry that contains the repository in + * which to put the image. If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository in which to put the image.

    + */ + repositoryName: string | undefined; + + /** + *

    The image manifest corresponding to the image to be uploaded.

    + */ + imageManifest: string | undefined; + + /** + *

    The media type of the image manifest. If you push an image manifest that does not + * contain the mediaType field, you must specify the + * imageManifestMediaType in the request.

    + */ + imageManifestMediaType?: string; + + /** + *

    The tag to associate with the image. This parameter is required for images that use the + * Docker Image Manifest V2 Schema 2 or Open Container Initiative (OCI) formats.

    + */ + imageTag?: string; + + /** + *

    The image digest of the image manifest corresponding to the image.

    + */ + imageDigest?: string; +} + +export namespace PutImageRequest { + export const filterSensitiveLog = (obj: PutImageRequest): any => ({ + ...obj, + }); +} + +export interface PutImageResponse { + /** + *

    Details of the image uploaded.

    + */ + image?: Image; +} + +export namespace PutImageResponse { + export const filterSensitiveLog = (obj: PutImageResponse): any => ({ + ...obj, + }); +} + +/** + *

    The manifest list is referencing an image that does not exist.

    + */ +export interface ReferencedImagesNotFoundException extends __SmithyException, $MetadataBearer { + name: "ReferencedImagesNotFoundException"; + $fault: "client"; + message?: string; +} + +export namespace ReferencedImagesNotFoundException { + export const filterSensitiveLog = (obj: ReferencedImagesNotFoundException): any => ({ + ...obj, + }); +} + +export interface PutRegistryCatalogDataRequest { + /** + *

    The display name for a public registry. The display name is shown as the repository + * author in the Amazon ECR Public Gallery.

    + * + *

    The registry display name is only publicly visible in the Amazon ECR Public Gallery for + * verified accounts.

    + *
    + */ + displayName?: string; +} + +export namespace PutRegistryCatalogDataRequest { + export const filterSensitiveLog = (obj: PutRegistryCatalogDataRequest): any => ({ + ...obj, + }); +} + +export interface PutRegistryCatalogDataResponse { + /** + *

    The catalog data for the public registry.

    + */ + registryCatalogData: RegistryCatalogData | undefined; +} + +export namespace PutRegistryCatalogDataResponse { + export const filterSensitiveLog = (obj: PutRegistryCatalogDataResponse): any => ({ + ...obj, + }); +} + +export interface PutRepositoryCatalogDataRequest { + /** + *

    The AWS account ID associated with the public registry the repository is in. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository to create or update the catalog data for.

    + */ + repositoryName: string | undefined; + + /** + *

    An object containing the catalog data for a repository. This data is publicly visible in + * the Amazon ECR Public Gallery.

    + */ + catalogData: RepositoryCatalogDataInput | undefined; +} + +export namespace PutRepositoryCatalogDataRequest { + export const filterSensitiveLog = (obj: PutRepositoryCatalogDataRequest): any => ({ + ...obj, + }); +} + +export interface PutRepositoryCatalogDataResponse { + /** + *

    The catalog data for the repository.

    + */ + catalogData?: RepositoryCatalogData; +} + +export namespace PutRepositoryCatalogDataResponse { + export const filterSensitiveLog = (obj: PutRepositoryCatalogDataResponse): any => ({ + ...obj, + }); +} + +export interface SetRepositoryPolicyRequest { + /** + *

    The AWS account ID associated with the registry that contains the repository. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository to receive the policy.

    + */ + repositoryName: string | undefined; + + /** + *

    The JSON repository policy text to apply to the repository. For more information, see + * Amazon ECR Repository + * Policies in the Amazon Elastic Container Registry User Guide.

    + */ + policyText: string | undefined; + + /** + *

    If the policy you are attempting to set on a repository policy would prevent you from + * setting another policy in the future, you must force the SetRepositoryPolicy operation. This is intended to prevent accidental + * repository lock outs.

    + */ + force?: boolean; +} + +export namespace SetRepositoryPolicyRequest { + export const filterSensitiveLog = (obj: SetRepositoryPolicyRequest): any => ({ + ...obj, + }); +} + +export interface SetRepositoryPolicyResponse { + /** + *

    The registry ID associated with the request.

    + */ + registryId?: string; + + /** + *

    The repository name associated with the request.

    + */ + repositoryName?: string; + + /** + *

    The JSON repository policy text applied to the repository.

    + */ + policyText?: string; +} + +export namespace SetRepositoryPolicyResponse { + export const filterSensitiveLog = (obj: SetRepositoryPolicyResponse): any => ({ + ...obj, + }); +} + +export interface UploadLayerPartRequest { + /** + *

    The AWS account ID associated with the registry to which you are uploading layer parts. + * If you do not specify a registry, the default public registry is assumed.

    + */ + registryId?: string; + + /** + *

    The name of the repository to which you are uploading layer parts.

    + */ + repositoryName: string | undefined; + + /** + *

    The upload ID from a previous InitiateLayerUpload operation to + * associate with the layer part upload.

    + */ + uploadId: string | undefined; + + /** + *

The position of the first byte of the layer part within the overall image layer.

    + */ + partFirstByte: number | undefined; + + /** + *

    The position of the last byte of the layer part within the overall image layer.

    + */ + partLastByte: number | undefined; + + /** + *

    The base64-encoded layer part payload.

    + */ + layerPartBlob: Uint8Array | undefined; +} + +export namespace UploadLayerPartRequest { + export const filterSensitiveLog = (obj: UploadLayerPartRequest): any => ({ + ...obj, + }); +} + +export interface UploadLayerPartResponse { + /** + *

    The registry ID associated with the request.

    + */ + registryId?: string; + + /** + *

    The repository name associated with the request.

    + */ + repositoryName?: string; + + /** + *

    The upload ID associated with the request.

    + */ + uploadId?: string; + + /** + *

    The integer value of the last byte received in the request.

    + */ + lastByteReceived?: number; +} + +export namespace UploadLayerPartResponse { + export const filterSensitiveLog = (obj: UploadLayerPartResponse): any => ({ + ...obj, + }); +} diff --git a/clients/client-ecr-public/package.json b/clients/client-ecr-public/package.json new file mode 100644 index 000000000000..cff3ba16dfb7 --- /dev/null +++ b/clients/client-ecr-public/package.json @@ -0,0 +1,83 @@ +{ + "name": "@aws-sdk/client-ecr-public", + "description": "AWS SDK for JavaScript Ecr Public Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + "@aws-sdk/middleware-content-length": "1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + "@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": "1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + "@aws-sdk/middleware-signing": "1.0.0-rc.7", 
+ "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-ecr-public", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-ecr-public" + } +} diff --git a/clients/client-ecr-public/pagination/DescribeImageTagsPaginator.ts b/clients/client-ecr-public/pagination/DescribeImageTagsPaginator.ts new file mode 100644 index 000000000000..93ee7a0ccfd3 --- /dev/null +++ b/clients/client-ecr-public/pagination/DescribeImageTagsPaginator.ts @@ -0,0 +1,57 @@ +import { ECRPUBLIC } from "../ECRPUBLIC"; +import { ECRPUBLICClient } from "../ECRPUBLICClient"; +import { + DescribeImageTagsCommand, + DescribeImageTagsCommandInput, + DescribeImageTagsCommandOutput, +} from "../commands/DescribeImageTagsCommand"; +import { 
ECRPUBLICPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ECRPUBLICClient, + input: DescribeImageTagsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new DescribeImageTagsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: ECRPUBLIC, + input: DescribeImageTagsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.describeImageTags(input, ...args); +}; +export async function* paginateDescribeImageTags( + config: ECRPUBLICPaginationConfiguration, + input: DescribeImageTagsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: DescribeImageTagsCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof ECRPUBLIC) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ECRPUBLICClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected ECRPUBLIC | ECRPUBLICClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-ecr-public/pagination/DescribeImagesPaginator.ts b/clients/client-ecr-public/pagination/DescribeImagesPaginator.ts new file mode 100644 index 000000000000..b7f1ee181295 --- /dev/null +++ b/clients/client-ecr-public/pagination/DescribeImagesPaginator.ts @@ -0,0 +1,57 @@ +import { ECRPUBLIC } from "../ECRPUBLIC"; +import { ECRPUBLICClient } from "../ECRPUBLICClient"; +import { + DescribeImagesCommand, + DescribeImagesCommandInput, + DescribeImagesCommandOutput, +} from "../commands/DescribeImagesCommand"; +import { 
ECRPUBLICPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ECRPUBLICClient, + input: DescribeImagesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new DescribeImagesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: ECRPUBLIC, + input: DescribeImagesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.describeImages(input, ...args); +}; +export async function* paginateDescribeImages( + config: ECRPUBLICPaginationConfiguration, + input: DescribeImagesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: DescribeImagesCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof ECRPUBLIC) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ECRPUBLICClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected ECRPUBLIC | ECRPUBLICClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-ecr-public/pagination/DescribeRegistriesPaginator.ts b/clients/client-ecr-public/pagination/DescribeRegistriesPaginator.ts new file mode 100644 index 000000000000..78745fcc22dd --- /dev/null +++ b/clients/client-ecr-public/pagination/DescribeRegistriesPaginator.ts @@ -0,0 +1,57 @@ +import { ECRPUBLIC } from "../ECRPUBLIC"; +import { ECRPUBLICClient } from "../ECRPUBLICClient"; +import { + DescribeRegistriesCommand, + DescribeRegistriesCommandInput, + DescribeRegistriesCommandOutput, +} from "../commands/DescribeRegistriesCommand"; 
+import { ECRPUBLICPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ECRPUBLICClient, + input: DescribeRegistriesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new DescribeRegistriesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: ECRPUBLIC, + input: DescribeRegistriesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.describeRegistries(input, ...args); +}; +export async function* paginateDescribeRegistries( + config: ECRPUBLICPaginationConfiguration, + input: DescribeRegistriesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: DescribeRegistriesCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof ECRPUBLIC) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ECRPUBLICClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected ECRPUBLIC | ECRPUBLICClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-ecr-public/pagination/DescribeRepositoriesPaginator.ts b/clients/client-ecr-public/pagination/DescribeRepositoriesPaginator.ts new file mode 100644 index 000000000000..8fe183cb488f --- /dev/null +++ b/clients/client-ecr-public/pagination/DescribeRepositoriesPaginator.ts @@ -0,0 +1,57 @@ +import { ECRPUBLIC } from "../ECRPUBLIC"; +import { ECRPUBLICClient } from "../ECRPUBLICClient"; +import { + DescribeRepositoriesCommand, + DescribeRepositoriesCommandInput, + DescribeRepositoriesCommandOutput, +} 
from "../commands/DescribeRepositoriesCommand"; +import { ECRPUBLICPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: ECRPUBLICClient, + input: DescribeRepositoriesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new DescribeRepositoriesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: ECRPUBLIC, + input: DescribeRepositoriesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.describeRepositories(input, ...args); +}; +export async function* paginateDescribeRepositories( + config: ECRPUBLICPaginationConfiguration, + input: DescribeRepositoriesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: DescribeRepositoriesCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof ECRPUBLIC) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof ECRPUBLICClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected ECRPUBLIC | ECRPUBLICClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-ecr-public/pagination/Interfaces.ts b/clients/client-ecr-public/pagination/Interfaces.ts new file mode 100644 index 000000000000..d0948182f53d --- /dev/null +++ b/clients/client-ecr-public/pagination/Interfaces.ts @@ -0,0 +1,7 @@ +import { ECRPUBLIC } from "../ECRPUBLIC"; +import { ECRPUBLICClient } from "../ECRPUBLICClient"; +import { PaginationConfiguration } from "@aws-sdk/types"; + +export interface ECRPUBLICPaginationConfiguration 
extends PaginationConfiguration { + client: ECRPUBLIC | ECRPUBLICClient; +} diff --git a/clients/client-ecr-public/protocols/Aws_json1_1.ts b/clients/client-ecr-public/protocols/Aws_json1_1.ts new file mode 100644 index 000000000000..7d6c4ca17518 --- /dev/null +++ b/clients/client-ecr-public/protocols/Aws_json1_1.ts @@ -0,0 +1,3338 @@ +import { + BatchCheckLayerAvailabilityCommandInput, + BatchCheckLayerAvailabilityCommandOutput, +} from "../commands/BatchCheckLayerAvailabilityCommand"; +import { BatchDeleteImageCommandInput, BatchDeleteImageCommandOutput } from "../commands/BatchDeleteImageCommand"; +import { + CompleteLayerUploadCommandInput, + CompleteLayerUploadCommandOutput, +} from "../commands/CompleteLayerUploadCommand"; +import { CreateRepositoryCommandInput, CreateRepositoryCommandOutput } from "../commands/CreateRepositoryCommand"; +import { DeleteRepositoryCommandInput, DeleteRepositoryCommandOutput } from "../commands/DeleteRepositoryCommand"; +import { + DeleteRepositoryPolicyCommandInput, + DeleteRepositoryPolicyCommandOutput, +} from "../commands/DeleteRepositoryPolicyCommand"; +import { DescribeImageTagsCommandInput, DescribeImageTagsCommandOutput } from "../commands/DescribeImageTagsCommand"; +import { DescribeImagesCommandInput, DescribeImagesCommandOutput } from "../commands/DescribeImagesCommand"; +import { DescribeRegistriesCommandInput, DescribeRegistriesCommandOutput } from "../commands/DescribeRegistriesCommand"; +import { + DescribeRepositoriesCommandInput, + DescribeRepositoriesCommandOutput, +} from "../commands/DescribeRepositoriesCommand"; +import { + GetAuthorizationTokenCommandInput, + GetAuthorizationTokenCommandOutput, +} from "../commands/GetAuthorizationTokenCommand"; +import { + GetRegistryCatalogDataCommandInput, + GetRegistryCatalogDataCommandOutput, +} from "../commands/GetRegistryCatalogDataCommand"; +import { + GetRepositoryCatalogDataCommandInput, + GetRepositoryCatalogDataCommandOutput, +} from 
"../commands/GetRepositoryCatalogDataCommand"; +import { + GetRepositoryPolicyCommandInput, + GetRepositoryPolicyCommandOutput, +} from "../commands/GetRepositoryPolicyCommand"; +import { + InitiateLayerUploadCommandInput, + InitiateLayerUploadCommandOutput, +} from "../commands/InitiateLayerUploadCommand"; +import { PutImageCommandInput, PutImageCommandOutput } from "../commands/PutImageCommand"; +import { + PutRegistryCatalogDataCommandInput, + PutRegistryCatalogDataCommandOutput, +} from "../commands/PutRegistryCatalogDataCommand"; +import { + PutRepositoryCatalogDataCommandInput, + PutRepositoryCatalogDataCommandOutput, +} from "../commands/PutRepositoryCatalogDataCommand"; +import { + SetRepositoryPolicyCommandInput, + SetRepositoryPolicyCommandOutput, +} from "../commands/SetRepositoryPolicyCommand"; +import { UploadLayerPartCommandInput, UploadLayerPartCommandOutput } from "../commands/UploadLayerPartCommand"; +import { + AuthorizationData, + BatchCheckLayerAvailabilityRequest, + BatchCheckLayerAvailabilityResponse, + BatchDeleteImageRequest, + BatchDeleteImageResponse, + CompleteLayerUploadRequest, + CompleteLayerUploadResponse, + CreateRepositoryRequest, + CreateRepositoryResponse, + DeleteRepositoryPolicyRequest, + DeleteRepositoryPolicyResponse, + DeleteRepositoryRequest, + DeleteRepositoryResponse, + DescribeImageTagsRequest, + DescribeImageTagsResponse, + DescribeImagesRequest, + DescribeImagesResponse, + DescribeRegistriesRequest, + DescribeRegistriesResponse, + DescribeRepositoriesRequest, + DescribeRepositoriesResponse, + EmptyUploadException, + GetAuthorizationTokenRequest, + GetAuthorizationTokenResponse, + GetRegistryCatalogDataRequest, + GetRegistryCatalogDataResponse, + GetRepositoryCatalogDataRequest, + GetRepositoryCatalogDataResponse, + GetRepositoryPolicyRequest, + GetRepositoryPolicyResponse, + Image, + ImageAlreadyExistsException, + ImageDetail, + ImageDigestDoesNotMatchException, + ImageFailure, + ImageIdentifier, + 
ImageNotFoundException, + ImageTagAlreadyExistsException, + ImageTagDetail, + InitiateLayerUploadRequest, + InitiateLayerUploadResponse, + InvalidLayerException, + InvalidLayerPartException, + InvalidParameterException, + Layer, + LayerAlreadyExistsException, + LayerFailure, + LayerPartTooSmallException, + LayersNotFoundException, + LimitExceededException, + PutImageRequest, + PutImageResponse, + PutRegistryCatalogDataRequest, + PutRegistryCatalogDataResponse, + PutRepositoryCatalogDataRequest, + PutRepositoryCatalogDataResponse, + ReferencedImageDetail, + ReferencedImagesNotFoundException, + Registry, + RegistryAlias, + RegistryCatalogData, + RegistryNotFoundException, + Repository, + RepositoryAlreadyExistsException, + RepositoryCatalogData, + RepositoryCatalogDataInput, + RepositoryNotEmptyException, + RepositoryNotFoundException, + RepositoryPolicyNotFoundException, + ServerException, + SetRepositoryPolicyRequest, + SetRepositoryPolicyResponse, + UnsupportedCommandException, + UploadLayerPartRequest, + UploadLayerPartResponse, + UploadNotFoundException, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + HeaderBag as __HeaderBag, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export const serializeAws_json1_1BatchCheckLayerAvailabilityCommand = async ( + input: BatchCheckLayerAvailabilityCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.BatchCheckLayerAvailability", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1BatchCheckLayerAvailabilityRequest(input, context)); + return buildHttpRpcRequest(context, headers, 
"/", undefined, body); +}; + +export const serializeAws_json1_1BatchDeleteImageCommand = async ( + input: BatchDeleteImageCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.BatchDeleteImage", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1BatchDeleteImageRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1CompleteLayerUploadCommand = async ( + input: CompleteLayerUploadCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.CompleteLayerUpload", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CompleteLayerUploadRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1CreateRepositoryCommand = async ( + input: CreateRepositoryCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.CreateRepository", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreateRepositoryRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DeleteRepositoryCommand = async ( + input: DeleteRepositoryCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.DeleteRepository", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteRepositoryRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); 
+}; + +export const serializeAws_json1_1DeleteRepositoryPolicyCommand = async ( + input: DeleteRepositoryPolicyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.DeleteRepositoryPolicy", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteRepositoryPolicyRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DescribeImagesCommand = async ( + input: DescribeImagesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.DescribeImages", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeImagesRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DescribeImageTagsCommand = async ( + input: DescribeImageTagsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.DescribeImageTags", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeImageTagsRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DescribeRegistriesCommand = async ( + input: DescribeRegistriesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.DescribeRegistries", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeRegistriesRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + 
+export const serializeAws_json1_1DescribeRepositoriesCommand = async ( + input: DescribeRepositoriesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.DescribeRepositories", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeRepositoriesRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1GetAuthorizationTokenCommand = async ( + input: GetAuthorizationTokenCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.GetAuthorizationToken", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1GetAuthorizationTokenRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1GetRegistryCatalogDataCommand = async ( + input: GetRegistryCatalogDataCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.GetRegistryCatalogData", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1GetRegistryCatalogDataRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1GetRepositoryCatalogDataCommand = async ( + input: GetRepositoryCatalogDataCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.GetRepositoryCatalogData", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1GetRepositoryCatalogDataRequest(input, context)); + return 
buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1GetRepositoryPolicyCommand = async ( + input: GetRepositoryPolicyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.GetRepositoryPolicy", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1GetRepositoryPolicyRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1InitiateLayerUploadCommand = async ( + input: InitiateLayerUploadCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.InitiateLayerUpload", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1InitiateLayerUploadRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1PutImageCommand = async ( + input: PutImageCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.PutImage", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1PutImageRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1PutRegistryCatalogDataCommand = async ( + input: PutRegistryCatalogDataCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.PutRegistryCatalogData", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1PutRegistryCatalogDataRequest(input, context)); + return 
buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1PutRepositoryCatalogDataCommand = async ( + input: PutRepositoryCatalogDataCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.PutRepositoryCatalogData", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1PutRepositoryCatalogDataRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1SetRepositoryPolicyCommand = async ( + input: SetRepositoryPolicyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.SetRepositoryPolicy", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1SetRepositoryPolicyRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1UploadLayerPartCommand = async ( + input: UploadLayerPartCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SpencerFrontendService.UploadLayerPart", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UploadLayerPartRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const deserializeAws_json1_1BatchCheckLayerAvailabilityCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1BatchCheckLayerAvailabilityCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = 
deserializeAws_json1_1BatchCheckLayerAvailabilityResponse(data, context); + const response: BatchCheckLayerAvailabilityCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1BatchCheckLayerAvailabilityCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RegistryNotFoundException": + case "com.amazonaws.ecrpublic#RegistryNotFoundException": + response = { + ...(await deserializeAws_json1_1RegistryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + 
response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1BatchDeleteImageCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1BatchDeleteImageCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1BatchDeleteImageResponse(data, context); + const response: BatchDeleteImageCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1BatchDeleteImageCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1CompleteLayerUploadCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CompleteLayerUploadCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CompleteLayerUploadResponse(data, context); + const response: CompleteLayerUploadCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const 
deserializeAws_json1_1CompleteLayerUploadCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "EmptyUploadException": + case "com.amazonaws.ecrpublic#EmptyUploadException": + response = { + ...(await deserializeAws_json1_1EmptyUploadExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidLayerException": + case "com.amazonaws.ecrpublic#InvalidLayerException": + response = { + ...(await deserializeAws_json1_1InvalidLayerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LayerAlreadyExistsException": + case "com.amazonaws.ecrpublic#LayerAlreadyExistsException": + response = { + ...(await deserializeAws_json1_1LayerAlreadyExistsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LayerPartTooSmallException": + case "com.amazonaws.ecrpublic#LayerPartTooSmallException": + response = { + ...(await deserializeAws_json1_1LayerPartTooSmallExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RegistryNotFoundException": + case 
"com.amazonaws.ecrpublic#RegistryNotFoundException": + response = { + ...(await deserializeAws_json1_1RegistryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UploadNotFoundException": + case "com.amazonaws.ecrpublic#UploadNotFoundException": + response = { + ...(await deserializeAws_json1_1UploadNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1CreateRepositoryCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if 
(output.statusCode >= 300) { + return deserializeAws_json1_1CreateRepositoryCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateRepositoryResponse(data, context); + const response: CreateRepositoryCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateRepositoryCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LimitExceededException": + case "com.amazonaws.ecrpublic#LimitExceededException": + response = { + ...(await deserializeAws_json1_1LimitExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryAlreadyExistsException": + case "com.amazonaws.ecrpublic#RepositoryAlreadyExistsException": + response = { + ...(await deserializeAws_json1_1RepositoryAlreadyExistsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: 
errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteRepositoryCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteRepositoryCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteRepositoryResponse(data, context); + const response: DeleteRepositoryCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteRepositoryCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotEmptyException": + case "com.amazonaws.ecrpublic#RepositoryNotEmptyException": + response = { + ...(await deserializeAws_json1_1RepositoryNotEmptyExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteRepositoryPolicyCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteRepositoryPolicyCommandError(output, context); + } + const data: any = await parseBody(output.body, 
context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteRepositoryPolicyResponse(data, context); + const response: DeleteRepositoryPolicyCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteRepositoryPolicyCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryPolicyNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryPolicyNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryPolicyNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + 
errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeImagesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeImagesCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeImagesResponse(data, context); + const response: DescribeImagesCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeImagesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ImageNotFoundException": + case "com.amazonaws.ecrpublic#ImageNotFoundException": + response = { + ...(await deserializeAws_json1_1ImageNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeImageTagsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeImageTagsCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = 
{}; + contents = deserializeAws_json1_1DescribeImageTagsResponse(data, context); + const response: DescribeImageTagsCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeImageTagsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = 
message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeRegistriesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeRegistriesCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeRegistriesResponse(data, context); + const response: DescribeRegistriesCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeRegistriesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeRepositoriesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeRepositoriesCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeRepositoriesResponse(data, context); + const response: DescribeRepositoriesCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const 
deserializeAws_json1_1DescribeRepositoriesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1GetAuthorizationTokenCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): 
Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1GetAuthorizationTokenCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1GetAuthorizationTokenResponse(data, context); + const response: GetAuthorizationTokenCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1GetAuthorizationTokenCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return 
Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1GetRegistryCatalogDataCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1GetRegistryCatalogDataCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1GetRegistryCatalogDataResponse(data, context); + const response: GetRegistryCatalogDataCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1GetRegistryCatalogDataCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1GetRepositoryCatalogDataCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1GetRepositoryCatalogDataCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1GetRepositoryCatalogDataResponse(data, context); + const response: GetRepositoryCatalogDataCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1GetRepositoryCatalogDataCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & 
__MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1GetRepositoryPolicyCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1GetRepositoryPolicyCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = 
deserializeAws_json1_1GetRepositoryPolicyResponse(data, context); + const response: GetRepositoryPolicyCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1GetRepositoryPolicyCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryPolicyNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryPolicyNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryPolicyNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + 
response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1InitiateLayerUploadCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1InitiateLayerUploadCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1InitiateLayerUploadResponse(data, context); + const response: InitiateLayerUploadCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1InitiateLayerUploadCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RegistryNotFoundException": + case "com.amazonaws.ecrpublic#RegistryNotFoundException": + response = { + ...(await deserializeAws_json1_1RegistryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const 
deserializeAws_json1_1PutImageCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1PutImageCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1PutImageResponse(data, context); + const response: PutImageCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1PutImageCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ImageAlreadyExistsException": + case "com.amazonaws.ecrpublic#ImageAlreadyExistsException": + response = { + ...(await deserializeAws_json1_1ImageAlreadyExistsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ImageDigestDoesNotMatchException": + case "com.amazonaws.ecrpublic#ImageDigestDoesNotMatchException": + response = { + ...(await deserializeAws_json1_1ImageDigestDoesNotMatchExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ImageTagAlreadyExistsException": + case "com.amazonaws.ecrpublic#ImageTagAlreadyExistsException": + response = { + ...(await deserializeAws_json1_1ImageTagAlreadyExistsExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case 
"com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LayersNotFoundException": + case "com.amazonaws.ecrpublic#LayersNotFoundException": + response = { + ...(await deserializeAws_json1_1LayersNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LimitExceededException": + case "com.amazonaws.ecrpublic#LimitExceededException": + response = { + ...(await deserializeAws_json1_1LimitExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ReferencedImagesNotFoundException": + case "com.amazonaws.ecrpublic#ReferencedImagesNotFoundException": + response = { + ...(await deserializeAws_json1_1ReferencedImagesNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RegistryNotFoundException": + case "com.amazonaws.ecrpublic#RegistryNotFoundException": + response = { + ...(await deserializeAws_json1_1RegistryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + 
response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1PutRegistryCatalogDataCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1PutRegistryCatalogDataCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1PutRegistryCatalogDataResponse(data, context); + const response: PutRegistryCatalogDataCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1PutRegistryCatalogDataCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1PutRepositoryCatalogDataCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1PutRepositoryCatalogDataCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1PutRepositoryCatalogDataResponse(data, context); + const response: PutRepositoryCatalogDataCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + 
+const deserializeAws_json1_1PutRepositoryCatalogDataCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1SetRepositoryPolicyCommand = async ( + output: __HttpResponse, + context: __SerdeContext 
+): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1SetRepositoryPolicyCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1SetRepositoryPolicyResponse(data, context); + const response: SetRepositoryPolicyCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1SetRepositoryPolicyCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: 
parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1UploadLayerPartCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1UploadLayerPartCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UploadLayerPartResponse(data, context); + const response: UploadLayerPartCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1UploadLayerPartCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "InvalidLayerPartException": + case "com.amazonaws.ecrpublic#InvalidLayerPartException": + response = { + ...(await deserializeAws_json1_1InvalidLayerPartExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.ecrpublic#InvalidParameterException": + response = { + ...(await deserializeAws_json1_1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "LimitExceededException": + case "com.amazonaws.ecrpublic#LimitExceededException": + response = { + ...(await deserializeAws_json1_1LimitExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RegistryNotFoundException": + case "com.amazonaws.ecrpublic#RegistryNotFoundException": + response = { + ...(await deserializeAws_json1_1RegistryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RepositoryNotFoundException": + case "com.amazonaws.ecrpublic#RepositoryNotFoundException": + response = { + ...(await deserializeAws_json1_1RepositoryNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.ecrpublic#ServerException": + response = { + ...(await deserializeAws_json1_1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "UnsupportedCommandException": + case "com.amazonaws.ecrpublic#UnsupportedCommandException": + response = { + ...(await deserializeAws_json1_1UnsupportedCommandExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case 
"UploadNotFoundException": + case "com.amazonaws.ecrpublic#UploadNotFoundException": + response = { + ...(await deserializeAws_json1_1UploadNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +const deserializeAws_json1_1EmptyUploadExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1EmptyUploadException(body, context); + const contents: EmptyUploadException = { + name: "EmptyUploadException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1ImageAlreadyExistsExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1ImageAlreadyExistsException(body, context); + const contents: ImageAlreadyExistsException = { + name: "ImageAlreadyExistsException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1ImageDigestDoesNotMatchExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1ImageDigestDoesNotMatchException(body, context); + const contents: 
ImageDigestDoesNotMatchException = { + name: "ImageDigestDoesNotMatchException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1ImageNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1ImageNotFoundException(body, context); + const contents: ImageNotFoundException = { + name: "ImageNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1ImageTagAlreadyExistsExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1ImageTagAlreadyExistsException(body, context); + const contents: ImageTagAlreadyExistsException = { + name: "ImageTagAlreadyExistsException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1InvalidLayerExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1InvalidLayerException(body, context); + const contents: InvalidLayerException = { + name: "InvalidLayerException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1InvalidLayerPartExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1InvalidLayerPartException(body, context); + const contents: InvalidLayerPartException = { + name: "InvalidLayerPartException", + $fault: "client", + $metadata: 
deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1InvalidParameterExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1InvalidParameterException(body, context); + const contents: InvalidParameterException = { + name: "InvalidParameterException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1LayerAlreadyExistsExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1LayerAlreadyExistsException(body, context); + const contents: LayerAlreadyExistsException = { + name: "LayerAlreadyExistsException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1LayerPartTooSmallExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1LayerPartTooSmallException(body, context); + const contents: LayerPartTooSmallException = { + name: "LayerPartTooSmallException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1LayersNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1LayersNotFoundException(body, context); + const contents: LayersNotFoundException = { + name: "LayersNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const 
deserializeAws_json1_1LimitExceededExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1LimitExceededException(body, context); + const contents: LimitExceededException = { + name: "LimitExceededException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1ReferencedImagesNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1ReferencedImagesNotFoundException(body, context); + const contents: ReferencedImagesNotFoundException = { + name: "ReferencedImagesNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1RegistryNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1RegistryNotFoundException(body, context); + const contents: RegistryNotFoundException = { + name: "RegistryNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1RepositoryAlreadyExistsExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1RepositoryAlreadyExistsException(body, context); + const contents: RepositoryAlreadyExistsException = { + name: "RepositoryAlreadyExistsException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1RepositoryNotEmptyExceptionResponse = async ( + parsedOutput: any, + 
context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1RepositoryNotEmptyException(body, context); + const contents: RepositoryNotEmptyException = { + name: "RepositoryNotEmptyException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1RepositoryNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1RepositoryNotFoundException(body, context); + const contents: RepositoryNotFoundException = { + name: "RepositoryNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1RepositoryPolicyNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1RepositoryPolicyNotFoundException(body, context); + const contents: RepositoryPolicyNotFoundException = { + name: "RepositoryPolicyNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1ServerExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1ServerException(body, context); + const contents: ServerException = { + name: "ServerException", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1UnsupportedCommandExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = 
deserializeAws_json1_1UnsupportedCommandException(body, context); + const contents: UnsupportedCommandException = { + name: "UnsupportedCommandException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const deserializeAws_json1_1UploadNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const body = parsedOutput.body; + const deserialized: any = deserializeAws_json1_1UploadNotFoundException(body, context); + const contents: UploadNotFoundException = { + name: "UploadNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + ...deserialized, + }; + return contents; +}; + +const serializeAws_json1_1ArchitectureList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_json1_1BatchCheckLayerAvailabilityRequest = ( + input: BatchCheckLayerAvailabilityRequest, + context: __SerdeContext +): any => { + return { + ...(input.layerDigests !== undefined && { + layerDigests: serializeAws_json1_1BatchedOperationLayerDigestList(input.layerDigests, context), + }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1BatchDeleteImageRequest = (input: BatchDeleteImageRequest, context: __SerdeContext): any => { + return { + ...(input.imageIds !== undefined && { imageIds: serializeAws_json1_1ImageIdentifierList(input.imageIds, context) }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1BatchedOperationLayerDigestList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_json1_1CompleteLayerUploadRequest = ( + 
input: CompleteLayerUploadRequest, + context: __SerdeContext +): any => { + return { + ...(input.layerDigests !== undefined && { + layerDigests: serializeAws_json1_1LayerDigestList(input.layerDigests, context), + }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + ...(input.uploadId !== undefined && { uploadId: input.uploadId }), + }; +}; + +const serializeAws_json1_1CreateRepositoryRequest = (input: CreateRepositoryRequest, context: __SerdeContext): any => { + return { + ...(input.catalogData !== undefined && { + catalogData: serializeAws_json1_1RepositoryCatalogDataInput(input.catalogData, context), + }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1DeleteRepositoryPolicyRequest = ( + input: DeleteRepositoryPolicyRequest, + context: __SerdeContext +): any => { + return { + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1DeleteRepositoryRequest = (input: DeleteRepositoryRequest, context: __SerdeContext): any => { + return { + ...(input.force !== undefined && { force: input.force }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1DescribeImagesRequest = (input: DescribeImagesRequest, context: __SerdeContext): any => { + return { + ...(input.imageIds !== undefined && { imageIds: serializeAws_json1_1ImageIdentifierList(input.imageIds, context) }), + ...(input.maxResults !== undefined && { maxResults: input.maxResults }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + 
...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1DescribeImageTagsRequest = ( + input: DescribeImageTagsRequest, + context: __SerdeContext +): any => { + return { + ...(input.maxResults !== undefined && { maxResults: input.maxResults }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1DescribeRegistriesRequest = ( + input: DescribeRegistriesRequest, + context: __SerdeContext +): any => { + return { + ...(input.maxResults !== undefined && { maxResults: input.maxResults }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + }; +}; + +const serializeAws_json1_1DescribeRepositoriesRequest = ( + input: DescribeRepositoriesRequest, + context: __SerdeContext +): any => { + return { + ...(input.maxResults !== undefined && { maxResults: input.maxResults }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryNames !== undefined && { + repositoryNames: serializeAws_json1_1RepositoryNameList(input.repositoryNames, context), + }), + }; +}; + +const serializeAws_json1_1GetAuthorizationTokenRequest = ( + input: GetAuthorizationTokenRequest, + context: __SerdeContext +): any => { + return {}; +}; + +const serializeAws_json1_1GetRegistryCatalogDataRequest = ( + input: GetRegistryCatalogDataRequest, + context: __SerdeContext +): any => { + return {}; +}; + +const serializeAws_json1_1GetRepositoryCatalogDataRequest = ( + input: GetRepositoryCatalogDataRequest, + context: __SerdeContext +): any => { + return { + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: 
input.repositoryName }), + }; +}; + +const serializeAws_json1_1GetRepositoryPolicyRequest = ( + input: GetRepositoryPolicyRequest, + context: __SerdeContext +): any => { + return { + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1ImageIdentifier = (input: ImageIdentifier, context: __SerdeContext): any => { + return { + ...(input.imageDigest !== undefined && { imageDigest: input.imageDigest }), + ...(input.imageTag !== undefined && { imageTag: input.imageTag }), + }; +}; + +const serializeAws_json1_1ImageIdentifierList = (input: ImageIdentifier[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_json1_1ImageIdentifier(entry, context)); +}; + +const serializeAws_json1_1InitiateLayerUploadRequest = ( + input: InitiateLayerUploadRequest, + context: __SerdeContext +): any => { + return { + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1LayerDigestList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_json1_1OperatingSystemList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_json1_1PutImageRequest = (input: PutImageRequest, context: __SerdeContext): any => { + return { + ...(input.imageDigest !== undefined && { imageDigest: input.imageDigest }), + ...(input.imageManifest !== undefined && { imageManifest: input.imageManifest }), + ...(input.imageManifestMediaType !== undefined && { imageManifestMediaType: input.imageManifestMediaType }), + ...(input.imageTag !== undefined && { imageTag: input.imageTag }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== 
undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1PutRegistryCatalogDataRequest = ( + input: PutRegistryCatalogDataRequest, + context: __SerdeContext +): any => { + return { + ...(input.displayName !== undefined && { displayName: input.displayName }), + }; +}; + +const serializeAws_json1_1PutRepositoryCatalogDataRequest = ( + input: PutRepositoryCatalogDataRequest, + context: __SerdeContext +): any => { + return { + ...(input.catalogData !== undefined && { + catalogData: serializeAws_json1_1RepositoryCatalogDataInput(input.catalogData, context), + }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1RepositoryCatalogDataInput = ( + input: RepositoryCatalogDataInput, + context: __SerdeContext +): any => { + return { + ...(input.aboutText !== undefined && { aboutText: input.aboutText }), + ...(input.architectures !== undefined && { + architectures: serializeAws_json1_1ArchitectureList(input.architectures, context), + }), + ...(input.description !== undefined && { description: input.description }), + ...(input.logoImageBlob !== undefined && { logoImageBlob: context.base64Encoder(input.logoImageBlob) }), + ...(input.operatingSystems !== undefined && { + operatingSystems: serializeAws_json1_1OperatingSystemList(input.operatingSystems, context), + }), + ...(input.usageText !== undefined && { usageText: input.usageText }), + }; +}; + +const serializeAws_json1_1RepositoryNameList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_json1_1SetRepositoryPolicyRequest = ( + input: SetRepositoryPolicyRequest, + context: __SerdeContext +): any => { + return { + ...(input.force !== undefined && { force: input.force }), + ...(input.policyText !== undefined && { policyText: input.policyText }), + ...(input.registryId !== 
undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + }; +}; + +const serializeAws_json1_1UploadLayerPartRequest = (input: UploadLayerPartRequest, context: __SerdeContext): any => { + return { + ...(input.layerPartBlob !== undefined && { layerPartBlob: context.base64Encoder(input.layerPartBlob) }), + ...(input.partFirstByte !== undefined && { partFirstByte: input.partFirstByte }), + ...(input.partLastByte !== undefined && { partLastByte: input.partLastByte }), + ...(input.registryId !== undefined && { registryId: input.registryId }), + ...(input.repositoryName !== undefined && { repositoryName: input.repositoryName }), + ...(input.uploadId !== undefined && { uploadId: input.uploadId }), + }; +}; + +const deserializeAws_json1_1ArchitectureList = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_json1_1AuthorizationData = (output: any, context: __SerdeContext): AuthorizationData => { + return { + authorizationToken: + output.authorizationToken !== undefined && output.authorizationToken !== null + ? output.authorizationToken + : undefined, + expiresAt: + output.expiresAt !== undefined && output.expiresAt !== null + ? new Date(Math.round(output.expiresAt * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_json1_1BatchCheckLayerAvailabilityResponse = ( + output: any, + context: __SerdeContext +): BatchCheckLayerAvailabilityResponse => { + return { + failures: + output.failures !== undefined && output.failures !== null + ? deserializeAws_json1_1LayerFailureList(output.failures, context) + : undefined, + layers: + output.layers !== undefined && output.layers !== null + ? 
deserializeAws_json1_1LayerList(output.layers, context)
        : undefined,
  } as any;
};

/** Deserializes a BatchDeleteImage response; absent/null members become undefined. */
const deserializeAws_json1_1BatchDeleteImageResponse = (
  output: any,
  context: __SerdeContext
): BatchDeleteImageResponse => {
  return {
    failures: output.failures != null ? deserializeAws_json1_1ImageFailureList(output.failures, context) : undefined,
    imageIds: output.imageIds != null ? deserializeAws_json1_1ImageIdentifierList(output.imageIds, context) : undefined,
  } as any;
};

/** Deserializes a CompleteLayerUpload response; absent/null members become undefined. */
const deserializeAws_json1_1CompleteLayerUploadResponse = (
  output: any,
  context: __SerdeContext
): CompleteLayerUploadResponse => {
  return {
    layerDigest: output.layerDigest != null ? output.layerDigest : undefined,
    registryId: output.registryId != null ? output.registryId : undefined,
    repositoryName: output.repositoryName != null ? output.repositoryName : undefined,
    uploadId: output.uploadId != null ? output.uploadId : undefined,
  } as any;
};

/** Deserializes a CreateRepository response; absent/null members become undefined. */
const deserializeAws_json1_1CreateRepositoryResponse = (
  output: any,
  context: __SerdeContext
): CreateRepositoryResponse => {
  return {
    catalogData:
      output.catalogData != null ? deserializeAws_json1_1RepositoryCatalogData(output.catalogData, context) : undefined,
    repository: output.repository != null ? deserializeAws_json1_1Repository(output.repository, context) : undefined,
  } as any;
};

/** Deserializes a DeleteRepositoryPolicy response; absent/null members become undefined. */
const deserializeAws_json1_1DeleteRepositoryPolicyResponse = (
  output: any,
  context: __SerdeContext
): DeleteRepositoryPolicyResponse => {
  return {
    policyText: output.policyText != null ? output.policyText : undefined,
    registryId: output.registryId != null ? output.registryId : undefined,
    repositoryName: output.repositoryName != null ? output.repositoryName : undefined,
  } as any;
};

/** Deserializes a DeleteRepository response; absent/null members become undefined. */
const deserializeAws_json1_1DeleteRepositoryResponse = (
  output: any,
  context: __SerdeContext
): DeleteRepositoryResponse => {
  return {
    repository: output.repository != null ? deserializeAws_json1_1Repository(output.repository, context) : undefined,
  } as any;
};

/** Deserializes a DescribeImages response; absent/null members become undefined. */
const deserializeAws_json1_1DescribeImagesResponse = (output: any, context: __SerdeContext): DescribeImagesResponse => {
  return {
    imageDetails:
      output.imageDetails != null ? deserializeAws_json1_1ImageDetailList(output.imageDetails, context) : undefined,
    nextToken: output.nextToken != null ? output.nextToken : undefined,
  } as any;
};

/** Deserializes a DescribeImageTags response; absent/null members become undefined. */
const deserializeAws_json1_1DescribeImageTagsResponse = (
  output: any,
  context: __SerdeContext
): DescribeImageTagsResponse => {
  return {
    imageTagDetails:
      output.imageTagDetails != null
        ? deserializeAws_json1_1ImageTagDetailList(output.imageTagDetails, context)
        : undefined,
    nextToken: output.nextToken != null ? output.nextToken : undefined,
  } as any;
};

/** Deserializes a DescribeRegistries response; absent/null members become undefined. */
const deserializeAws_json1_1DescribeRegistriesResponse = (
  output: any,
  context: __SerdeContext
): DescribeRegistriesResponse => {
  return {
    nextToken: output.nextToken != null ? output.nextToken : undefined,
    registries: output.registries != null ? deserializeAws_json1_1RegistryList(output.registries, context) : undefined,
  } as any;
};

/** Deserializes a DescribeRepositories response; absent/null members become undefined. */
const deserializeAws_json1_1DescribeRepositoriesResponse = (
  output: any,
  context: __SerdeContext
): DescribeRepositoriesResponse => {
  return {
    nextToken: output.nextToken != null ? output.nextToken : undefined,
    repositories:
      output.repositories != null ? deserializeAws_json1_1RepositoryList(output.repositories, context) : undefined,
  } as any;
};

/** Deserializes the EmptyUploadException shape (message only). */
const deserializeAws_json1_1EmptyUploadException = (output: any, context: __SerdeContext): EmptyUploadException => {
  return {
    message: output.message != null ? output.message : undefined,
  } as any;
};

/** Deserializes a GetAuthorizationToken response; absent/null members become undefined. */
const deserializeAws_json1_1GetAuthorizationTokenResponse = (
  output: any,
  context: __SerdeContext
): GetAuthorizationTokenResponse => {
  return {
    authorizationData:
      output.authorizationData != null
        ? deserializeAws_json1_1AuthorizationData(output.authorizationData, context)
        : undefined,
  } as any;
};

/** Deserializes a GetRegistryCatalogData response; absent/null members become undefined. */
const deserializeAws_json1_1GetRegistryCatalogDataResponse = (
  output: any,
  context: __SerdeContext
): GetRegistryCatalogDataResponse => {
  return {
    registryCatalogData:
      output.registryCatalogData != null
        ? deserializeAws_json1_1RegistryCatalogData(output.registryCatalogData, context)
        : undefined,
  } as any;
};

/** Deserializes a GetRepositoryCatalogData response; absent/null members become undefined. */
const deserializeAws_json1_1GetRepositoryCatalogDataResponse = (
  output: any,
  context: __SerdeContext
): GetRepositoryCatalogDataResponse => {
  return {
    catalogData:
      output.catalogData != null ? deserializeAws_json1_1RepositoryCatalogData(output.catalogData, context) : undefined,
  } as any;
};

/** Deserializes a GetRepositoryPolicy response; absent/null members become undefined. */
const deserializeAws_json1_1GetRepositoryPolicyResponse = (
  output: any,
  context: __SerdeContext
): GetRepositoryPolicyResponse => {
  return {
    policyText: output.policyText != null ? output.policyText : undefined,
    registryId: output.registryId != null ? output.registryId : undefined,
    repositoryName: output.repositoryName != null ? output.repositoryName : undefined,
  } as any;
};

/** Deserializes an Image shape; absent/null members become undefined. */
const deserializeAws_json1_1Image = (output: any, context: __SerdeContext): Image => {
  return {
    imageId: output.imageId != null ? deserializeAws_json1_1ImageIdentifier(output.imageId, context) : undefined,
    imageManifest: output.imageManifest != null ? output.imageManifest : undefined,
    imageManifestMediaType: output.imageManifestMediaType != null ? output.imageManifestMediaType : undefined,
    registryId: output.registryId != null ? output.registryId : undefined,
    repositoryName: output.repositoryName != null ? output.repositoryName : undefined,
  } as any;
};

/** Deserializes the ImageAlreadyExistsException shape (message only). */
const deserializeAws_json1_1ImageAlreadyExistsException = (
  output: any,
  context: __SerdeContext
): ImageAlreadyExistsException => {
  return {
    message: output.message != null ? output.message : undefined,
  } as any;
};

const deserializeAws_json1_1ImageDetail = (output: any, context: __SerdeContext): ImageDetail => {
  return {
    artifactMediaType:
      output.artifactMediaType !== undefined && output.artifactMediaType !== null
        ?
output.artifactMediaType + : undefined, + imageDigest: output.imageDigest !== undefined && output.imageDigest !== null ? output.imageDigest : undefined, + imageManifestMediaType: + output.imageManifestMediaType !== undefined && output.imageManifestMediaType !== null + ? output.imageManifestMediaType + : undefined, + imagePushedAt: + output.imagePushedAt !== undefined && output.imagePushedAt !== null + ? new Date(Math.round(output.imagePushedAt * 1000)) + : undefined, + imageSizeInBytes: + output.imageSizeInBytes !== undefined && output.imageSizeInBytes !== null ? output.imageSizeInBytes : undefined, + imageTags: + output.imageTags !== undefined && output.imageTags !== null + ? deserializeAws_json1_1ImageTagList(output.imageTags, context) + : undefined, + registryId: output.registryId !== undefined && output.registryId !== null ? output.registryId : undefined, + repositoryName: + output.repositoryName !== undefined && output.repositoryName !== null ? output.repositoryName : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageDetailList = (output: any, context: __SerdeContext): ImageDetail[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ImageDetail(entry, context)); +}; + +const deserializeAws_json1_1ImageDigestDoesNotMatchException = ( + output: any, + context: __SerdeContext +): ImageDigestDoesNotMatchException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageFailure = (output: any, context: __SerdeContext): ImageFailure => { + return { + failureCode: output.failureCode !== undefined && output.failureCode !== null ? output.failureCode : undefined, + failureReason: + output.failureReason !== undefined && output.failureReason !== null ? output.failureReason : undefined, + imageId: + output.imageId !== undefined && output.imageId !== null + ? 
deserializeAws_json1_1ImageIdentifier(output.imageId, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageFailureList = (output: any, context: __SerdeContext): ImageFailure[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ImageFailure(entry, context)); +}; + +const deserializeAws_json1_1ImageIdentifier = (output: any, context: __SerdeContext): ImageIdentifier => { + return { + imageDigest: output.imageDigest !== undefined && output.imageDigest !== null ? output.imageDigest : undefined, + imageTag: output.imageTag !== undefined && output.imageTag !== null ? output.imageTag : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageIdentifierList = (output: any, context: __SerdeContext): ImageIdentifier[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ImageIdentifier(entry, context)); +}; + +const deserializeAws_json1_1ImageNotFoundException = (output: any, context: __SerdeContext): ImageNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageTagAlreadyExistsException = ( + output: any, + context: __SerdeContext +): ImageTagAlreadyExistsException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageTagDetail = (output: any, context: __SerdeContext): ImageTagDetail => { + return { + createdAt: + output.createdAt !== undefined && output.createdAt !== null + ? new Date(Math.round(output.createdAt * 1000)) + : undefined, + imageDetail: + output.imageDetail !== undefined && output.imageDetail !== null + ? deserializeAws_json1_1ReferencedImageDetail(output.imageDetail, context) + : undefined, + imageTag: output.imageTag !== undefined && output.imageTag !== null ? 
output.imageTag : undefined, + } as any; +}; + +const deserializeAws_json1_1ImageTagDetailList = (output: any, context: __SerdeContext): ImageTagDetail[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ImageTagDetail(entry, context)); +}; + +const deserializeAws_json1_1ImageTagList = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_json1_1InitiateLayerUploadResponse = ( + output: any, + context: __SerdeContext +): InitiateLayerUploadResponse => { + return { + partSize: output.partSize !== undefined && output.partSize !== null ? output.partSize : undefined, + uploadId: output.uploadId !== undefined && output.uploadId !== null ? output.uploadId : undefined, + } as any; +}; + +const deserializeAws_json1_1InvalidLayerException = (output: any, context: __SerdeContext): InvalidLayerException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1InvalidLayerPartException = ( + output: any, + context: __SerdeContext +): InvalidLayerPartException => { + return { + lastValidByteReceived: + output.lastValidByteReceived !== undefined && output.lastValidByteReceived !== null + ? output.lastValidByteReceived + : undefined, + message: output.message !== undefined && output.message !== null ? output.message : undefined, + registryId: output.registryId !== undefined && output.registryId !== null ? output.registryId : undefined, + repositoryName: + output.repositoryName !== undefined && output.repositoryName !== null ? output.repositoryName : undefined, + uploadId: output.uploadId !== undefined && output.uploadId !== null ? 
output.uploadId : undefined, + } as any; +}; + +const deserializeAws_json1_1InvalidParameterException = ( + output: any, + context: __SerdeContext +): InvalidParameterException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1Layer = (output: any, context: __SerdeContext): Layer => { + return { + layerAvailability: + output.layerAvailability !== undefined && output.layerAvailability !== null + ? output.layerAvailability + : undefined, + layerDigest: output.layerDigest !== undefined && output.layerDigest !== null ? output.layerDigest : undefined, + layerSize: output.layerSize !== undefined && output.layerSize !== null ? output.layerSize : undefined, + mediaType: output.mediaType !== undefined && output.mediaType !== null ? output.mediaType : undefined, + } as any; +}; + +const deserializeAws_json1_1LayerAlreadyExistsException = ( + output: any, + context: __SerdeContext +): LayerAlreadyExistsException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1LayerFailure = (output: any, context: __SerdeContext): LayerFailure => { + return { + failureCode: output.failureCode !== undefined && output.failureCode !== null ? output.failureCode : undefined, + failureReason: + output.failureReason !== undefined && output.failureReason !== null ? output.failureReason : undefined, + layerDigest: output.layerDigest !== undefined && output.layerDigest !== null ? 
output.layerDigest : undefined, + } as any; +}; + +const deserializeAws_json1_1LayerFailureList = (output: any, context: __SerdeContext): LayerFailure[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1LayerFailure(entry, context)); +}; + +const deserializeAws_json1_1LayerList = (output: any, context: __SerdeContext): Layer[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1Layer(entry, context)); +}; + +const deserializeAws_json1_1LayerPartTooSmallException = ( + output: any, + context: __SerdeContext +): LayerPartTooSmallException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1LayersNotFoundException = ( + output: any, + context: __SerdeContext +): LayersNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1LimitExceededException = (output: any, context: __SerdeContext): LimitExceededException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1OperatingSystemList = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_json1_1PutImageResponse = (output: any, context: __SerdeContext): PutImageResponse => { + return { + image: + output.image !== undefined && output.image !== null + ? deserializeAws_json1_1Image(output.image, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1PutRegistryCatalogDataResponse = ( + output: any, + context: __SerdeContext +): PutRegistryCatalogDataResponse => { + return { + registryCatalogData: + output.registryCatalogData !== undefined && output.registryCatalogData !== null + ? 
deserializeAws_json1_1RegistryCatalogData(output.registryCatalogData, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1PutRepositoryCatalogDataResponse = ( + output: any, + context: __SerdeContext +): PutRepositoryCatalogDataResponse => { + return { + catalogData: + output.catalogData !== undefined && output.catalogData !== null + ? deserializeAws_json1_1RepositoryCatalogData(output.catalogData, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ReferencedImageDetail = (output: any, context: __SerdeContext): ReferencedImageDetail => { + return { + artifactMediaType: + output.artifactMediaType !== undefined && output.artifactMediaType !== null + ? output.artifactMediaType + : undefined, + imageDigest: output.imageDigest !== undefined && output.imageDigest !== null ? output.imageDigest : undefined, + imageManifestMediaType: + output.imageManifestMediaType !== undefined && output.imageManifestMediaType !== null + ? output.imageManifestMediaType + : undefined, + imagePushedAt: + output.imagePushedAt !== undefined && output.imagePushedAt !== null + ? new Date(Math.round(output.imagePushedAt * 1000)) + : undefined, + imageSizeInBytes: + output.imageSizeInBytes !== undefined && output.imageSizeInBytes !== null ? output.imageSizeInBytes : undefined, + } as any; +}; + +const deserializeAws_json1_1ReferencedImagesNotFoundException = ( + output: any, + context: __SerdeContext +): ReferencedImagesNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1Registry = (output: any, context: __SerdeContext): Registry => { + return { + aliases: + output.aliases !== undefined && output.aliases !== null + ? deserializeAws_json1_1RegistryAliasList(output.aliases, context) + : undefined, + registryArn: output.registryArn !== undefined && output.registryArn !== null ? 
output.registryArn : undefined, + registryId: output.registryId !== undefined && output.registryId !== null ? output.registryId : undefined, + registryUri: output.registryUri !== undefined && output.registryUri !== null ? output.registryUri : undefined, + verified: output.verified !== undefined && output.verified !== null ? output.verified : undefined, + } as any; +}; + +const deserializeAws_json1_1RegistryAlias = (output: any, context: __SerdeContext): RegistryAlias => { + return { + defaultRegistryAlias: + output.defaultRegistryAlias !== undefined && output.defaultRegistryAlias !== null + ? output.defaultRegistryAlias + : undefined, + name: output.name !== undefined && output.name !== null ? output.name : undefined, + primaryRegistryAlias: + output.primaryRegistryAlias !== undefined && output.primaryRegistryAlias !== null + ? output.primaryRegistryAlias + : undefined, + status: output.status !== undefined && output.status !== null ? output.status : undefined, + } as any; +}; + +const deserializeAws_json1_1RegistryAliasList = (output: any, context: __SerdeContext): RegistryAlias[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1RegistryAlias(entry, context)); +}; + +const deserializeAws_json1_1RegistryCatalogData = (output: any, context: __SerdeContext): RegistryCatalogData => { + return { + displayName: output.displayName !== undefined && output.displayName !== null ? output.displayName : undefined, + } as any; +}; + +const deserializeAws_json1_1RegistryList = (output: any, context: __SerdeContext): Registry[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1Registry(entry, context)); +}; + +const deserializeAws_json1_1RegistryNotFoundException = ( + output: any, + context: __SerdeContext +): RegistryNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? 
output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1Repository = (output: any, context: __SerdeContext): Repository => { + return { + createdAt: + output.createdAt !== undefined && output.createdAt !== null + ? new Date(Math.round(output.createdAt * 1000)) + : undefined, + registryId: output.registryId !== undefined && output.registryId !== null ? output.registryId : undefined, + repositoryArn: + output.repositoryArn !== undefined && output.repositoryArn !== null ? output.repositoryArn : undefined, + repositoryName: + output.repositoryName !== undefined && output.repositoryName !== null ? output.repositoryName : undefined, + repositoryUri: + output.repositoryUri !== undefined && output.repositoryUri !== null ? output.repositoryUri : undefined, + } as any; +}; + +const deserializeAws_json1_1RepositoryAlreadyExistsException = ( + output: any, + context: __SerdeContext +): RepositoryAlreadyExistsException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1RepositoryCatalogData = (output: any, context: __SerdeContext): RepositoryCatalogData => { + return { + aboutText: output.aboutText !== undefined && output.aboutText !== null ? output.aboutText : undefined, + architectures: + output.architectures !== undefined && output.architectures !== null + ? deserializeAws_json1_1ArchitectureList(output.architectures, context) + : undefined, + description: output.description !== undefined && output.description !== null ? output.description : undefined, + logoUrl: output.logoUrl !== undefined && output.logoUrl !== null ? output.logoUrl : undefined, + marketplaceCertified: + output.marketplaceCertified !== undefined && output.marketplaceCertified !== null + ? output.marketplaceCertified + : undefined, + operatingSystems: + output.operatingSystems !== undefined && output.operatingSystems !== null + ? 
deserializeAws_json1_1OperatingSystemList(output.operatingSystems, context) + : undefined, + usageText: output.usageText !== undefined && output.usageText !== null ? output.usageText : undefined, + } as any; +}; + +const deserializeAws_json1_1RepositoryList = (output: any, context: __SerdeContext): Repository[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1Repository(entry, context)); +}; + +const deserializeAws_json1_1RepositoryNotEmptyException = ( + output: any, + context: __SerdeContext +): RepositoryNotEmptyException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1RepositoryNotFoundException = ( + output: any, + context: __SerdeContext +): RepositoryNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1RepositoryPolicyNotFoundException = ( + output: any, + context: __SerdeContext +): RepositoryPolicyNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1ServerException = (output: any, context: __SerdeContext): ServerException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1SetRepositoryPolicyResponse = ( + output: any, + context: __SerdeContext +): SetRepositoryPolicyResponse => { + return { + policyText: output.policyText !== undefined && output.policyText !== null ? output.policyText : undefined, + registryId: output.registryId !== undefined && output.registryId !== null ? output.registryId : undefined, + repositoryName: + output.repositoryName !== undefined && output.repositoryName !== null ? 
output.repositoryName : undefined, + } as any; +}; + +const deserializeAws_json1_1UnsupportedCommandException = ( + output: any, + context: __SerdeContext +): UnsupportedCommandException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeAws_json1_1UploadLayerPartResponse = ( + output: any, + context: __SerdeContext +): UploadLayerPartResponse => { + return { + lastByteReceived: + output.lastByteReceived !== undefined && output.lastByteReceived !== null ? output.lastByteReceived : undefined, + registryId: output.registryId !== undefined && output.registryId !== null ? output.registryId : undefined, + repositoryName: + output.repositoryName !== undefined && output.repositoryName !== null ? output.repositoryName : undefined, + uploadId: output.uploadId !== undefined && output.uploadId !== null ? output.uploadId : undefined, + } as any; +}; + +const deserializeAws_json1_1UploadNotFoundException = ( + output: any, + context: __SerdeContext +): UploadNotFoundException => { + return { + message: output.message !== undefined && output.message !== null ? output.message : undefined, + } as any; +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. +const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise<Uint8Array> => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}; + +// Encode Uint8Array data into string with utf-8. 
+const collectBodyString = (streamBody: any, context: __SerdeContext): Promise<string> => + collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); + +const buildHttpRpcRequest = async ( + context: __SerdeContext, + headers: __HeaderBag, + path: string, + resolvedHostname: string | undefined, + body: any +): Promise<__HttpRequest> => { + const { hostname, protocol = "https", port } = await context.endpoint(); + const contents: any = { + protocol, + hostname, + port, + method: "POST", + path, + headers, + }; + if (resolvedHostname !== undefined) { + contents.hostname = resolvedHostname; + } + if (body !== undefined) { + contents.body = body; + } + return new __HttpRequest(contents); +}; + +const parseBody = (streamBody: any, context: __SerdeContext): any => + collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; + }); diff --git a/clients/client-ecr-public/runtimeConfig.browser.ts b/clients/client-ecr-public/runtimeConfig.browser.ts new file mode 100644 index 000000000000..5e3675eab6a1 --- /dev/null +++ b/clients/client-ecr-public/runtimeConfig.browser.ts @@ -0,0 +1,34 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler"; +import { invalidAsyncFunction } from "@aws-sdk/invalid-dependency"; +import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry"; +import { parseUrl } from "@aws-sdk/url-parser-browser"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-browser"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser"; +import { ClientDefaults } from "./ECRPUBLICClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const 
ClientDefaultValues: Required<ClientDefaults> = { + ...ClientSharedValues, + runtime: "browser", + base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider: invalidAsyncFunction("Credential is missing") as any, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: DEFAULT_MAX_ATTEMPTS, + region: invalidAsyncFunction("Region is missing") as any, + requestHandler: new FetchHttpHandler(), + sha256: Sha256, + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-ecr-public/runtimeConfig.native.ts b/clients/client-ecr-public/runtimeConfig.native.ts new file mode 100644 index 000000000000..97909e5f0316 --- /dev/null +++ b/clients/client-ecr-public/runtimeConfig.native.ts @@ -0,0 +1,17 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-js"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { ClientDefaults } from "./ECRPUBLICClient"; +import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser"; + +/** + * @internal + */ +export const ClientDefaultValues: Required<ClientDefaults> = { + ...BrowserDefaults, + runtime: "react-native", + defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`, + sha256: Sha256, + urlParser: parseUrl, +}; diff --git a/clients/client-ecr-public/runtimeConfig.shared.ts b/clients/client-ecr-public/runtimeConfig.shared.ts new file mode 100644 index 000000000000..2914e8d7d679 --- /dev/null +++ b/clients/client-ecr-public/runtimeConfig.shared.ts @@ -0,0 +1,13 @@ +import { defaultRegionInfoProvider } from "./endpoints"; +import { Logger as __Logger } from "@aws-sdk/types"; + +/** + * @internal + */ +export const ClientSharedValues = { + apiVersion: "2020-10-30", + disableHostPrefix: false, + logger: {} as __Logger, + regionInfoProvider: defaultRegionInfoProvider, + signingName: "ecr-public", +}; diff --git 
a/clients/client-ecr-public/runtimeConfig.ts b/clients/client-ecr-public/runtimeConfig.ts new file mode 100644 index 000000000000..f8589fd728f5 --- /dev/null +++ b/clients/client-ecr-public/runtimeConfig.ts @@ -0,0 +1,36 @@ +import packageInfo from "./package.json"; + +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { Hash } from "@aws-sdk/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider"; +import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-node"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-node"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node"; +import { ClientDefaults } from "./ECRPUBLICClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "node", + base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: new NodeHttpHandler(), + sha256: Hash.bind(null, "sha256"), + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-ecr-public/tsconfig.es.json b/clients/client-ecr-public/tsconfig.es.json new file mode 100644 index 000000000000..30df5d2e6986 --- 
/dev/null +++ b/clients/client-ecr-public/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-ecr-public/tsconfig.json b/clients/client-ecr-public/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-ecr-public/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": "./README.md", + "mode": "file", + "out": "./docs", + "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-eks/EKS.ts b/clients/client-eks/EKS.ts index 4ea2160b1d3f..ba1d1a595f71 100644 --- a/clients/client-eks/EKS.ts +++ b/clients/client-eks/EKS.ts @@ -1,4 +1,5 @@ import { EKSClient } from "./EKSClient"; +import { CreateAddonCommand, CreateAddonCommandInput, CreateAddonCommandOutput } from "./commands/CreateAddonCommand"; import { CreateClusterCommand, CreateClusterCommandInput, @@ -14,6 +15,7 @@ import { CreateNodegroupCommandInput, CreateNodegroupCommandOutput, } from "./commands/CreateNodegroupCommand"; +import { DeleteAddonCommand, 
DeleteAddonCommandInput, DeleteAddonCommandOutput } from "./commands/DeleteAddonCommand"; import { DeleteClusterCommand, DeleteClusterCommandInput, @@ -29,6 +31,16 @@ import { DeleteNodegroupCommandInput, DeleteNodegroupCommandOutput, } from "./commands/DeleteNodegroupCommand"; +import { + DescribeAddonCommand, + DescribeAddonCommandInput, + DescribeAddonCommandOutput, +} from "./commands/DescribeAddonCommand"; +import { + DescribeAddonVersionsCommand, + DescribeAddonVersionsCommandInput, + DescribeAddonVersionsCommandOutput, +} from "./commands/DescribeAddonVersionsCommand"; import { DescribeClusterCommand, DescribeClusterCommandInput, @@ -49,6 +61,7 @@ import { DescribeUpdateCommandInput, DescribeUpdateCommandOutput, } from "./commands/DescribeUpdateCommand"; +import { ListAddonsCommand, ListAddonsCommandInput, ListAddonsCommandOutput } from "./commands/ListAddonsCommand"; import { ListClustersCommand, ListClustersCommandInput, @@ -76,6 +89,7 @@ import { UntagResourceCommandInput, UntagResourceCommandOutput, } from "./commands/UntagResourceCommand"; +import { UpdateAddonCommand, UpdateAddonCommandInput, UpdateAddonCommandOutput } from "./commands/UpdateAddonCommand"; import { UpdateClusterConfigCommand, UpdateClusterConfigCommandInput, @@ -111,6 +125,37 @@ import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; * code modification required.

    */ export class EKS extends EKSClient { + /** + *

    Creates an Amazon EKS add-on.

    + *

    Amazon EKS add-ons help to automate the provisioning and lifecycle management of common + * operational software for Amazon EKS clusters. Amazon EKS add-ons can only be used with Amazon EKS + * clusters running version 1.18 with platform version eks.3 or later because + * add-ons rely on the Server-side Apply Kubernetes feature, which is only available in + * Kubernetes 1.18 and later.

    + */ + public createAddon(args: CreateAddonCommandInput, options?: __HttpHandlerOptions): Promise; + public createAddon(args: CreateAddonCommandInput, cb: (err: any, data?: CreateAddonCommandOutput) => void): void; + public createAddon( + args: CreateAddonCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateAddonCommandOutput) => void + ): void; + public createAddon( + args: CreateAddonCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateAddonCommandOutput) => void), + cb?: (err: any, data?: CreateAddonCommandOutput) => void + ): Promise | void { + const command = new CreateAddonCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Creates an Amazon EKS control plane.

    *

    The Amazon EKS control plane consists of control plane instances that run the Kubernetes @@ -242,8 +287,8 @@ export class EKS extends EKSClient { * template. For more information about using launch templates, see Launch * template support.

    *

    An Amazon EKS managed node group is an Amazon EC2 Auto Scaling group and associated Amazon EC2 instances that - * are managed by AWS for an Amazon EKS cluster. Each node group uses a version of the - * Amazon EKS-optimized Amazon Linux 2 AMI. For more information, see Managed + * are managed by AWS for an Amazon EKS cluster. Each node group uses a version of the Amazon EKS + * optimized Amazon Linux 2 AMI. For more information, see Managed * Node Groups in the Amazon EKS User Guide.

    */ public createNodegroup( @@ -275,6 +320,34 @@ export class EKS extends EKSClient { } } + /** + *

    Delete an Amazon EKS add-on.

    + *

    When you remove the add-on, it will also be deleted from the cluster. You can always + * manually start an add-on on the cluster using the Kubernetes API.

    + */ + public deleteAddon(args: DeleteAddonCommandInput, options?: __HttpHandlerOptions): Promise; + public deleteAddon(args: DeleteAddonCommandInput, cb: (err: any, data?: DeleteAddonCommandOutput) => void): void; + public deleteAddon( + args: DeleteAddonCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteAddonCommandOutput) => void + ): void; + public deleteAddon( + args: DeleteAddonCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteAddonCommandOutput) => void), + cb?: (err: any, data?: DeleteAddonCommandOutput) => void + ): Promise | void { + const command = new DeleteAddonCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Deletes the Amazon EKS cluster control plane.

    *

    If you have active services in your cluster that are associated with a load balancer, @@ -385,6 +458,70 @@ export class EKS extends EKSClient { } } + /** + *

    Describes an Amazon EKS add-on.

    + */ + public describeAddon( + args: DescribeAddonCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeAddon( + args: DescribeAddonCommandInput, + cb: (err: any, data?: DescribeAddonCommandOutput) => void + ): void; + public describeAddon( + args: DescribeAddonCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeAddonCommandOutput) => void + ): void; + public describeAddon( + args: DescribeAddonCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeAddonCommandOutput) => void), + cb?: (err: any, data?: DescribeAddonCommandOutput) => void + ): Promise | void { + const command = new DescribeAddonCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Describes the Kubernetes versions that the add-on can be used with.

    + */ + public describeAddonVersions( + args: DescribeAddonVersionsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeAddonVersions( + args: DescribeAddonVersionsCommandInput, + cb: (err: any, data?: DescribeAddonVersionsCommandOutput) => void + ): void; + public describeAddonVersions( + args: DescribeAddonVersionsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeAddonVersionsCommandOutput) => void + ): void; + public describeAddonVersions( + args: DescribeAddonVersionsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeAddonVersionsCommandOutput) => void), + cb?: (err: any, data?: DescribeAddonVersionsCommandOutput) => void + ): Promise | void { + const command = new DescribeAddonVersionsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Returns descriptive information about an Amazon EKS cluster.

    *

    The API server endpoint and certificate authority data returned by this operation are @@ -525,6 +662,32 @@ export class EKS extends EKSClient { } } + /** + *

    Lists the available add-ons.

    + */ + public listAddons(args: ListAddonsCommandInput, options?: __HttpHandlerOptions): Promise; + public listAddons(args: ListAddonsCommandInput, cb: (err: any, data?: ListAddonsCommandOutput) => void): void; + public listAddons( + args: ListAddonsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAddonsCommandOutput) => void + ): void; + public listAddons( + args: ListAddonsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListAddonsCommandOutput) => void), + cb?: (err: any, data?: ListAddonsCommandOutput) => void + ): Promise | void { + const command = new ListAddonsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Lists the Amazon EKS clusters in your AWS account in the specified Region.

    */ @@ -743,6 +906,32 @@ export class EKS extends EKSClient { } } + /** + *

    Updates an Amazon EKS add-on.

    + */ + public updateAddon(args: UpdateAddonCommandInput, options?: __HttpHandlerOptions): Promise; + public updateAddon(args: UpdateAddonCommandInput, cb: (err: any, data?: UpdateAddonCommandOutput) => void): void; + public updateAddon( + args: UpdateAddonCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateAddonCommandOutput) => void + ): void; + public updateAddon( + args: UpdateAddonCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateAddonCommandOutput) => void), + cb?: (err: any, data?: UpdateAddonCommandOutput) => void + ): Promise | void { + const command = new UpdateAddonCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Updates an Amazon EKS cluster configuration. Your cluster continues to function during the * update. The response output includes an update ID that you can use to track the status @@ -891,7 +1080,8 @@ export class EKS extends EKSClient { * AMI version of a node group's current Kubernetes version by not specifying a Kubernetes * version in the request. You can update to the latest AMI version of your cluster's * current Kubernetes version by specifying your cluster's Kubernetes version in the - * request. For more information, see Amazon EKS-Optimized Linux AMI Versions in the Amazon EKS User Guide.

    + * request. For more information, see Amazon EKS + * optimized Amazon Linux 2 AMI versions in the Amazon EKS User Guide.

    *

    You cannot roll back a node group to an earlier Kubernetes version or AMI * version.

    *

    When a node in a managed node group is terminated due to a scaling action or update, diff --git a/clients/client-eks/EKSClient.ts b/clients/client-eks/EKSClient.ts index 21ea7dfd25a8..0c4eea09e26b 100644 --- a/clients/client-eks/EKSClient.ts +++ b/clients/client-eks/EKSClient.ts @@ -1,15 +1,22 @@ +import { CreateAddonCommandInput, CreateAddonCommandOutput } from "./commands/CreateAddonCommand"; import { CreateClusterCommandInput, CreateClusterCommandOutput } from "./commands/CreateClusterCommand"; import { CreateFargateProfileCommandInput, CreateFargateProfileCommandOutput, } from "./commands/CreateFargateProfileCommand"; import { CreateNodegroupCommandInput, CreateNodegroupCommandOutput } from "./commands/CreateNodegroupCommand"; +import { DeleteAddonCommandInput, DeleteAddonCommandOutput } from "./commands/DeleteAddonCommand"; import { DeleteClusterCommandInput, DeleteClusterCommandOutput } from "./commands/DeleteClusterCommand"; import { DeleteFargateProfileCommandInput, DeleteFargateProfileCommandOutput, } from "./commands/DeleteFargateProfileCommand"; import { DeleteNodegroupCommandInput, DeleteNodegroupCommandOutput } from "./commands/DeleteNodegroupCommand"; +import { DescribeAddonCommandInput, DescribeAddonCommandOutput } from "./commands/DescribeAddonCommand"; +import { + DescribeAddonVersionsCommandInput, + DescribeAddonVersionsCommandOutput, +} from "./commands/DescribeAddonVersionsCommand"; import { DescribeClusterCommandInput, DescribeClusterCommandOutput } from "./commands/DescribeClusterCommand"; import { DescribeFargateProfileCommandInput, @@ -17,6 +24,7 @@ import { } from "./commands/DescribeFargateProfileCommand"; import { DescribeNodegroupCommandInput, DescribeNodegroupCommandOutput } from "./commands/DescribeNodegroupCommand"; import { DescribeUpdateCommandInput, DescribeUpdateCommandOutput } from "./commands/DescribeUpdateCommand"; +import { ListAddonsCommandInput, ListAddonsCommandOutput } from "./commands/ListAddonsCommand"; import { 
ListClustersCommandInput, ListClustersCommandOutput } from "./commands/ListClustersCommand"; import { ListFargateProfilesCommandInput, @@ -30,6 +38,7 @@ import { import { ListUpdatesCommandInput, ListUpdatesCommandOutput } from "./commands/ListUpdatesCommand"; import { TagResourceCommandInput, TagResourceCommandOutput } from "./commands/TagResourceCommand"; import { UntagResourceCommandInput, UntagResourceCommandOutput } from "./commands/UntagResourceCommand"; +import { UpdateAddonCommandInput, UpdateAddonCommandOutput } from "./commands/UpdateAddonCommand"; import { UpdateClusterConfigCommandInput, UpdateClusterConfigCommandOutput, @@ -96,16 +105,21 @@ import { } from "@aws-sdk/types"; export type ServiceInputTypes = + | CreateAddonCommandInput | CreateClusterCommandInput | CreateFargateProfileCommandInput | CreateNodegroupCommandInput + | DeleteAddonCommandInput | DeleteClusterCommandInput | DeleteFargateProfileCommandInput | DeleteNodegroupCommandInput + | DescribeAddonCommandInput + | DescribeAddonVersionsCommandInput | DescribeClusterCommandInput | DescribeFargateProfileCommandInput | DescribeNodegroupCommandInput | DescribeUpdateCommandInput + | ListAddonsCommandInput | ListClustersCommandInput | ListFargateProfilesCommandInput | ListNodegroupsCommandInput @@ -113,22 +127,28 @@ export type ServiceInputTypes = | ListUpdatesCommandInput | TagResourceCommandInput | UntagResourceCommandInput + | UpdateAddonCommandInput | UpdateClusterConfigCommandInput | UpdateClusterVersionCommandInput | UpdateNodegroupConfigCommandInput | UpdateNodegroupVersionCommandInput; export type ServiceOutputTypes = + | CreateAddonCommandOutput | CreateClusterCommandOutput | CreateFargateProfileCommandOutput | CreateNodegroupCommandOutput + | DeleteAddonCommandOutput | DeleteClusterCommandOutput | DeleteFargateProfileCommandOutput | DeleteNodegroupCommandOutput + | DescribeAddonCommandOutput + | DescribeAddonVersionsCommandOutput | DescribeClusterCommandOutput | 
DescribeFargateProfileCommandOutput | DescribeNodegroupCommandOutput | DescribeUpdateCommandOutput + | ListAddonsCommandOutput | ListClustersCommandOutput | ListFargateProfilesCommandOutput | ListNodegroupsCommandOutput @@ -136,6 +156,7 @@ export type ServiceOutputTypes = | ListUpdatesCommandOutput | TagResourceCommandOutput | UntagResourceCommandOutput + | UpdateAddonCommandOutput | UpdateClusterConfigCommandOutput | UpdateClusterVersionCommandOutput | UpdateNodegroupConfigCommandOutput diff --git a/clients/client-eks/commands/CreateAddonCommand.ts b/clients/client-eks/commands/CreateAddonCommand.ts new file mode 100644 index 000000000000..4a547a8ba3e6 --- /dev/null +++ b/clients/client-eks/commands/CreateAddonCommand.ts @@ -0,0 +1,93 @@ +import { EKSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../EKSClient"; +import { CreateAddonRequest, CreateAddonResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateAddonCommand, + serializeAws_restJson1CreateAddonCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateAddonCommandInput = CreateAddonRequest; +export type CreateAddonCommandOutput = CreateAddonResponse & __MetadataBearer; + +/** + *

    Creates an Amazon EKS add-on.

    + *

    Amazon EKS add-ons help to automate the provisioning and lifecycle management of common + * operational software for Amazon EKS clusters. Amazon EKS add-ons can only be used with Amazon EKS + * clusters running version 1.18 with platform version eks.3 or later because + * add-ons rely on the Server-side Apply Kubernetes feature, which is only available in + * Kubernetes 1.18 and later.

    + */ +export class CreateAddonCommand extends $Command< + CreateAddonCommandInput, + CreateAddonCommandOutput, + EKSClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateAddonCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: EKSClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "EKSClient"; + const commandName = "CreateAddonCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateAddonRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateAddonResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateAddonCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateAddonCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateAddonCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-eks/commands/CreateNodegroupCommand.ts b/clients/client-eks/commands/CreateNodegroupCommand.ts index 42d79f830f37..b116f4c4e232 100644 --- 
a/clients/client-eks/commands/CreateNodegroupCommand.ts +++ b/clients/client-eks/commands/CreateNodegroupCommand.ts @@ -28,8 +28,8 @@ export type CreateNodegroupCommandOutput = CreateNodegroupResponse & __MetadataB * template. For more information about using launch templates, see Launch * template support.

    *

    An Amazon EKS managed node group is an Amazon EC2 Auto Scaling group and associated Amazon EC2 instances that - * are managed by AWS for an Amazon EKS cluster. Each node group uses a version of the - * Amazon EKS-optimized Amazon Linux 2 AMI. For more information, see Managed + * are managed by AWS for an Amazon EKS cluster. Each node group uses a version of the Amazon EKS + * optimized Amazon Linux 2 AMI. For more information, see Managed * Node Groups in the Amazon EKS User Guide.

    */ export class CreateNodegroupCommand extends $Command< diff --git a/clients/client-eks/commands/DeleteAddonCommand.ts b/clients/client-eks/commands/DeleteAddonCommand.ts new file mode 100644 index 000000000000..0fc98a18106b --- /dev/null +++ b/clients/client-eks/commands/DeleteAddonCommand.ts @@ -0,0 +1,90 @@ +import { EKSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../EKSClient"; +import { DeleteAddonRequest, DeleteAddonResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteAddonCommand, + serializeAws_restJson1DeleteAddonCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteAddonCommandInput = DeleteAddonRequest; +export type DeleteAddonCommandOutput = DeleteAddonResponse & __MetadataBearer; + +/** + *

    Delete an Amazon EKS add-on.

    + *

    When you remove the add-on, it will also be deleted from the cluster. You can always + * manually start an add-on on the cluster using the Kubernetes API.

    + */ +export class DeleteAddonCommand extends $Command< + DeleteAddonCommandInput, + DeleteAddonCommandOutput, + EKSClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteAddonCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: EKSClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "EKSClient"; + const commandName = "DeleteAddonCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteAddonRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteAddonResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteAddonCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteAddonCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteAddonCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-eks/commands/DescribeAddonCommand.ts b/clients/client-eks/commands/DescribeAddonCommand.ts new file mode 100644 index 000000000000..54ddb5563d52 --- /dev/null +++ 
b/clients/client-eks/commands/DescribeAddonCommand.ts @@ -0,0 +1,88 @@ +import { EKSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../EKSClient"; +import { DescribeAddonRequest, DescribeAddonResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeAddonCommand, + serializeAws_restJson1DescribeAddonCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeAddonCommandInput = DescribeAddonRequest; +export type DescribeAddonCommandOutput = DescribeAddonResponse & __MetadataBearer; + +/** + *

    Describes an Amazon EKS add-on.

    + */ +export class DescribeAddonCommand extends $Command< + DescribeAddonCommandInput, + DescribeAddonCommandOutput, + EKSClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeAddonCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: EKSClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "EKSClient"; + const commandName = "DescribeAddonCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeAddonRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeAddonResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeAddonCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeAddonCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeAddonCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-eks/commands/DescribeAddonVersionsCommand.ts b/clients/client-eks/commands/DescribeAddonVersionsCommand.ts new file mode 100644 index 000000000000..b034243b3117 --- 
/dev/null +++ b/clients/client-eks/commands/DescribeAddonVersionsCommand.ts @@ -0,0 +1,88 @@ +import { EKSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../EKSClient"; +import { DescribeAddonVersionsRequest, DescribeAddonVersionsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeAddonVersionsCommand, + serializeAws_restJson1DescribeAddonVersionsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeAddonVersionsCommandInput = DescribeAddonVersionsRequest; +export type DescribeAddonVersionsCommandOutput = DescribeAddonVersionsResponse & __MetadataBearer; + +/** + *

    Describes the Kubernetes versions that the add-on can be used with.

    + */ +export class DescribeAddonVersionsCommand extends $Command< + DescribeAddonVersionsCommandInput, + DescribeAddonVersionsCommandOutput, + EKSClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeAddonVersionsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: EKSClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "EKSClient"; + const commandName = "DescribeAddonVersionsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeAddonVersionsRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeAddonVersionsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeAddonVersionsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeAddonVersionsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeAddonVersionsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-eks/commands/ListAddonsCommand.ts b/clients/client-eks/commands/ListAddonsCommand.ts 
new file mode 100644 index 000000000000..c0f15ae7db74 --- /dev/null +++ b/clients/client-eks/commands/ListAddonsCommand.ts @@ -0,0 +1,88 @@ +import { EKSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../EKSClient"; +import { ListAddonsRequest, ListAddonsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListAddonsCommand, + serializeAws_restJson1ListAddonsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListAddonsCommandInput = ListAddonsRequest; +export type ListAddonsCommandOutput = ListAddonsResponse & __MetadataBearer; + +/** + *

    Lists the available add-ons.

    + */ +export class ListAddonsCommand extends $Command< + ListAddonsCommandInput, + ListAddonsCommandOutput, + EKSClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListAddonsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: EKSClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "EKSClient"; + const commandName = "ListAddonsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListAddonsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListAddonsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListAddonsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListAddonsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListAddonsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-eks/commands/UpdateAddonCommand.ts b/clients/client-eks/commands/UpdateAddonCommand.ts new file mode 100644 index 000000000000..7372ccfe777b --- /dev/null +++ 
b/clients/client-eks/commands/UpdateAddonCommand.ts @@ -0,0 +1,88 @@ +import { EKSClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../EKSClient"; +import { UpdateAddonRequest, UpdateAddonResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateAddonCommand, + serializeAws_restJson1UpdateAddonCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateAddonCommandInput = UpdateAddonRequest; +export type UpdateAddonCommandOutput = UpdateAddonResponse & __MetadataBearer; + +/** + *

    Updates an Amazon EKS add-on.

    + */ +export class UpdateAddonCommand extends $Command< + UpdateAddonCommandInput, + UpdateAddonCommandOutput, + EKSClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateAddonCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: EKSClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "EKSClient"; + const commandName = "UpdateAddonCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateAddonRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateAddonResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateAddonCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateAddonCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateAddonCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-eks/commands/UpdateNodegroupVersionCommand.ts b/clients/client-eks/commands/UpdateNodegroupVersionCommand.ts index 6500403fc73c..ee773f1bb754 100644 --- 
a/clients/client-eks/commands/UpdateNodegroupVersionCommand.ts +++ b/clients/client-eks/commands/UpdateNodegroupVersionCommand.ts @@ -31,7 +31,8 @@ export type UpdateNodegroupVersionCommandOutput = UpdateNodegroupVersionResponse * AMI version of a node group's current Kubernetes version by not specifying a Kubernetes * version in the request. You can update to the latest AMI version of your cluster's * current Kubernetes version by specifying your cluster's Kubernetes version in the - * request. For more information, see Amazon EKS-Optimized Linux AMI Versions in the Amazon EKS User Guide.

    + * request. For more information, see Amazon EKS + * optimized Amazon Linux 2 AMI versions in the Amazon EKS User Guide.

    *

    You cannot roll back a node group to an earlier Kubernetes version or AMI * version.

    *

    When a node in a managed node group is terminated due to a scaling action or update, diff --git a/clients/client-eks/index.ts b/clients/client-eks/index.ts index 9d5d5676ab6e..27272cbed53a 100644 --- a/clients/client-eks/index.ts +++ b/clients/client-eks/index.ts @@ -1,15 +1,22 @@ export * from "./EKSClient"; export * from "./EKS"; +export * from "./commands/CreateAddonCommand"; export * from "./commands/CreateClusterCommand"; export * from "./commands/CreateFargateProfileCommand"; export * from "./commands/CreateNodegroupCommand"; +export * from "./commands/DeleteAddonCommand"; export * from "./commands/DeleteClusterCommand"; export * from "./commands/DeleteFargateProfileCommand"; export * from "./commands/DeleteNodegroupCommand"; +export * from "./commands/DescribeAddonCommand"; +export * from "./commands/DescribeAddonVersionsCommand"; +export * from "./pagination/DescribeAddonVersionsPaginator"; export * from "./commands/DescribeClusterCommand"; export * from "./commands/DescribeFargateProfileCommand"; export * from "./commands/DescribeNodegroupCommand"; export * from "./commands/DescribeUpdateCommand"; +export * from "./commands/ListAddonsCommand"; +export * from "./pagination/ListAddonsPaginator"; export * from "./commands/ListClustersCommand"; export * from "./pagination/ListClustersPaginator"; export * from "./commands/ListFargateProfilesCommand"; @@ -21,6 +28,7 @@ export * from "./commands/ListUpdatesCommand"; export * from "./pagination/ListUpdatesPaginator"; export * from "./commands/TagResourceCommand"; export * from "./commands/UntagResourceCommand"; +export * from "./commands/UpdateAddonCommand"; export * from "./commands/UpdateClusterConfigCommand"; export * from "./commands/UpdateClusterVersionCommand"; export * from "./commands/UpdateNodegroupConfigCommand"; diff --git a/clients/client-eks/models/models_0.ts b/clients/client-eks/models/models_0.ts index 814c9a87244f..ea4d68c8e352 100644 --- a/clients/client-eks/models/models_0.ts +++ 
b/clients/client-eks/models/models_0.ts @@ -1,6 +1,209 @@ import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; +export enum AddonIssueCode { + ACCESS_DENIED = "AccessDenied", + CLUSTER_UNREACHABLE = "ClusterUnreachable", + CONFIGURATION_CONFLICT = "ConfigurationConflict", + INSUFFICIENT_NUMBER_OF_REPLICAS = "InsufficientNumberOfReplicas", + INTERNAL_FAILURE = "InternalFailure", +} + +/** + *

    An issue related to an add-on.

    + */ +export interface AddonIssue { + /** + *

    A code that describes the type of issue.

    + */ + code?: AddonIssueCode | string; + + /** + *

    A message that provides details about the issue and what might cause it.

    + */ + message?: string; + + /** + *

    The resource IDs of the issue.

    + */ + resourceIds?: string[]; +} + +export namespace AddonIssue { + export const filterSensitiveLog = (obj: AddonIssue): any => ({ + ...obj, + }); +} + +/** + *

    The health of the add-on.

    + */ +export interface AddonHealth { + /** + *

    An object that represents the add-on's health issues.

    + */ + issues?: AddonIssue[]; +} + +export namespace AddonHealth { + export const filterSensitiveLog = (obj: AddonHealth): any => ({ + ...obj, + }); +} + +export type AddonStatus = + | "ACTIVE" + | "CREATE_FAILED" + | "CREATING" + | "DEGRADED" + | "DELETE_FAILED" + | "DELETING" + | "UPDATING"; + +/** + *

    An Amazon EKS add-on.

    + */ +export interface Addon { + /** + *

    The name of the add-on.

    + */ + addonName?: string; + + /** + *

    The name of the cluster.

    + */ + clusterName?: string; + + /** + *

    The status of the add-on.

    + */ + status?: AddonStatus | string; + + /** + *

    The version of the add-on.

    + */ + addonVersion?: string; + + /** + *

    An object that represents the health of the add-on.

    + */ + health?: AddonHealth; + + /** + *

    The Amazon Resource Name (ARN) of the add-on.

    + */ + addonArn?: string; + + /** + *

    The date and time that the add-on was created.

    + */ + createdAt?: Date; + + /** + *

    The date and time that the add-on was last modified.

    + */ + modifiedAt?: Date; + + /** + *

    The Amazon Resource Name (ARN) of the IAM role that is bound to the Kubernetes service account used + * by the add-on.

    + */ + serviceAccountRoleArn?: string; + + /** + *

    The metadata that you apply to the cluster to assist with categorization and + * organization. Each tag consists of a key and an optional value, both of which you + * define. Cluster tags do not propagate to any other resources associated with the + * cluster.

    + */ + tags?: { [key: string]: string }; +} + +export namespace Addon { + export const filterSensitiveLog = (obj: Addon): any => ({ + ...obj, + }); +} + +/** + *

    Compatibility information.

    + */ +export interface Compatibility { + /** + *

    The supported Kubernetes version of the cluster.

    + */ + clusterVersion?: string; + + /** + *

    The supported compute platform.

    + */ + platformVersions?: string[]; + + /** + *

    The supported default version.

    + */ + defaultVersion?: boolean; +} + +export namespace Compatibility { + export const filterSensitiveLog = (obj: Compatibility): any => ({ + ...obj, + }); +} + +/** + *

    Information about an add-on version.

    + */ +export interface AddonVersionInfo { + /** + *

    The version of the add-on.

    + */ + addonVersion?: string; + + /** + *

    The architectures that the version supports.

    + */ + architecture?: string[]; + + /** + *

    An object that represents the compatibilities of a version.

    + */ + compatibilities?: Compatibility[]; +} + +export namespace AddonVersionInfo { + export const filterSensitiveLog = (obj: AddonVersionInfo): any => ({ + ...obj, + }); +} + +/** + *

    Information about an add-on.

    + */ +export interface AddonInfo { + /** + *

    The name of the add-on.

    + */ + addonName?: string; + + /** + *

    The type of the add-on.

    + */ + type?: string; + + /** + *

    An object that represents information about available add-on versions and compatible + * Kubernetes versions.

    + */ + addonVersions?: AddonVersionInfo[]; +} + +export namespace AddonInfo { + export const filterSensitiveLog = (obj: AddonInfo): any => ({ + ...obj, + }); +} + export type AMITypes = "AL2_ARM_64" | "AL2_x86_64" | "AL2_x86_64_GPU"; /** @@ -27,20 +230,239 @@ export namespace AutoScalingGroup { export interface ClientException extends __SmithyException, $MetadataBearer { name: "ClientException"; $fault: "client"; + /** + *

    The Amazon EKS cluster associated with the exception.

    + */ + clusterName?: string; + + /** + *

    The Amazon EKS managed node group associated with the exception.

    + */ + nodegroupName?: string; + + addonName?: string; + message?: string; +} + +export namespace ClientException { + export const filterSensitiveLog = (obj: ClientException): any => ({ + ...obj, + }); +} + +export type ResolveConflicts = "NONE" | "OVERWRITE"; + +export interface CreateAddonRequest { + /** + *

    The name of the cluster to create the add-on for.

    + */ + clusterName: string | undefined; + + /** + *

    The name of the add-on. The name must match one of the names returned by + * ListAddons + * .

    + */ + addonName: string | undefined; + + /** + *

    The version of the add-on. The version must match one of the versions returned by + * DescribeAddonVersions + * .

    + */ + addonVersion?: string; + + /** + *

    The Amazon Resource Name (ARN) of an existing IAM role to bind to the add-on's service account. The role must be assigned the IAM permissions required by the add-on. If you don't specify an existing IAM role, then the add-on uses the + * permissions assigned to the node IAM role. For more information, see Amazon EKS node IAM role in the Amazon EKS User Guide.

    + * + *

    To specify an existing IAM role, you must have an IAM OpenID Connect (OIDC) provider created for + * your cluster. For more information, see Enabling + * IAM roles for service accounts on your cluster in the + * Amazon EKS User Guide.

    + *
    + */ + serviceAccountRoleArn?: string; + + /** + *

    How to resolve parameter value conflicts when migrating an existing add-on to an + * Amazon EKS add-on.

    + */ + resolveConflicts?: ResolveConflicts | string; + + /** + *

    A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

    + */ + clientRequestToken?: string; + + /** + *

    The metadata to apply to the cluster to assist with categorization and organization. + * Each tag consists of a key and an optional value, both of which you define.

    + */ + tags?: { [key: string]: string }; +} + +export namespace CreateAddonRequest { + export const filterSensitiveLog = (obj: CreateAddonRequest): any => ({ + ...obj, + }); +} + +export interface CreateAddonResponse { + /** + *

    An Amazon EKS add-on.

    + */ + addon?: Addon; +} + +export namespace CreateAddonResponse { + export const filterSensitiveLog = (obj: CreateAddonResponse): any => ({ + ...obj, + }); +} + +/** + *

    The specified parameter is invalid. Review the available parameters for the API + * request.

    + */ +export interface InvalidParameterException extends __SmithyException, $MetadataBearer { + name: "InvalidParameterException"; + $fault: "client"; + /** + *

    The Amazon EKS cluster associated with the exception.

    + */ + clusterName?: string; + /** *

    The Amazon EKS managed node group associated with the exception.

    */ nodegroupName?: string; + /** + *

    The Fargate profile associated with the exception.

    + */ + fargateProfileName?: string; + + addonName?: string; message?: string; +} + +export namespace InvalidParameterException { + export const filterSensitiveLog = (obj: InvalidParameterException): any => ({ + ...obj, + }); +} + +/** + *

    The request is invalid given the state of the cluster. Check the state of the cluster + * and the associated operations.

    + */ +export interface InvalidRequestException extends __SmithyException, $MetadataBearer { + name: "InvalidRequestException"; + $fault: "client"; /** *

    The Amazon EKS cluster associated with the exception.

    */ clusterName?: string; + + /** + *

    The Amazon EKS managed node group associated with the exception.

    + */ + nodegroupName?: string; + + addonName?: string; + message?: string; } -export namespace ClientException { - export const filterSensitiveLog = (obj: ClientException): any => ({ +export namespace InvalidRequestException { + export const filterSensitiveLog = (obj: InvalidRequestException): any => ({ + ...obj, + }); +} + +/** + *

    The specified resource is in use.

    + */ +export interface ResourceInUseException extends __SmithyException, $MetadataBearer { + name: "ResourceInUseException"; + $fault: "client"; + /** + *

    The Amazon EKS cluster associated with the exception.

    + */ + clusterName?: string; + + /** + *

    The Amazon EKS managed node group associated with the exception.

    + */ + nodegroupName?: string; + + addonName?: string; + message?: string; +} + +export namespace ResourceInUseException { + export const filterSensitiveLog = (obj: ResourceInUseException): any => ({ + ...obj, + }); +} + +/** + *

    The specified resource could not be found. You can view your available clusters with + * ListClusters. You can view your available managed node groups with + * ListNodegroups. Amazon EKS clusters and node groups are + * Region-specific.

    + */ +export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { + name: "ResourceNotFoundException"; + $fault: "client"; + /** + *

    The Amazon EKS cluster associated with the exception.

    + */ + clusterName?: string; + + /** + *

    The Amazon EKS managed node group associated with the exception.

    + */ + nodegroupName?: string; + + /** + *

    The Fargate profile associated with the exception.

    + */ + fargateProfileName?: string; + + addonName?: string; + message?: string; +} + +export namespace ResourceNotFoundException { + export const filterSensitiveLog = (obj: ResourceNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

    These errors are usually caused by a server-side issue.

    + */ +export interface ServerException extends __SmithyException, $MetadataBearer { + name: "ServerException"; + $fault: "server"; + /** + *

    The Amazon EKS cluster associated with the exception.

    + */ + clusterName?: string; + + /** + *

    The Amazon EKS managed node group associated with the exception.

    + */ + nodegroupName?: string; + + addonName?: string; + message?: string; +} + +export namespace ServerException { + export const filterSensitiveLog = (obj: ServerException): any => ({ ...obj, }); } @@ -93,22 +515,28 @@ export namespace EncryptionConfig { */ export interface KubernetesNetworkConfigRequest { /** - *

    The CIDR block to assign Kubernetes service IP addresses from. If you don't specify a block, Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. We recommend that you specify a block that does not overlap with resources in other networks - * that are peered or connected to your VPC. The block must meet the following requirements:

    - *
      + *

      The CIDR block to assign Kubernetes service IP addresses from. If you don't specify a + * block, Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR + * blocks. We recommend that you specify a block that does not overlap with resources in + * other networks that are peered or connected to your VPC. The block must meet the + * following requirements:

      + *
        *
      • - *

        Within one of the following private IP address blocks: 10.0.0.0/8, 172.16.0.0.0/12, or 192.168.0.0/16.

        + *

        Within one of the following private IP address blocks: 10.0.0.0/8, + * 172.16.0.0.0/12, or 192.168.0.0/16.

        *
      • *
      • - *

        Doesn't overlap with any CIDR block assigned to the VPC that you selected for VPC.

        + *

        Doesn't overlap with any CIDR block assigned to the VPC that you selected for + * VPC.

        *
      • *
      • - *

        Between /24 and /12.

        + *

        Between /24 and /12.

        *
      • *
      * - *

      You can only specify a custom CIDR block when you create a cluster and can't change this value once the cluster is created.

      - *
      + *

      You can only specify a custom CIDR block when you create a cluster and can't + * change this value once the cluster is created.

      + * */ serviceIpv4Cidr?: string; } @@ -132,17 +560,17 @@ export enum LogType { * cluster.

      */ export interface LogSetup { + /** + *

      The available cluster control plane log types.

      + */ + types?: (LogType | string)[]; + /** *

      If a log type is enabled, that log type exports its control plane logs to CloudWatch Logs. If a * log type isn't enabled, that log type doesn't export its control plane logs. Each * individual log type can be enabled or disabled independently.

      */ enabled?: boolean; - - /** - *

      The available cluster control plane log types.

      - */ - types?: (LogType | string)[]; } export namespace LogSetup { @@ -171,14 +599,46 @@ export namespace Logging { *

      An object representing the VPC configuration to use for an Amazon EKS cluster.

      */ export interface VpcConfigRequest { + /** + *

      Specify subnets for your Amazon EKS worker nodes. Amazon EKS creates cross-account elastic + * network interfaces in these subnets to allow communication between your worker nodes and + * the Kubernetes control plane.

      + */ + subnetIds?: string[]; + /** *

      Specify one or more security groups for the cross-account elastic network interfaces * that Amazon EKS creates to use to allow communication between your worker nodes and the - * Kubernetes control plane. If you don't specify a security group, the default security - * group for your VPC is used.

      + * Kubernetes control plane. If you don't specify any security groups, then familiarize + * yourself with the difference between Amazon EKS defaults for clusters deployed with + * Kubernetes:

      + *
        + *
      • + *

        1.14 Amazon EKS platform version eks.2 and earlier

        + *
      • + *
      • + *

        1.14 Amazon EKS platform version eks.3 and later

        + *
      • + *
      + *

      For more information, see Amazon EKS security group + * considerations in the + * Amazon EKS User Guide + * .

      */ securityGroupIds?: string[]; + /** + *

      Set this value to false to disable public access to your cluster's + * Kubernetes API server endpoint. If you disable public access, your cluster's Kubernetes + * API server can only receive requests from within the cluster VPC. The default value for + * this parameter is true, which enables public access for your Kubernetes API + * server. For more information, see Amazon EKS Cluster + * Endpoint Access Control in the + * Amazon EKS User Guide + * .

      + */ + endpointPublicAccess?: boolean; + /** *

      Set this value to true to enable private access for your cluster's * Kubernetes API server endpoint. If you enable private access, Kubernetes API requests @@ -195,18 +655,6 @@ export interface VpcConfigRequest { */ endpointPrivateAccess?: boolean; - /** - *

      Set this value to false to disable public access to your cluster's - * Kubernetes API server endpoint. If you disable public access, your cluster's Kubernetes - * API server can only receive requests from within the cluster VPC. The default value for - * this parameter is true, which enables public access for your Kubernetes API - * server. For more information, see Amazon EKS Cluster - * Endpoint Access Control in the - * Amazon EKS User Guide - * .

      - */ - endpointPublicAccess?: boolean; - /** *

      The CIDR blocks that are allowed access to your cluster's public Kubernetes API server * endpoint. Communication to the endpoint from addresses outside of the CIDR blocks that @@ -218,13 +666,6 @@ export interface VpcConfigRequest { * .

      */ publicAccessCidrs?: string[]; - - /** - *

      Specify subnets for your Amazon EKS worker nodes. Amazon EKS creates cross-account elastic - * network interfaces in these subnets to allow communication between your worker nodes and - * the Kubernetes control plane.

      - */ - subnetIds?: string[]; } export namespace VpcConfigRequest { @@ -235,9 +676,25 @@ export namespace VpcConfigRequest { export interface CreateClusterRequest { /** - *

      The encryption configuration for the cluster.

      + *

      The unique name to give to your cluster.

      */ - encryptionConfig?: EncryptionConfig[]; + name: string | undefined; + + /** + *

      The desired Kubernetes version for your cluster. If you don't specify a value here, + * the latest version available in Amazon EKS is used.

      + */ + version?: string; + + /** + *

      The Amazon Resource Name (ARN) of the IAM role that provides permissions for the Kubernetes control + * plane to make calls to AWS API operations on your behalf. For more information, see + * Amazon EKS + * Service IAM Role in the + * Amazon EKS User Guide + * .

      + */ + roleArn: string | undefined; /** *

      The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have @@ -249,6 +706,11 @@ export interface CreateClusterRequest { */ resourcesVpcConfig: VpcConfigRequest | undefined; + /** + *

      The Kubernetes network configuration for the cluster.

      + */ + kubernetesNetworkConfig?: KubernetesNetworkConfigRequest; + /** *

      Enable or disable exporting the Kubernetes control plane logs for your cluster to * CloudWatch Logs. By default, cluster control plane logs aren't exported to CloudWatch Logs. For more @@ -269,27 +731,6 @@ export interface CreateClusterRequest { */ clientRequestToken?: string; - /** - *

      The Amazon Resource Name (ARN) of the IAM role that provides permissions for the Kubernetes control - * plane to make calls to AWS API operations on your behalf. For more information, see - * Amazon EKS - * Service IAM Role in the - * Amazon EKS User Guide - * .

      - */ - roleArn: string | undefined; - - /** - *

      The Kubernetes network configuration for the cluster.

      - */ - kubernetesNetworkConfig?: KubernetesNetworkConfigRequest; - - /** - *

      The desired Kubernetes version for your cluster. If you don't specify a value here, - * the latest version available in Amazon EKS is used.

      - */ - version?: string; - /** *

      The metadata to apply to the cluster to assist with categorization and organization. * Each tag consists of a key and an optional value, both of which you define.

      @@ -297,9 +738,9 @@ export interface CreateClusterRequest { tags?: { [key: string]: string }; /** - *

      The unique name to give to your cluster.

      + *

      The encryption configuration for the cluster.

      */ - name: string | undefined; + encryptionConfig?: EncryptionConfig[]; } export namespace CreateClusterRequest { @@ -366,8 +807,10 @@ export namespace Identity { */ export interface KubernetesNetworkConfigResponse { /** - *

      The CIDR block that Kubernetes service IP addresses are assigned from. If you didn't specify a CIDR block, then Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. If this was specified, then it was specified when the cluster was created and it - * cannot be changed.

      + *

      The CIDR block that Kubernetes service IP addresses are assigned from. If you didn't + * specify a CIDR block when you created the cluster, then Kubernetes assigns addresses + * from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. If this was specified, then + * it was specified when the cluster was created and it cannot be changed.

      */ serviceIpv4Cidr?: string; } @@ -383,9 +826,16 @@ export namespace KubernetesNetworkConfigResponse { */ export interface VpcConfigResponse { /** - *

      The VPC associated with your cluster.

      + *

      The subnets associated with your cluster.

      */ - vpcId?: string; + subnetIds?: string[]; + + /** + *

      The security groups associated with the cross-account elastic network interfaces that + * are used to allow communication between your worker nodes and the Kubernetes control + * plane.

      + */ + securityGroupIds?: string[]; /** *

      The cluster security group that was created by Amazon EKS for the cluster. Managed node @@ -394,16 +844,9 @@ export interface VpcConfigResponse { clusterSecurityGroupId?: string; /** - *

      The CIDR blocks that are allowed access to your cluster's public Kubernetes API server - * endpoint. Communication to the endpoint from addresses outside of the listed CIDR blocks - * is denied. The default value is 0.0.0.0/0. If you've disabled private - * endpoint access and you have worker nodes or AWS Fargate pods in the cluster, then ensure - * that the necessary CIDR blocks are listed. For more information, see Amazon EKS Cluster - * Endpoint Access Control in the - * Amazon EKS User Guide - * .

      + *

      The VPC associated with your cluster.

      */ - publicAccessCidrs?: string[]; + vpcId?: string; /** *

      This parameter indicates whether the Amazon EKS public API server endpoint is enabled. If @@ -412,18 +855,6 @@ export interface VpcConfigResponse { */ endpointPublicAccess?: boolean; - /** - *

      The subnets associated with your cluster.

      - */ - subnetIds?: string[]; - - /** - *

      The security groups associated with the cross-account elastic network interfaces that - * are used to allow communication between your worker nodes and the Kubernetes control - * plane.

      - */ - securityGroupIds?: string[]; - /** *

      This parameter indicates whether the Amazon EKS private API server endpoint is enabled. If * the Amazon EKS private API server endpoint is enabled, Kubernetes API requests that originate @@ -438,6 +869,18 @@ export interface VpcConfigResponse { * .

      */ endpointPrivateAccess?: boolean; + + /** + *

      The CIDR blocks that are allowed access to your cluster's public Kubernetes API server + * endpoint. Communication to the endpoint from addresses outside of the listed CIDR blocks + * is denied. The default value is 0.0.0.0/0. If you've disabled private + * endpoint access and you have worker nodes or AWS Fargate pods in the cluster, then ensure + * that the necessary CIDR blocks are listed. For more information, see Amazon EKS Cluster + * Endpoint Access Control in the + * Amazon EKS User Guide + * .

      + */ + publicAccessCidrs?: string[]; } export namespace VpcConfigResponse { @@ -453,17 +896,14 @@ export type ClusterStatus = "ACTIVE" | "CREATING" | "DELETING" | "FAILED" | "UPD */ export interface Cluster { /** - *

      The endpoint for your Kubernetes API server.

      + *

      The name of the cluster.

      */ - endpoint?: string; + name?: string; /** - *

      The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have - * specific requirements to work properly with Kubernetes. For more information, see Cluster VPC - * Considerations and Cluster Security Group Considerations in the - * Amazon EKS User Guide.

      + *

      The Amazon Resource Name (ARN) of the cluster.

      */ - resourcesVpcConfig?: VpcConfigResponse; + arn?: string; /** *

      The Unix epoch timestamp in seconds for when the cluster was created.

      @@ -471,34 +911,38 @@ export interface Cluster { createdAt?: Date; /** - *

      The current status of the cluster.

      + *

      The Kubernetes server version for the cluster.

      */ - status?: ClusterStatus | string; + version?: string; /** - *

      Network configuration settings for your cluster.

      + *

      The endpoint for your Kubernetes API server.

      */ - kubernetesNetworkConfig?: KubernetesNetworkConfigResponse; + endpoint?: string; /** - *

      The certificate-authority-data for your cluster.

      + *

      The Amazon Resource Name (ARN) of the IAM role that provides permissions for the Kubernetes control + * plane to make calls to AWS API operations on your behalf.

      */ - certificateAuthority?: Certificate; + roleArn?: string; /** - *

      The logging configuration for your cluster.

      + *

      The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have + * specific requirements to work properly with Kubernetes. For more information, see Cluster VPC + * Considerations and Cluster Security Group Considerations in the + * Amazon EKS User Guide.

      */ - logging?: Logging; + resourcesVpcConfig?: VpcConfigResponse; /** - *

      The name of the cluster.

      + *

      The Kubernetes network configuration for the cluster.

      */ - name?: string; + kubernetesNetworkConfig?: KubernetesNetworkConfigResponse; /** - *

      The Amazon Resource Name (ARN) of the cluster.

      + *

      The logging configuration for your cluster.

      */ - arn?: string; + logging?: Logging; /** *

      The identity provider information for the cluster.

      @@ -506,23 +950,20 @@ export interface Cluster { identity?: Identity; /** - *

      The Amazon Resource Name (ARN) of the IAM role that provides permissions for the Kubernetes control - * plane to make calls to AWS API operations on your behalf.

      + *

      The current status of the cluster.

      */ - roleArn?: string; + status?: ClusterStatus | string; /** - *

      The encryption configuration for the cluster.

      + *

      The certificate-authority-data for your cluster.

      */ - encryptionConfig?: EncryptionConfig[]; + certificateAuthority?: Certificate; /** - *

      The metadata that you apply to the cluster to assist with categorization and - * organization. Each tag consists of a key and an optional value, both of which you - * define. Cluster tags do not propagate to any other resources associated with the - * cluster.

      + *

      Unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

      */ - tags?: { [key: string]: string }; + clientRequestToken?: string; /** *

      The platform version of your Amazon EKS cluster. For more information, see Platform @@ -533,15 +974,17 @@ export interface Cluster { platformVersion?: string; /** - *

      Unique, case-sensitive identifier that you provide to ensure the idempotency of the - * request.

      + *

      The metadata that you apply to the cluster to assist with categorization and + * organization. Each tag consists of a key and an optional value, both of which you + * define. Cluster tags do not propagate to any other resources associated with the + * cluster.

      */ - clientRequestToken?: string; + tags?: { [key: string]: string }; /** - *

      The Kubernetes server version for the cluster.

      + *

      The encryption configuration for the cluster.

      */ - version?: string; + encryptionConfig?: EncryptionConfig[]; } export namespace Cluster { @@ -563,60 +1006,6 @@ export namespace CreateClusterResponse { }); } -/** - *

      The specified parameter is invalid. Review the available parameters for the API - * request.

      - */ -export interface InvalidParameterException extends __SmithyException, $MetadataBearer { - name: "InvalidParameterException"; - $fault: "client"; - message?: string; - /** - *

      The Fargate profile associated with the exception.

      - */ - fargateProfileName?: string; - - /** - *

      The Amazon EKS managed node group associated with the exception.

      - */ - nodegroupName?: string; - - /** - *

      The Amazon EKS cluster associated with the exception.

      - */ - clusterName?: string; -} - -export namespace InvalidParameterException { - export const filterSensitiveLog = (obj: InvalidParameterException): any => ({ - ...obj, - }); -} - -/** - *

      The specified resource is in use.

      - */ -export interface ResourceInUseException extends __SmithyException, $MetadataBearer { - name: "ResourceInUseException"; - $fault: "client"; - message?: string; - /** - *

      The Amazon EKS managed node group associated with the exception.

      - */ - nodegroupName?: string; - - /** - *

      The Amazon EKS cluster associated with the exception.

      - */ - clusterName?: string; -} - -export namespace ResourceInUseException { - export const filterSensitiveLog = (obj: ResourceInUseException): any => ({ - ...obj, - }); -} - /** *

      You have encountered a service limit on the specified resource.

      */ @@ -642,30 +1031,6 @@ export namespace ResourceLimitExceededException { }); } -/** - *

      These errors are usually caused by a server-side issue.

      - */ -export interface ServerException extends __SmithyException, $MetadataBearer { - name: "ServerException"; - $fault: "server"; - message?: string; - /** - *

      The Amazon EKS managed node group associated with the exception.

      - */ - nodegroupName?: string; - - /** - *

      The Amazon EKS cluster associated with the exception.

      - */ - clusterName?: string; -} - -export namespace ServerException { - export const filterSensitiveLog = (obj: ServerException): any => ({ - ...obj, - }); -} - /** *

      The service is unavailable. Back off and retry the operation.

      */ @@ -689,12 +1054,7 @@ export namespace ServiceUnavailableException { export interface UnsupportedAvailabilityZoneException extends __SmithyException, $MetadataBearer { name: "UnsupportedAvailabilityZoneException"; $fault: "client"; - /** - *

      The supported Availability Zones for your account. Choose subnets in these - * Availability Zones for your cluster.

      - */ - validZones?: string[]; - + message?: string; /** *

      The Amazon EKS cluster associated with the exception.

      */ @@ -705,7 +1065,11 @@ export interface UnsupportedAvailabilityZoneException extends __SmithyException, */ nodegroupName?: string; - message?: string; + /** + *

      The supported Availability Zones for your account. Choose subnets in these + * Availability Zones for your cluster.

      + */ + validZones?: string[]; } export namespace UnsupportedAvailabilityZoneException { @@ -719,15 +1083,15 @@ export namespace UnsupportedAvailabilityZoneException { */ export interface FargateProfileSelector { /** - *

      The Kubernetes labels that the selector should match. A pod must contain all of the - * labels that are specified in the selector for it to be considered a match.

      + *

      The Kubernetes namespace that the selector should match.

      */ - labels?: { [key: string]: string }; + namespace?: string; /** - *

      The Kubernetes namespace that the selector should match.

      + *

      The Kubernetes labels that the selector should match. A pod must contain all of the + * labels that are specified in the selector for it to be considered a match.

      */ - namespace?: string; + labels?: { [key: string]: string }; } export namespace FargateProfileSelector { @@ -743,11 +1107,18 @@ export interface CreateFargateProfileRequest { fargateProfileName: string | undefined; /** - *

      The selectors to match for pods to use this Fargate profile. Each selector must have an - * associated namespace. Optionally, you can also specify labels for a namespace. You may - * specify up to five selectors in a Fargate profile.

      + *

      The name of the Amazon EKS cluster to apply the Fargate profile to.

      */ - selectors?: FargateProfileSelector[]; + clusterName: string | undefined; + + /** + *

      The Amazon Resource Name (ARN) of the pod execution role to use for pods that match the selectors in + * the Fargate profile. The pod execution role allows Fargate infrastructure to register with + * your cluster as a node, and it provides read access to Amazon ECR image repositories. For + * more information, see Pod Execution Role in the + * Amazon EKS User Guide.

      + */ + podExecutionRoleArn: string | undefined; /** *

      The IDs of subnets to launch your pods into. At this time, pods running on Fargate are @@ -757,17 +1128,11 @@ export interface CreateFargateProfileRequest { subnets?: string[]; /** - *

      The metadata to apply to the Fargate profile to assist with categorization and - * organization. Each tag consists of a key and an optional value, both of which you - * define. Fargate profile tags do not propagate to any other resources associated with the - * Fargate profile, such as the pods that are scheduled with it.

      - */ - tags?: { [key: string]: string }; - - /** - *

      The name of the Amazon EKS cluster to apply the Fargate profile to.

      + *

      The selectors to match for pods to use this Fargate profile. Each selector must have an + * associated namespace. Optionally, you can also specify labels for a namespace. You may + * specify up to five selectors in a Fargate profile.

      */ - clusterName: string | undefined; + selectors?: FargateProfileSelector[]; /** *

      Unique, case-sensitive identifier that you provide to ensure the idempotency of the @@ -776,13 +1141,12 @@ export interface CreateFargateProfileRequest { clientRequestToken?: string; /** - *

      The Amazon Resource Name (ARN) of the pod execution role to use for pods that match the selectors in - * the Fargate profile. The pod execution role allows Fargate infrastructure to register with - * your cluster as a node, and it provides read access to Amazon ECR image repositories. For - * more information, see Pod Execution Role in the - * Amazon EKS User Guide.

      + *

      The metadata to apply to the Fargate profile to assist with categorization and + * organization. Each tag consists of a key and an optional value, both of which you + * define. Fargate profile tags do not propagate to any other resources associated with the + * Fargate profile, such as the pods that are scheduled with it.

      */ - podExecutionRoleArn: string | undefined; + tags?: { [key: string]: string }; } export namespace CreateFargateProfileRequest { @@ -797,26 +1161,26 @@ export type FargateProfileStatus = "ACTIVE" | "CREATE_FAILED" | "CREATING" | "DE *

      An object representing an AWS Fargate profile.

      */ export interface FargateProfile { - /** - *

      The Unix epoch timestamp in seconds for when the Fargate profile was created.

      - */ - createdAt?: Date; - /** *

      The name of the Fargate profile.

      */ fargateProfileName?: string; /** - *

      The selectors to match for pods to use this Fargate profile.

      + *

      The full Amazon Resource Name (ARN) of the Fargate profile.

      */ - selectors?: FargateProfileSelector[]; + fargateProfileArn?: string; /** *

      The name of the Amazon EKS cluster that the Fargate profile belongs to.

      */ clusterName?: string; + /** + *

      The Unix epoch timestamp in seconds for when the Fargate profile was created.

      + */ + createdAt?: Date; + /** *

      The Amazon Resource Name (ARN) of the pod execution role to use for pods that match the selectors in * the Fargate profile. For more information, see Pod @@ -825,27 +1189,27 @@ export interface FargateProfile { podExecutionRoleArn?: string; /** - *

      The metadata applied to the Fargate profile to assist with categorization and - * organization. Each tag consists of a key and an optional value, both of which you - * define. Fargate profile tags do not propagate to any other resources associated with the - * Fargate profile, such as the pods that are scheduled with it.

      + *

      The IDs of subnets to launch pods into.

      */ - tags?: { [key: string]: string }; + subnets?: string[]; /** - *

      The current status of the Fargate profile.

      + *

      The selectors to match for pods to use this Fargate profile.

      */ - status?: FargateProfileStatus | string; + selectors?: FargateProfileSelector[]; /** - *

      The full Amazon Resource Name (ARN) of the Fargate profile.

      + *

      The current status of the Fargate profile.

      */ - fargateProfileArn?: string; + status?: FargateProfileStatus | string; /** - *

      The IDs of subnets to launch pods into.

      + *

      The metadata applied to the Fargate profile to assist with categorization and + * organization. Each tag consists of a key and an optional value, both of which you + * define. Fargate profile tags do not propagate to any other resources associated with the + * Fargate profile, such as the pods that are scheduled with it.

      */ - subnets?: string[]; + tags?: { [key: string]: string }; } export namespace FargateProfile { @@ -862,36 +1226,13 @@ export interface CreateFargateProfileResponse { } export namespace CreateFargateProfileResponse { - export const filterSensitiveLog = (obj: CreateFargateProfileResponse): any => ({ - ...obj, - }); -} - -/** - *

      The request is invalid given the state of the cluster. Check the state of the cluster - * and the associated operations.

      - */ -export interface InvalidRequestException extends __SmithyException, $MetadataBearer { - name: "InvalidRequestException"; - $fault: "client"; - /** - *

      The Amazon EKS managed node group associated with the exception.

      - */ - nodegroupName?: string; - - message?: string; - /** - *

      The Amazon EKS cluster associated with the exception.

      - */ - clusterName?: string; -} - -export namespace InvalidRequestException { - export const filterSensitiveLog = (obj: InvalidRequestException): any => ({ + export const filterSensitiveLog = (obj: CreateFargateProfileResponse): any => ({ ...obj, }); } +export type CapacityTypes = "ON_DEMAND" | "SPOT"; + /** *

      An object representing a node group launch template specification. The launch template * cannot include @@ -904,14 +1245,19 @@ export namespace InvalidRequestException { * HibernationOptions * , or * TerminateInstances - * , or the node group deployment or update - * will fail. For more information about launch templates, see + * , or the node group deployment or + * update will fail. For more information about launch templates, see * CreateLaunchTemplate * in the Amazon EC2 API Reference. * For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      *

      Specify either name or id, but not both.

      */ export interface LaunchTemplateSpecification { + /** + *

      The name of the launch template.

      + */ + name?: string; + /** *

      The version of the launch template to use. If no version is specified, then the * template's default version is used.

      @@ -922,11 +1268,6 @@ export interface LaunchTemplateSpecification { *

      The ID of the launch template.

      */ id?: string; - - /** - *

      The name of the launch template.

      - */ - name?: string; } export namespace LaunchTemplateSpecification { @@ -969,18 +1310,18 @@ export namespace RemoteAccessConfig { * you must specify values for all of the properties.

      */ export interface NodegroupScalingConfig { - /** - *

      The maximum number of worker nodes that the managed node group can scale out to. - * Managed node groups can support up to 100 nodes by default.

      - */ - maxSize?: number; - /** *

      The minimum number of worker nodes that the managed node group can scale in to. This * number must be greater than zero.

      */ minSize?: number; + /** + *

      The maximum number of worker nodes that the managed node group can scale out to. + * Managed node groups can support up to 100 nodes by default.

      + */ + maxSize?: number; + /** *

      The current number of worker nodes that the managed node group should maintain.

      */ @@ -995,71 +1336,27 @@ export namespace NodegroupScalingConfig { export interface CreateNodegroupRequest { /** - *

      The metadata to apply to the node group to assist with categorization and - * organization. Each tag consists of a key and an optional value, both of which you - * define. Node group tags do not propagate to any other resources associated with the node - * group, such as the Amazon EC2 instances or subnets.

      - */ - tags?: { [key: string]: string }; - - /** - *

      The Kubernetes version to use for your managed nodes. By default, the Kubernetes - * version of the cluster is used, and this is the only accepted specified value. - * If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify version, - * or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      + *

      The name of the cluster to create the node group in.

      */ - version?: string; + clusterName: string | undefined; /** - *

      The remote access (SSH) configuration to use with your node group. If you specify launchTemplate, - * then don't specify remoteAccess, or the node group deployment - * will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      + *

      The unique name to give your node group.

      */ - remoteAccess?: RemoteAccessConfig; + nodegroupName: string | undefined; /** - *

      An object representing a node group's launch template specification. If specified, - * then do not specify instanceTypes, diskSize, or - * remoteAccess and make sure that the launch template meets the - * requirements in launchTemplateSpecification.

      + *

      The scaling configuration details for the Auto Scaling group that is created for your + * node group.

      */ - launchTemplate?: LaunchTemplateSpecification; + scalingConfig?: NodegroupScalingConfig; /** - *

      The AMI version of the Amazon EKS-optimized AMI to use with your node group. By default, - * the latest available AMI version for the node group's current Kubernetes version is - * used. For more information, see Amazon EKS-Optimized Linux AMI Versions in the Amazon EKS User Guide. - * If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify releaseVersion, + *

      The root device disk size (in GiB) for your node group instances. The default disk + * size is 20 GiB. If you specify launchTemplate, then don't specify diskSize, * or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      */ - releaseVersion?: string; - - /** - *

      The Amazon Resource Name (ARN) of the IAM role to associate with your node group. The Amazon EKS worker - * node kubelet daemon makes calls to AWS APIs on your behalf. Worker nodes - * receive permissions for these API calls through an IAM instance profile and associated - * policies. Before you can launch worker nodes and register them into a cluster, you must - * create an IAM role for those worker nodes to use when they are launched. For more - * information, see Amazon EKS Worker Node IAM Role in the - * - * Amazon EKS User Guide - * . If you specify launchTemplate, then don't specify - * - * IamInstanceProfile - * in your launch template, or the node group - * deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      - */ - nodeRole: string | undefined; - - /** - *

      The instance type to use for your node group. You can specify a single instance type - * for a node group. The default value for instanceTypes is - * t3.medium. If you choose a GPU instance type, be sure to specify - * AL2_x86_64_GPU with the amiType parameter. - * If you specify launchTemplate, then don't specify instanceTypes, or the node group - * deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      - */ - instanceTypes?: string[]; + diskSize?: number; /** *

      The subnets to use for the Auto Scaling group that is created for your node group. @@ -1073,24 +1370,54 @@ export interface CreateNodegroupRequest { subnets: string[] | undefined; /** - *

      The unique name to give your node group.

      + *

      Specify the instance types for a node group. If you specify a GPU instance type, be + * sure to specify AL2_x86_64_GPU with the amiType parameter. If + * you specify launchTemplate, then you can specify zero or one instance type + * in your launch template or you can specify 0-20 instance types for + * instanceTypes. If however, you specify an instance type in your launch + * template and specify any instanceTypes, the node group + * deployment will fail. If you don't specify an instance type in a launch template or for + * instanceTypes, then t3.medium is used, by default. If you + * specify Spot for capacityType, then we recommend specifying + * multiple values for instanceTypes. For more information, see Managed node group + * capacity types and Launch template support in + * the Amazon EKS User Guide.

      */ - nodegroupName: string | undefined; + instanceTypes?: string[]; /** *

      The AMI type for your node group. GPU instance types should use the * AL2_x86_64_GPU AMI type. Non-GPU instances should use the * AL2_x86_64 AMI type. Arm instances should use the - * AL2_ARM_64 AMI type. All types use the Amazon EKS-optimized Amazon Linux 2 AMI. + * AL2_ARM_64 AMI type. All types use the Amazon EKS optimized Amazon Linux 2 AMI. * If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify amiType, * or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      */ amiType?: AMITypes | string; /** - *

      The name of the cluster to create the node group in.

      + *

      The remote access (SSH) configuration to use with your node group. If you specify launchTemplate, + * then don't specify remoteAccess, or the node group deployment + * will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      */ - clusterName: string | undefined; + remoteAccess?: RemoteAccessConfig; + + /** + *

      The Amazon Resource Name (ARN) of the IAM role to associate with your node group. The Amazon EKS worker + * node kubelet daemon makes calls to AWS APIs on your behalf. Worker nodes + * receive permissions for these API calls through an IAM instance profile and associated + * policies. Before you can launch worker nodes and register them into a cluster, you must + * create an IAM role for those worker nodes to use when they are launched. For more + * information, see Amazon EKS Worker Node IAM Role in the + * + * Amazon EKS User Guide + * . If you specify launchTemplate, then don't specify + * + * IamInstanceProfile + * in your launch template, + * or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      + */ + nodeRole: string | undefined; /** *

      The Kubernetes labels to be applied to the nodes in the node group when they are @@ -1099,11 +1426,12 @@ export interface CreateNodegroupRequest { labels?: { [key: string]: string }; /** - *

      The root device disk size (in GiB) for your node group instances. The default disk - * size is 20 GiB. If you specify launchTemplate, then don't specify diskSize, - * or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      + *

      The metadata to apply to the node group to assist with categorization and + * organization. Each tag consists of a key and an optional value, both of which you + * define. Node group tags do not propagate to any other resources associated with the node + * group, such as the Amazon EC2 instances or subnets.

      */ - diskSize?: number; + tags?: { [key: string]: string }; /** *

      Unique, case-sensitive identifier that you provide to ensure the idempotency of the @@ -1112,10 +1440,35 @@ export interface CreateNodegroupRequest { clientRequestToken?: string; /** - *

      The scaling configuration details for the Auto Scaling group that is created for your - * node group.

      + *

      An object representing a node group's launch template specification. If specified, + * then do not specify instanceTypes, diskSize, or + * remoteAccess and make sure that the launch template meets the + * requirements in launchTemplateSpecification.

      */ - scalingConfig?: NodegroupScalingConfig; + launchTemplate?: LaunchTemplateSpecification; + + /** + *

      The capacity type for your node group.

      + */ + capacityType?: CapacityTypes | string; + + /** + *

      The Kubernetes version to use for your managed nodes. By default, the Kubernetes + * version of the cluster is used, and this is the only accepted specified value. + * If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify version, + * or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      + */ + version?: string; + + /** + *

      The AMI version of the Amazon EKS optimized AMI to use with your node group. By default, + * the latest available AMI version for the node group's current Kubernetes version is + * used. For more information, see Amazon EKS + * optimized Amazon Linux 2 AMI versions in the Amazon EKS User Guide. If you specify launchTemplate, + * and your launch template uses a custom AMI, then don't specify releaseVersion, or the node group + * deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

      + */ + releaseVersion?: string; } export namespace CreateNodegroupRequest { @@ -1149,36 +1502,32 @@ export enum NodegroupIssueCode { *

      An object representing an issue with an Amazon EKS resource.

      */ export interface Issue { - /** - *

      The AWS resources that are afflicted by this issue.

      - */ - resourceIds?: string[]; - - /** - *

      The error message associated with the issue.

      - */ - message?: string; - /** *

      A brief description of the error.

      *
        *
      • *

        - * AutoScalingGroupNotFound: We couldn't find - * the Auto Scaling group associated with the managed node group. You may be able to - * recreate an Auto Scaling group with the same settings to recover.

        + * AccessDenied: Amazon EKS or one or more of your + * managed nodes is failing to authenticate or authorize with your Kubernetes + * cluster API server.

        *
      • *
      • *

        - * Ec2SecurityGroupNotFound: We couldn't find - * the cluster security group for the cluster. You must recreate your - * cluster.

        + * AsgInstanceLaunchFailures: Your Auto Scaling group is + * experiencing failures while attempting to launch instances.

        *
      • *
      • *

        - * Ec2SecurityGroupDeletionFailure: We could not - * delete the remote access security group for your managed node group. Remove any - * dependencies from the security group.

        + * AutoScalingGroupNotFound: We couldn't find + * the Auto Scaling group associated with the managed node group. You may be able to + * recreate an Auto Scaling group with the same settings to recover.

        + *
      • + *
      • + *

        + * ClusterUnreachable: Amazon EKS or one or more of + * your managed nodes is unable to communicate with your Kubernetes cluster API + * server. This can happen if there are network disruptions or if API servers are + * timing out processing requests.

        *
      • *
      • *

        @@ -1195,6 +1544,18 @@ export interface Issue { *

      • *
      • *

        + * Ec2SecurityGroupDeletionFailure: We could not + * delete the remote access security group for your managed node group. Remove any + * dependencies from the security group.

        + *
      • + *
      • + *

        + * Ec2SecurityGroupNotFound: We couldn't find + * the cluster security group for the cluster. You must recreate your + * cluster.

        + *
      • + *
      • + *

        * Ec2SubnetInvalidConfiguration: One or more * Amazon EC2 subnets specified for a node group do not automatically assign public IP * addresses to instances launched into it. If you want your instances to be @@ -1217,19 +1578,6 @@ export interface Issue { *

      • *
      • *

        - * AsgInstanceLaunchFailures: Your Auto Scaling group is - * experiencing failures while attempting to launch instances.

        - *
      • - *
      • - *

        - * NodeCreationFailure: Your launched instances - * are unable to register with your Amazon EKS cluster. Common causes of this failure - * are insufficient worker node IAM - * role permissions or lack of outbound internet access for the nodes. - *

        - *
      • - *
      • - *

        * InstanceLimitExceeded: Your AWS account is * unable to launch any more instances of the specified instance type. You may be * able to request an Amazon EC2 instance limit increase to recover.

        @@ -1242,17 +1590,30 @@ export interface Issue { *
      • *
      • *

        - * AccessDenied: Amazon EKS or one or more of your - * managed nodes is unable to communicate with your cluster API server.

        + * InternalFailure: These errors are usually + * caused by an Amazon EKS server-side issue.

        *
      • *
      • *

        - * InternalFailure: These errors are usually - * caused by an Amazon EKS server-side issue.

        + * NodeCreationFailure: Your launched instances + * are unable to register with your Amazon EKS cluster. Common causes of this failure + * are insufficient worker node IAM + * role permissions or lack of outbound internet access for the nodes. + *

        *
      • *
      */ code?: NodegroupIssueCode | string; + + /** + *

      The error message associated with the issue.

      + */ + message?: string; + + /** + *

      The AWS resources that are afflicted by this issue.

      + */ + resourceIds?: string[]; } export namespace Issue { @@ -1283,73 +1644,45 @@ export namespace NodegroupHealth { */ export interface NodegroupResources { /** - *

      The remote access security group associated with the node group. This security group - * controls SSH access to the worker nodes.

      + *

      The Auto Scaling groups associated with the node group.

      */ - remoteAccessSecurityGroup?: string; + autoScalingGroups?: AutoScalingGroup[]; /** - *

      The Auto Scaling groups associated with the node group.

      + *

      The remote access security group associated with the node group. This security group + * controls SSH access to the worker nodes.

      */ - autoScalingGroups?: AutoScalingGroup[]; + remoteAccessSecurityGroup?: string; } export namespace NodegroupResources { export const filterSensitiveLog = (obj: NodegroupResources): any => ({ ...obj, - }); -} - -export type NodegroupStatus = - | "ACTIVE" - | "CREATE_FAILED" - | "CREATING" - | "DEGRADED" - | "DELETE_FAILED" - | "DELETING" - | "UPDATING"; - -/** - *

      An object representing an Amazon EKS managed node group.

      - */ -export interface Nodegroup { - /** - *

      The name associated with an Amazon EKS managed node group.

      - */ - nodegroupName?: string; - - /** - *

      If the node group was deployed using a launch template with a custom AMI, then this is - * CUSTOM. For node groups that weren't deployed using a launch template, - * this is the AMI type that was specified in the node group configuration.

      - */ - amiType?: AMITypes | string; - - /** - *

      The Kubernetes labels applied to the nodes in the node group.

      - * - *

      Only labels that are applied with the Amazon EKS API are shown here. There may be other - * Kubernetes labels applied to the nodes in this group.

      - *
      - */ - labels?: { [key: string]: string }; + }); +} - /** - *

      The subnets that were specified for the Auto Scaling group that is associated with - * your node group.

      - */ - subnets?: string[]; +export type NodegroupStatus = + | "ACTIVE" + | "CREATE_FAILED" + | "CREATING" + | "DEGRADED" + | "DELETE_FAILED" + | "DELETING" + | "UPDATING"; +/** + *

      An object representing an Amazon EKS managed node group.

      + */ +export interface Nodegroup { /** - *

      The health status of the node group. If there are issues with your node group's - * health, they are listed here.

      + *

      The name associated with an Amazon EKS managed node group.

      */ - health?: NodegroupHealth; + nodegroupName?: string; /** - *

      The current status of the managed node group.

      + *

      The Amazon Resource Name (ARN) associated with the managed node group.

      */ - status?: NodegroupStatus | string; + nodegroupArn?: string; /** *

      The name of the cluster that the managed node group resides in.

      @@ -1357,17 +1690,23 @@ export interface Nodegroup { clusterName?: string; /** - *

      The Amazon Resource Name (ARN) associated with the managed node group.

      + *

      The Kubernetes version of the managed node group.

      */ - nodegroupArn?: string; + version?: string; /** - *

      If the node group wasn't deployed with a launch template, then this is the remote - * access configuration that is associated with the node group. If the node group was - * deployed with a launch template, then this is - * null.

      + *

      If the node group was deployed using a launch template with a custom AMI, then this is + * the AMI ID that was specified in the launch template. For node groups that weren't + * deployed using a launch template, this is the version of the Amazon EKS optimized AMI that + * the node group was deployed with.

      */ - remoteAccess?: RemoteAccessConfig; + releaseVersion?: string; + + /** + *

      The Unix epoch timestamp in seconds for when the managed node group was + * created.

      + */ + createdAt?: Date; /** *

      The Unix epoch timestamp in seconds for when the managed node group was last @@ -1376,22 +1715,20 @@ export interface Nodegroup { modifiedAt?: Date; /** - *

      The scaling configuration details for the Auto Scaling group that is associated with - * your node group.

      + *

      The current status of the managed node group.

      */ - scalingConfig?: NodegroupScalingConfig; + status?: NodegroupStatus | string; /** - *

      The resources associated with the node group, such as Auto Scaling groups and security - * groups for remote access.

      + *

      The capacity type of your managed node group.

      */ - resources?: NodegroupResources; + capacityType?: CapacityTypes | string; /** - *

      If a launch template was used to create the node group, then this is the launch - * template that was used.

      + *

      The scaling configuration details for the Auto Scaling group that is associated with + * your node group.

      */ - launchTemplate?: LaunchTemplateSpecification; + scalingConfig?: NodegroupScalingConfig; /** *

      If the node group wasn't deployed with a launch template, then this is the instance @@ -1401,23 +1738,24 @@ export interface Nodegroup { instanceTypes?: string[]; /** - *

      The metadata applied to the node group to assist with categorization and organization. - * Each tag consists of a key and an optional value, both of which you define. Node group - * tags do not propagate to any other resources associated with the node group, such as the - * Amazon EC2 instances or subnets.

      + *

      The subnets that were specified for the Auto Scaling group that is associated with + * your node group.

      */ - tags?: { [key: string]: string }; + subnets?: string[]; /** - *

      The Kubernetes version of the managed node group.

      + *

      If the node group wasn't deployed with a launch template, then this is the remote + * access configuration that is associated with the node group. If the node group was + * deployed with a launch template, then this is null.

      */ - version?: string; + remoteAccess?: RemoteAccessConfig; /** - *

      The Unix epoch timestamp in seconds for when the managed node group was - * created.

      + *

      If the node group was deployed using a launch template with a custom AMI, then this is + * CUSTOM. For node groups that weren't deployed using a launch template, + * this is the AMI type that was specified in the node group configuration.

      */ - createdAt?: Date; + amiType?: AMITypes | string; /** *

      The IAM role associated with your node group. The Amazon EKS worker node @@ -1427,6 +1765,21 @@ export interface Nodegroup { */ nodeRole?: string; + /** + *

      The Kubernetes labels applied to the nodes in the node group.

      + * + *

      Only labels that are applied with the Amazon EKS API are shown here. There may be other + * Kubernetes labels applied to the nodes in this group.

      + *
      + */ + labels?: { [key: string]: string }; + + /** + *

      The resources associated with the node group, such as Auto Scaling groups and security + * groups for remote access.

      + */ + resources?: NodegroupResources; + /** *

      If the node group wasn't deployed with a launch template, then this is the disk size * in the node group configuration. If the node group was deployed with a launch template, @@ -1435,12 +1788,24 @@ export interface Nodegroup { diskSize?: number; /** - *

      If the node group was deployed using a launch template with a custom AMI, then this is - * the AMI ID that was specified in the launch template. For node groups that weren't - * deployed using a launch template, this is the version of the Amazon EKS-optimized AMI that - * the node group was deployed with.

      + *

      The health status of the node group. If there are issues with your node group's + * health, they are listed here.

      */ - releaseVersion?: string; + health?: NodegroupHealth; + + /** + *

      If a launch template was used to create the node group, then this is the launch + * template that was used.

      + */ + launchTemplate?: LaunchTemplateSpecification; + + /** + *

      The metadata applied to the node group to assist with categorization and organization. + * Each tag consists of a key and an optional value, both of which you define. Node group + * tags do not propagate to any other resources associated with the node group, such as the + * Amazon EC2 instances or subnets.

      + */ + tags?: { [key: string]: string }; } export namespace Nodegroup { @@ -1462,60 +1827,61 @@ export namespace CreateNodegroupResponse { }); } -export interface DeleteClusterRequest { +export interface DeleteAddonRequest { /** - *

      The name of the cluster to delete.

      + *

      The name of the cluster to delete the add-on from.

      */ - name: string | undefined; + clusterName: string | undefined; + + /** + *

      The name of the add-on. The name must match one of the names returned by + * ListAddons + * .

      + */ + addonName: string | undefined; } -export namespace DeleteClusterRequest { - export const filterSensitiveLog = (obj: DeleteClusterRequest): any => ({ +export namespace DeleteAddonRequest { + export const filterSensitiveLog = (obj: DeleteAddonRequest): any => ({ ...obj, }); } -export interface DeleteClusterResponse { +export interface DeleteAddonResponse { /** - *

      The full description of the cluster to delete.

      + *

      An Amazon EKS add-on.

      */ - cluster?: Cluster; + addon?: Addon; } -export namespace DeleteClusterResponse { - export const filterSensitiveLog = (obj: DeleteClusterResponse): any => ({ +export namespace DeleteAddonResponse { + export const filterSensitiveLog = (obj: DeleteAddonResponse): any => ({ ...obj, }); } -/** - *

      The specified resource could not be found. You can view your available clusters with - * ListClusters. You can view your available managed node groups with - * ListNodegroups. Amazon EKS clusters and node groups are - * Region-specific.

      - */ -export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { - name: "ResourceNotFoundException"; - $fault: "client"; +export interface DeleteClusterRequest { /** - *

      The Amazon EKS cluster associated with the exception.

      + *

      The name of the cluster to delete.

      */ - clusterName?: string; + name: string | undefined; +} - /** - *

      The Amazon EKS managed node group associated with the exception.

      - */ - nodegroupName?: string; +export namespace DeleteClusterRequest { + export const filterSensitiveLog = (obj: DeleteClusterRequest): any => ({ + ...obj, + }); +} - message?: string; +export interface DeleteClusterResponse { /** - *

      The Fargate profile associated with the exception.

      + *

      The full description of the cluster to delete.

      */ - fargateProfileName?: string; + cluster?: Cluster; } -export namespace ResourceNotFoundException { - export const filterSensitiveLog = (obj: ResourceNotFoundException): any => ({ +export namespace DeleteClusterResponse { + export const filterSensitiveLog = (obj: DeleteClusterResponse): any => ({ ...obj, }); } @@ -1582,6 +1948,101 @@ export namespace DeleteNodegroupResponse { }); } +export interface DescribeAddonRequest { + /** + *

      The name of the cluster.

      + */ + clusterName: string | undefined; + + /** + *

      The name of the add-on. The name must match one of the names returned by + * ListAddons + * .

      + */ + addonName: string | undefined; +} + +export namespace DescribeAddonRequest { + export const filterSensitiveLog = (obj: DescribeAddonRequest): any => ({ + ...obj, + }); +} + +export interface DescribeAddonResponse { + /** + *

      An Amazon EKS add-on.

      + */ + addon?: Addon; +} + +export namespace DescribeAddonResponse { + export const filterSensitiveLog = (obj: DescribeAddonResponse): any => ({ + ...obj, + }); +} + +export interface DescribeAddonVersionsRequest { + /** + *

      The Kubernetes versions that the add-on can be used with.

      + */ + kubernetesVersion?: string; + + /** + *

      The maximum number of results to return.

      + */ + maxResults?: number; + + /** + *

      The nextToken value returned from a previous paginated + * DescribeAddonVersionsRequest where maxResults was used and + * the results exceeded the value of that parameter. Pagination continues from the end of + * the previous results that returned the nextToken value.

      + * + *

      This token should be treated as an opaque identifier that is used only to + * retrieve the next items in a list and not for other programmatic purposes.

      + *
      + */ + nextToken?: string; + + /** + *

      The name of the add-on. The name must match one of the names returned by + * ListAddons + * .

      + */ + addonName?: string; +} + +export namespace DescribeAddonVersionsRequest { + export const filterSensitiveLog = (obj: DescribeAddonVersionsRequest): any => ({ + ...obj, + }); +} + +export interface DescribeAddonVersionsResponse { + /** + *

      The list of available versions with Kubernetes version compatibility.

      + */ + addons?: AddonInfo[]; + + /** + *

      The nextToken value returned from a previous paginated + * DescribeAddonVersionsResponse where maxResults was used + * and the results exceeded the value of that parameter. Pagination continues from the end + * of the previous results that returned the nextToken value.

      + * + *

      This token should be treated as an opaque identifier that is used only to + * retrieve the next items in a list and not for other programmatic purposes.

      + *
      + */ + nextToken?: string; +} + +export namespace DescribeAddonVersionsResponse { + export const filterSensitiveLog = (obj: DescribeAddonVersionsResponse): any => ({ + ...obj, + }); +} + export interface DescribeClusterRequest { /** *

      The name of the cluster to describe.

      @@ -1610,14 +2071,14 @@ export namespace DescribeClusterResponse { export interface DescribeFargateProfileRequest { /** - *

      The name of the Fargate profile to describe.

      + *

      The name of the Amazon EKS cluster associated with the Fargate profile.

      */ - fargateProfileName: string | undefined; + clusterName: string | undefined; /** - *

      The name of the Amazon EKS cluster associated with the Fargate profile.

      + *

      The name of the Fargate profile to describe.

      */ - clusterName: string | undefined; + fargateProfileName: string | undefined; } export namespace DescribeFargateProfileRequest { @@ -1685,6 +2146,13 @@ export interface DescribeUpdateRequest { *

      The name of the Amazon EKS node group associated with the update.

      */ nodegroupName?: string; + + /** + *

      The name of the add-on. The name must match one of the names returned by + * ListAddons + * .

      + */ + addonName?: string; } export namespace DescribeUpdateRequest { @@ -1696,8 +2164,10 @@ export namespace DescribeUpdateRequest { export enum ErrorCode { ACCESS_DENIED = "AccessDenied", CLUSTER_UNREACHABLE = "ClusterUnreachable", + CONFIGURATION_CONFLICT = "ConfigurationConflict", ENI_LIMIT_REACHED = "EniLimitReached", INSUFFICIENT_FREE_ADDRESSES = "InsufficientFreeAddresses", + INSUFFICIENT_NUMBER_OF_REPLICAS = "InsufficientNumberOfReplicas", IP_NOT_AVAILABLE = "IpNotAvailable", NODE_CREATION_FAILURE = "NodeCreationFailure", OPERATION_NOT_PERMITTED = "OperationNotPermitted", @@ -1712,11 +2182,6 @@ export enum ErrorCode { *

      An object representing an error when an asynchronous operation fails.

      */ export interface ErrorDetail { - /** - *

      An optional field that contains the resource IDs associated with the error.

      - */ - resourceIds?: string[]; - /** *

      A brief description of the error.

      *
        @@ -1764,6 +2229,11 @@ export interface ErrorDetail { *

        A more complete description of the error.

        */ errorMessage?: string; + + /** + *

        An optional field that contains the resource IDs associated with the error.

        + */ + resourceIds?: string[]; } export namespace ErrorDetail { @@ -1773,6 +2243,7 @@ export namespace ErrorDetail { } export enum UpdateParamType { + ADDON_VERSION = "AddonVersion", CLUSTER_LOGGING = "ClusterLogging", DESIRED_SIZE = "DesiredSize", ENDPOINT_PRIVATE_ACCESS = "EndpointPrivateAccess", @@ -1784,6 +2255,8 @@ export enum UpdateParamType { PLATFORM_VERSION = "PlatformVersion", PUBLIC_ACCESS_CIDRS = "PublicAccessCidrs", RELEASE_VERSION = "ReleaseVersion", + RESOLVE_CONFLICTS = "ResolveConflicts", + SERVICE_ACCOUNT_ROLE_ARN = "ServiceAccountRoleArn", VERSION = "Version", } @@ -1792,14 +2265,14 @@ export enum UpdateParamType { */ export interface UpdateParam { /** - *

        The value of the keys submitted as part of an update request.

        + *

        The keys associated with an update request.

        */ - value?: string; + type?: UpdateParamType | string; /** - *

        The keys associated with an update request.

        + *

        The value of the keys submitted as part of an update request.

        */ - type?: UpdateParamType | string; + value?: string; } export namespace UpdateParam { @@ -1816,6 +2289,7 @@ export enum UpdateStatus { } export enum UpdateType { + ADDON_UPDATE = "AddonUpdate", CONFIG_UPDATE = "ConfigUpdate", ENDPOINT_ACCESS_UPDATE = "EndpointAccessUpdate", LOGGING_UPDATE = "LoggingUpdate", @@ -1827,19 +2301,14 @@ export enum UpdateType { */ export interface Update { /** - *

        The current status of the update.

        - */ - status?: UpdateStatus | string; - - /** - *

        The Unix epoch timestamp in seconds for when the update was created.

        + *

        A UUID that is used to track the update.

        */ - createdAt?: Date; + id?: string; /** - *

        Any errors associated with a Failed update.

        + *

        The current status of the update.

        */ - errors?: ErrorDetail[]; + status?: UpdateStatus | string; /** *

        The type of the update.

        @@ -1852,9 +2321,14 @@ export interface Update { params?: UpdateParam[]; /** - *

        A UUID that is used to track the update.

        + *

        The Unix epoch timestamp in seconds for when the update was created.

        */ - id?: string; + createdAt?: Date; + + /** + *

        Any errors associated with a Failed update.

        + */ + errors?: ErrorDetail[]; } export namespace Update { @@ -1876,6 +2350,69 @@ export namespace DescribeUpdateResponse { }); } +export interface ListAddonsRequest { + /** + *

        The name of the cluster.

        + */ + clusterName: string | undefined; + + /** + *

        The maximum number of add-on results returned by ListAddonsRequest in + * paginated output. When you use this parameter, ListAddonsRequest returns + * only maxResults results in a single page along with a + * nextToken response element. You can see the remaining results of the + * initial request by sending another ListAddonsRequest request with the + * returned nextToken value. This value can be between 1 and + * 100. If you don't use this parameter, ListAddonsRequest + * returns up to 100 results and a nextToken value, if + * applicable.

        + */ + maxResults?: number; + + /** + *

        The nextToken value returned from a previous paginated + * ListAddonsRequest where maxResults was used and the + * results exceeded the value of that parameter. Pagination continues from the end of the + * previous results that returned the nextToken value.

        + * + *

        This token should be treated as an opaque identifier that is used only to + * retrieve the next items in a list and not for other programmatic purposes.

        + *
        + */ + nextToken?: string; +} + +export namespace ListAddonsRequest { + export const filterSensitiveLog = (obj: ListAddonsRequest): any => ({ + ...obj, + }); +} + +export interface ListAddonsResponse { + /** + *

        A list of available add-ons.

        + */ + addons?: string[]; + + /** + *

        The nextToken value returned from a previous paginated + * ListAddonsResponse where maxResults was used and the + * results exceeded the value of that parameter. Pagination continues from the end of the + * previous results that returned the nextToken value.

        + * + *

        This token should be treated as an opaque identifier that is used only to + * retrieve the next items in a list and not for other programmatic purposes.

        + *
        + */ + nextToken?: string; +} + +export namespace ListAddonsResponse { + export const filterSensitiveLog = (obj: ListAddonsResponse): any => ({ + ...obj, + }); +} + export interface ListClustersRequest { /** *

        The maximum number of cluster results returned by ListClusters in @@ -1936,14 +2473,6 @@ export interface ListFargateProfilesRequest { */ clusterName: string | undefined; - /** - *

        The nextToken value returned from a previous paginated - * ListFargateProfiles request where maxResults was used and - * the results exceeded the value of that parameter. Pagination continues from the end of - * the previous results that returned the nextToken value.

        - */ - nextToken?: string; - /** *

        The maximum number of Fargate profile results returned by * ListFargateProfiles in paginated output. When you use this parameter, @@ -1956,6 +2485,14 @@ export interface ListFargateProfilesRequest { * results and a nextToken value if applicable.

        */ maxResults?: number; + + /** + *

        The nextToken value returned from a previous paginated + * ListFargateProfiles request where maxResults was used and + * the results exceeded the value of that parameter. Pagination continues from the end of + * the previous results that returned the nextToken value.

        + */ + nextToken?: string; } export namespace ListFargateProfilesRequest { @@ -1987,6 +2524,11 @@ export namespace ListFargateProfilesResponse { } export interface ListNodegroupsRequest { + /** + *

        The name of the Amazon EKS cluster that you would like to list node groups in.

        + */ + clusterName: string | undefined; + /** *

        The maximum number of node group results returned by ListNodegroups in * paginated output. When you use this parameter, ListNodegroups returns only @@ -2006,11 +2548,6 @@ export interface ListNodegroupsRequest { * previous results that returned the nextToken value.

        */ nextToken?: string; - - /** - *

        The name of the Amazon EKS cluster that you would like to list node groups in.

        - */ - clusterName: string | undefined; } export namespace ListNodegroupsRequest { @@ -2020,6 +2557,11 @@ export namespace ListNodegroupsRequest { } export interface ListNodegroupsResponse { + /** + *

        A list of all of the node groups associated with the specified cluster.

        + */ + nodegroups?: string[]; + /** *

        The nextToken value to include in a future ListNodegroups * request. When the results of a ListNodegroups request exceed @@ -2028,11 +2570,6 @@ export interface ListNodegroupsResponse { * return.

        */ nextToken?: string; - - /** - *

        A list of all of the node groups associated with the specified cluster.

        - */ - nodegroups?: string[]; } export namespace ListNodegroupsResponse { @@ -2106,6 +2643,16 @@ export interface ListUpdatesRequest { */ name: string | undefined; + /** + *

        The name of the Amazon EKS managed node group to list updates for.

        + */ + nodegroupName?: string; + + /** + *

        The names of the installed add-ons that have available updates.

        + */ + addonName?: string; + /** *

        The nextToken value returned from a previous paginated * ListUpdates request where maxResults was used and the @@ -2125,11 +2672,6 @@ export interface ListUpdatesRequest { * nextToken value if applicable.

        */ maxResults?: number; - - /** - *

        The name of the Amazon EKS managed node group to list updates for.

        - */ - nodegroupName?: string; } export namespace ListUpdatesRequest { @@ -2161,16 +2703,16 @@ export namespace ListUpdatesResponse { } export interface TagResourceRequest { - /** - *

        The tags to add to the resource. A tag is an array of key-value pairs.

        - */ - tags: { [key: string]: string } | undefined; - /** *

        The Amazon Resource Name (ARN) of the resource to which to add tags. Currently, the supported resources * are Amazon EKS clusters and managed node groups.

        */ resourceArn: string | undefined; + + /** + *

        The tags to add to the resource. A tag is an array of key-value pairs.

        + */ + tags: { [key: string]: string } | undefined; } export namespace TagResourceRequest { @@ -2188,16 +2730,16 @@ export namespace TagResourceResponse { } export interface UntagResourceRequest { - /** - *

        The keys of the tags to be removed.

        - */ - tagKeys: string[] | undefined; - /** *

        The Amazon Resource Name (ARN) of the resource from which to delete tags. Currently, the supported * resources are Amazon EKS clusters and managed node groups.

        */ resourceArn: string | undefined; + + /** + *

        The keys of the tags to be removed.

        + */ + tagKeys: string[] | undefined; } export namespace UntagResourceRequest { @@ -2214,7 +2756,81 @@ export namespace UntagResourceResponse { }); } +export interface UpdateAddonRequest { + /** + *

        The name of the cluster.

        + */ + clusterName: string | undefined; + + /** + *

        The name of the add-on. The name must match one of the names returned by + * ListAddons + * .

        + */ + addonName: string | undefined; + + /** + *

        The version of the add-on. The version must match one of the versions returned by + * DescribeAddonVersions + * .

        + */ + addonVersion?: string; + + /** + *

        The Amazon Resource Name (ARN) of an existing IAM role to bind to the add-on's service account. The role must be assigned the IAM permissions required by the add-on. If you don't specify an existing IAM role, then the add-on uses the + * permissions assigned to the node IAM role. For more information, see Amazon EKS node IAM role in the Amazon EKS User Guide.

        + * + *

        To specify an existing IAM role, you must have an IAM OpenID Connect (OIDC) provider created for + * your cluster. For more information, see Enabling + * IAM roles for service accounts on your cluster in the + * Amazon EKS User Guide.

        + *
        + */ + serviceAccountRoleArn?: string; + + /** + *

        How to resolve parameter value conflicts when applying the new version of the add-on + * to the cluster.

        + */ + resolveConflicts?: ResolveConflicts | string; + + /** + *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

        + */ + clientRequestToken?: string; +} + +export namespace UpdateAddonRequest { + export const filterSensitiveLog = (obj: UpdateAddonRequest): any => ({ + ...obj, + }); +} + +export interface UpdateAddonResponse { + /** + *

        An object representing an asynchronous update.

        + */ + update?: Update; +} + +export namespace UpdateAddonResponse { + export const filterSensitiveLog = (obj: UpdateAddonResponse): any => ({ + ...obj, + }); +} + export interface UpdateClusterConfigRequest { + /** + *

        The name of the Amazon EKS cluster to update.

        + */ + name: string | undefined; + + /** + *

        An object representing the VPC configuration to use for an Amazon EKS cluster.

        + */ + resourcesVpcConfig?: VpcConfigRequest; + /** *

        Enable or disable exporting the Kubernetes control plane logs for your cluster to * CloudWatch Logs. By default, cluster control plane logs aren't exported to CloudWatch Logs. For more @@ -2229,16 +2845,6 @@ export interface UpdateClusterConfigRequest { */ logging?: Logging; - /** - *

        The name of the Amazon EKS cluster to update.

        - */ - name: string | undefined; - - /** - *

        An object representing the VPC configuration to use for an Amazon EKS cluster.

        - */ - resourcesVpcConfig?: VpcConfigRequest; - /** *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the * request.

        @@ -2267,10 +2873,9 @@ export namespace UpdateClusterConfigResponse { export interface UpdateClusterVersionRequest { /** - *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the - * request.

        + *

        The name of the Amazon EKS cluster to update.

        */ - clientRequestToken?: string; + name: string | undefined; /** *

        The desired Kubernetes version following a successful update.

        @@ -2278,9 +2883,10 @@ export interface UpdateClusterVersionRequest { version: string | undefined; /** - *

        The name of the Amazon EKS cluster to update.

        + *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

        */ - name: string | undefined; + clientRequestToken?: string; } export namespace UpdateClusterVersionRequest { @@ -2325,9 +2931,9 @@ export namespace UpdateLabelsPayload { export interface UpdateNodegroupConfigRequest { /** - *

        The scaling configuration details for the Auto Scaling group after the update.

        + *

        The name of the Amazon EKS cluster that the managed node group resides in.

        */ - scalingConfig?: NodegroupScalingConfig; + clusterName: string | undefined; /** *

        The name of the managed node group to update.

        @@ -2335,21 +2941,21 @@ export interface UpdateNodegroupConfigRequest { nodegroupName: string | undefined; /** - *

        The name of the Amazon EKS cluster that the managed node group resides in.

        + *

        The Kubernetes labels to be applied to the nodes in the node group after the + * update.

        */ - clusterName: string | undefined; + labels?: UpdateLabelsPayload; /** - *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the - * request.

        + *

        The scaling configuration details for the Auto Scaling group after the update.

        */ - clientRequestToken?: string; + scalingConfig?: NodegroupScalingConfig; /** - *

        The Kubernetes labels to be applied to the nodes in the node group after the - * update.

        + *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the + * request.

        */ - labels?: UpdateLabelsPayload; + clientRequestToken?: string; } export namespace UpdateNodegroupConfigRequest { @@ -2372,6 +2978,17 @@ export namespace UpdateNodegroupConfigResponse { } export interface UpdateNodegroupVersionRequest { + /** + *

        The name of the Amazon EKS cluster that is associated with the managed node group to + * update.

        + */ + clusterName: string | undefined; + + /** + *

        The name of the managed node group to update.

        + */ + nodegroupName: string | undefined; + /** *

        The Kubernetes version to update to. If no version is specified, then the Kubernetes * version of the node group does not change. You can specify the Kubernetes version of the @@ -2383,17 +3000,9 @@ export interface UpdateNodegroupVersionRequest { version?: string; /** - *

        Force the update if the existing node group's pods are unable to be drained due to a - * pod disruption budget issue. If an update fails because pods could not be drained, you - * can force the update after it fails to terminate the old node whether or not any pods - * are running on the node.

        - */ - force?: boolean; - - /** - *

        The AMI version of the Amazon EKS-optimized AMI to use for the update. By default, the + *

        The AMI version of the Amazon EKS optimized AMI to use for the update. By default, the * latest available AMI version for the node group's Kubernetes version is used. For more - * information, see Amazon EKS-Optimized Linux AMI Versions in the + * information, see Amazon EKS optimized Amazon Linux 2 AMI versions in the * Amazon EKS User Guide. If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify * releaseVersion, or the node group update will fail. * For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

        @@ -2408,21 +3017,18 @@ export interface UpdateNodegroupVersionRequest { launchTemplate?: LaunchTemplateSpecification; /** - *

        The name of the managed node group to update.

        + *

        Force the update if the existing node group's pods are unable to be drained due to a + * pod disruption budget issue. If an update fails because pods could not be drained, you + * can force the update after it fails to terminate the old node whether or not any pods + * are running on the node.

        */ - nodegroupName: string | undefined; + force?: boolean; /** *

        Unique, case-sensitive identifier that you provide to ensure the idempotency of the * request.

        */ clientRequestToken?: string; - - /** - *

        The name of the Amazon EKS cluster that is associated with the managed node group to - * update.

        - */ - clusterName: string | undefined; } export namespace UpdateNodegroupVersionRequest { diff --git a/clients/client-eks/pagination/DescribeAddonVersionsPaginator.ts b/clients/client-eks/pagination/DescribeAddonVersionsPaginator.ts new file mode 100644 index 000000000000..3e4039ef90ae --- /dev/null +++ b/clients/client-eks/pagination/DescribeAddonVersionsPaginator.ts @@ -0,0 +1,57 @@ +import { EKS } from "../EKS"; +import { EKSClient } from "../EKSClient"; +import { + DescribeAddonVersionsCommand, + DescribeAddonVersionsCommandInput, + DescribeAddonVersionsCommandOutput, +} from "../commands/DescribeAddonVersionsCommand"; +import { EKSPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: EKSClient, + input: DescribeAddonVersionsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new DescribeAddonVersionsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: EKS, + input: DescribeAddonVersionsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.describeAddonVersions(input, ...args); +}; +export async function* paginateDescribeAddonVersions( + config: EKSPaginationConfiguration, + input: DescribeAddonVersionsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: DescribeAddonVersionsCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof EKS) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof EKSClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected EKS | EKSClient"); + } + yield page; + token = 
page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-eks/pagination/ListAddonsPaginator.ts b/clients/client-eks/pagination/ListAddonsPaginator.ts new file mode 100644 index 000000000000..fa0f4a607f3f --- /dev/null +++ b/clients/client-eks/pagination/ListAddonsPaginator.ts @@ -0,0 +1,53 @@ +import { EKS } from "../EKS"; +import { EKSClient } from "../EKSClient"; +import { ListAddonsCommand, ListAddonsCommandInput, ListAddonsCommandOutput } from "../commands/ListAddonsCommand"; +import { EKSPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: EKSClient, + input: ListAddonsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListAddonsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: EKS, + input: ListAddonsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listAddons(input, ...args); +}; +export async function* paginateListAddons( + config: EKSPaginationConfiguration, + input: ListAddonsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListAddonsCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof EKS) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof EKSClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected EKS | EKSClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-eks/protocols/Aws_restJson1.ts 
b/clients/client-eks/protocols/Aws_restJson1.ts index 13ddf7d28bb2..214db0e1c68c 100644 --- a/clients/client-eks/protocols/Aws_restJson1.ts +++ b/clients/client-eks/protocols/Aws_restJson1.ts @@ -1,15 +1,22 @@ +import { CreateAddonCommandInput, CreateAddonCommandOutput } from "../commands/CreateAddonCommand"; import { CreateClusterCommandInput, CreateClusterCommandOutput } from "../commands/CreateClusterCommand"; import { CreateFargateProfileCommandInput, CreateFargateProfileCommandOutput, } from "../commands/CreateFargateProfileCommand"; import { CreateNodegroupCommandInput, CreateNodegroupCommandOutput } from "../commands/CreateNodegroupCommand"; +import { DeleteAddonCommandInput, DeleteAddonCommandOutput } from "../commands/DeleteAddonCommand"; import { DeleteClusterCommandInput, DeleteClusterCommandOutput } from "../commands/DeleteClusterCommand"; import { DeleteFargateProfileCommandInput, DeleteFargateProfileCommandOutput, } from "../commands/DeleteFargateProfileCommand"; import { DeleteNodegroupCommandInput, DeleteNodegroupCommandOutput } from "../commands/DeleteNodegroupCommand"; +import { DescribeAddonCommandInput, DescribeAddonCommandOutput } from "../commands/DescribeAddonCommand"; +import { + DescribeAddonVersionsCommandInput, + DescribeAddonVersionsCommandOutput, +} from "../commands/DescribeAddonVersionsCommand"; import { DescribeClusterCommandInput, DescribeClusterCommandOutput } from "../commands/DescribeClusterCommand"; import { DescribeFargateProfileCommandInput, @@ -17,6 +24,7 @@ import { } from "../commands/DescribeFargateProfileCommand"; import { DescribeNodegroupCommandInput, DescribeNodegroupCommandOutput } from "../commands/DescribeNodegroupCommand"; import { DescribeUpdateCommandInput, DescribeUpdateCommandOutput } from "../commands/DescribeUpdateCommand"; +import { ListAddonsCommandInput, ListAddonsCommandOutput } from "../commands/ListAddonsCommand"; import { ListClustersCommandInput, ListClustersCommandOutput } from 
"../commands/ListClustersCommand"; import { ListFargateProfilesCommandInput, @@ -30,6 +38,7 @@ import { import { ListUpdatesCommandInput, ListUpdatesCommandOutput } from "../commands/ListUpdatesCommand"; import { TagResourceCommandInput, TagResourceCommandOutput } from "../commands/TagResourceCommand"; import { UntagResourceCommandInput, UntagResourceCommandOutput } from "../commands/UntagResourceCommand"; +import { UpdateAddonCommandInput, UpdateAddonCommandOutput } from "../commands/UpdateAddonCommand"; import { UpdateClusterConfigCommandInput, UpdateClusterConfigCommandOutput, @@ -47,11 +56,17 @@ import { UpdateNodegroupVersionCommandOutput, } from "../commands/UpdateNodegroupVersionCommand"; import { + Addon, + AddonHealth, + AddonInfo, + AddonIssue, + AddonVersionInfo, AutoScalingGroup, BadRequestException, Certificate, ClientException, Cluster, + Compatibility, EncryptionConfig, ErrorDetail, FargateProfile, @@ -99,6 +114,44 @@ import { } from "@aws-sdk/types"; import { v4 as generateIdempotencyToken } from "uuid"; +export const serializeAws_restJson1CreateAddonCommand = async ( + input: CreateAddonCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/clusters/{clusterName}/addons"; + if (input.clusterName !== undefined) { + const labelValue: string = input.clusterName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: clusterName."); + } + resolvedPath = resolvedPath.replace("{clusterName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: clusterName."); + } + let body: any; + body = JSON.stringify({ + ...(input.addonName !== undefined && { addonName: input.addonName }), + ...(input.addonVersion !== undefined && { addonVersion: input.addonVersion }), + clientRequestToken: input.clientRequestToken ?? 
generateIdempotencyToken(), + ...(input.resolveConflicts !== undefined && { resolveConflicts: input.resolveConflicts }), + ...(input.serviceAccountRoleArn !== undefined && { serviceAccountRoleArn: input.serviceAccountRoleArn }), + ...(input.tags !== undefined && { tags: serializeAws_restJson1TagMap(input.tags, context) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1CreateClusterCommand = async ( input: CreateClusterCommandInput, context: __SerdeContext @@ -200,6 +253,7 @@ export const serializeAws_restJson1CreateNodegroupCommand = async ( let body: any; body = JSON.stringify({ ...(input.amiType !== undefined && { amiType: input.amiType }), + ...(input.capacityType !== undefined && { capacityType: input.capacityType }), clientRequestToken: input.clientRequestToken ?? generateIdempotencyToken(), ...(input.diskSize !== undefined && { diskSize: input.diskSize }), ...(input.instanceTypes !== undefined && { @@ -234,6 +288,45 @@ export const serializeAws_restJson1CreateNodegroupCommand = async ( }); }; +export const serializeAws_restJson1DeleteAddonCommand = async ( + input: DeleteAddonCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/clusters/{clusterName}/addons/{addonName}"; + if (input.clusterName !== undefined) { + const labelValue: string = input.clusterName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: clusterName."); + } + resolvedPath = resolvedPath.replace("{clusterName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: clusterName."); + } + if (input.addonName !== undefined) { + const labelValue: string = input.addonName; + if (labelValue.length <= 0) { 
+ throw new Error("Empty value provided for input HTTP label: addonName."); + } + resolvedPath = resolvedPath.replace("{addonName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: addonName."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1DeleteClusterCommand = async ( input: DeleteClusterCommandInput, context: __SerdeContext @@ -342,6 +435,73 @@ export const serializeAws_restJson1DeleteNodegroupCommand = async ( }); }; +export const serializeAws_restJson1DescribeAddonCommand = async ( + input: DescribeAddonCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/clusters/{clusterName}/addons/{addonName}"; + if (input.clusterName !== undefined) { + const labelValue: string = input.clusterName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: clusterName."); + } + resolvedPath = resolvedPath.replace("{clusterName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: clusterName."); + } + if (input.addonName !== undefined) { + const labelValue: string = input.addonName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: addonName."); + } + resolvedPath = resolvedPath.replace("{addonName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: addonName."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const 
serializeAws_restJson1DescribeAddonVersionsCommand = async ( + input: DescribeAddonVersionsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/addons/supported-versions"; + const query: any = { + ...(input.kubernetesVersion !== undefined && { kubernetesVersion: input.kubernetesVersion }), + ...(input.maxResults !== undefined && { maxResults: input.maxResults.toString() }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + ...(input.addonName !== undefined && { addonName: input.addonName }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + export const serializeAws_restJson1DescribeClusterCommand = async ( input: DescribeClusterCommandInput, context: __SerdeContext @@ -380,15 +540,6 @@ export const serializeAws_restJson1DescribeFargateProfileCommand = async ( "Content-Type": "", }; let resolvedPath = "/clusters/{clusterName}/fargate-profiles/{fargateProfileName}"; - if (input.fargateProfileName !== undefined) { - const labelValue: string = input.fargateProfileName; - if (labelValue.length <= 0) { - throw new Error("Empty value provided for input HTTP label: fargateProfileName."); - } - resolvedPath = resolvedPath.replace("{fargateProfileName}", __extendedEncodeURIComponent(labelValue)); - } else { - throw new Error("No value provided for input HTTP label: fargateProfileName."); - } if (input.clusterName !== undefined) { const labelValue: string = input.clusterName; if (labelValue.length <= 0) { @@ -398,6 +549,15 @@ export const serializeAws_restJson1DescribeFargateProfileCommand = async ( } else { throw new Error("No value provided for input HTTP label: clusterName."); } + if (input.fargateProfileName !== undefined) { + const labelValue: string = 
input.fargateProfileName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: fargateProfileName."); + } + resolvedPath = resolvedPath.replace("{fargateProfileName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: fargateProfileName."); + } let body: any; const { hostname, protocol = "https", port } = await context.endpoint(); return new __HttpRequest({ @@ -478,6 +638,42 @@ export const serializeAws_restJson1DescribeUpdateCommand = async ( } const query: any = { ...(input.nodegroupName !== undefined && { nodegroupName: input.nodegroupName }), + ...(input.addonName !== undefined && { addonName: input.addonName }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1ListAddonsCommand = async ( + input: ListAddonsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/clusters/{clusterName}/addons"; + if (input.clusterName !== undefined) { + const labelValue: string = input.clusterName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: clusterName."); + } + resolvedPath = resolvedPath.replace("{clusterName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: clusterName."); + } + const query: any = { + ...(input.maxResults !== undefined && { maxResults: input.maxResults.toString() }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), }; let body: any; const { hostname, protocol = "https", port } = await context.endpoint(); @@ -537,8 +733,8 @@ export const serializeAws_restJson1ListFargateProfilesCommand = async ( throw new 
Error("No value provided for input HTTP label: clusterName."); } const query: any = { - ...(input.nextToken !== undefined && { nextToken: input.nextToken }), ...(input.maxResults !== undefined && { maxResults: input.maxResults.toString() }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), }; let body: any; const { hostname, protocol = "https", port } = await context.endpoint(); @@ -637,9 +833,10 @@ export const serializeAws_restJson1ListUpdatesCommand = async ( throw new Error("No value provided for input HTTP label: name."); } const query: any = { + ...(input.nodegroupName !== undefined && { nodegroupName: input.nodegroupName }), + ...(input.addonName !== undefined && { addonName: input.addonName }), ...(input.nextToken !== undefined && { nextToken: input.nextToken }), ...(input.maxResults !== undefined && { maxResults: input.maxResults.toString() }), - ...(input.nodegroupName !== undefined && { nodegroupName: input.nodegroupName }), }; let body: any; const { hostname, protocol = "https", port } = await context.endpoint(); @@ -722,6 +919,51 @@ export const serializeAws_restJson1UntagResourceCommand = async ( }); }; +export const serializeAws_restJson1UpdateAddonCommand = async ( + input: UpdateAddonCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/clusters/{clusterName}/addons/{addonName}/update"; + if (input.clusterName !== undefined) { + const labelValue: string = input.clusterName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: clusterName."); + } + resolvedPath = resolvedPath.replace("{clusterName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: clusterName."); + } + if (input.addonName !== undefined) { + const labelValue: string = input.addonName; + if (labelValue.length <= 0) { + throw new Error("Empty value 
provided for input HTTP label: addonName."); + } + resolvedPath = resolvedPath.replace("{addonName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: addonName."); + } + let body: any; + body = JSON.stringify({ + ...(input.addonVersion !== undefined && { addonVersion: input.addonVersion }), + clientRequestToken: input.clientRequestToken ?? generateIdempotencyToken(), + ...(input.resolveConflicts !== undefined && { resolveConflicts: input.resolveConflicts }), + ...(input.serviceAccountRoleArn !== undefined && { serviceAccountRoleArn: input.serviceAccountRoleArn }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1UpdateClusterConfigCommand = async ( input: UpdateClusterConfigCommandInput, context: __SerdeContext @@ -801,15 +1043,6 @@ export const serializeAws_restJson1UpdateNodegroupConfigCommand = async ( "Content-Type": "application/json", }; let resolvedPath = "/clusters/{clusterName}/node-groups/{nodegroupName}/update-config"; - if (input.nodegroupName !== undefined) { - const labelValue: string = input.nodegroupName; - if (labelValue.length <= 0) { - throw new Error("Empty value provided for input HTTP label: nodegroupName."); - } - resolvedPath = resolvedPath.replace("{nodegroupName}", __extendedEncodeURIComponent(labelValue)); - } else { - throw new Error("No value provided for input HTTP label: nodegroupName."); - } if (input.clusterName !== undefined) { const labelValue: string = input.clusterName; if (labelValue.length <= 0) { @@ -819,6 +1052,15 @@ export const serializeAws_restJson1UpdateNodegroupConfigCommand = async ( } else { throw new Error("No value provided for input HTTP label: clusterName."); } + if (input.nodegroupName !== undefined) { + const labelValue: string = input.nodegroupName; + 
if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: nodegroupName."); + } + resolvedPath = resolvedPath.replace("{nodegroupName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: nodegroupName."); + } let body: any; body = JSON.stringify({ clientRequestToken: input.clientRequestToken ?? generateIdempotencyToken(), @@ -847,15 +1089,6 @@ export const serializeAws_restJson1UpdateNodegroupVersionCommand = async ( "Content-Type": "application/json", }; let resolvedPath = "/clusters/{clusterName}/node-groups/{nodegroupName}/update-version"; - if (input.nodegroupName !== undefined) { - const labelValue: string = input.nodegroupName; - if (labelValue.length <= 0) { - throw new Error("Empty value provided for input HTTP label: nodegroupName."); - } - resolvedPath = resolvedPath.replace("{nodegroupName}", __extendedEncodeURIComponent(labelValue)); - } else { - throw new Error("No value provided for input HTTP label: nodegroupName."); - } if (input.clusterName !== undefined) { const labelValue: string = input.clusterName; if (labelValue.length <= 0) { @@ -865,6 +1098,15 @@ export const serializeAws_restJson1UpdateNodegroupVersionCommand = async ( } else { throw new Error("No value provided for input HTTP label: clusterName."); } + if (input.nodegroupName !== undefined) { + const labelValue: string = input.nodegroupName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: nodegroupName."); + } + resolvedPath = resolvedPath.replace("{nodegroupName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: nodegroupName."); + } let body: any; body = JSON.stringify({ clientRequestToken: input.clientRequestToken ?? 
generateIdempotencyToken(), @@ -887,28 +1129,28 @@ export const serializeAws_restJson1UpdateNodegroupVersionCommand = async ( }); }; -export const deserializeAws_restJson1CreateClusterCommand = async ( +export const deserializeAws_restJson1CreateAddonCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return deserializeAws_restJson1CreateClusterCommandError(output, context); + return deserializeAws_restJson1CreateAddonCommandError(output, context); } - const contents: CreateClusterCommandOutput = { + const contents: CreateAddonCommandOutput = { $metadata: deserializeMetadata(output), - cluster: undefined, + addon: undefined, }; const data: any = await parseBody(output.body, context); - if (data.cluster !== undefined && data.cluster !== null) { - contents.cluster = deserializeAws_restJson1Cluster(data.cluster, context); + if (data.addon !== undefined && data.addon !== null) { + contents.addon = deserializeAws_restJson1Addon(data.addon, context); } return Promise.resolve(contents); }; -const deserializeAws_restJson1CreateClusterCommandError = async ( +const deserializeAws_restJson1CreateAddonCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -933,10 +1175,105 @@ const deserializeAws_restJson1CreateClusterCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "ResourceInUseException": - case "com.amazonaws.eks#ResourceInUseException": + case "InvalidRequestException": + case "com.amazonaws.eks#InvalidRequestException": response = { - ...(await deserializeAws_restJson1ResourceInUseExceptionResponse(parsedOutput, context)), + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case 
"ResourceInUseException": + case "com.amazonaws.eks#ResourceInUseException": + response = { + ...(await deserializeAws_restJson1ResourceInUseExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.eks#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.eks#ServerException": + response = { + ...(await deserializeAws_restJson1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateClusterCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateClusterCommandError(output, context); + } + const contents: CreateClusterCommandOutput = { + $metadata: deserializeMetadata(output), + cluster: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.cluster !== undefined && data.cluster !== null) { + contents.cluster = deserializeAws_restJson1Cluster(data.cluster, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateClusterCommandError = 
async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ClientException": + case "com.amazonaws.eks#ClientException": + response = { + ...(await deserializeAws_restJson1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.eks#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceInUseException": + case "com.amazonaws.eks#ResourceInUseException": + response = { + ...(await deserializeAws_restJson1ResourceInUseExceptionResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -1188,6 +1525,93 @@ const deserializeAws_restJson1CreateNodegroupCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_restJson1DeleteAddonCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteAddonCommandError(output, context); + } + const contents: DeleteAddonCommandOutput = { + $metadata: deserializeMetadata(output), + addon: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.addon !== undefined && data.addon !== null) { + contents.addon = deserializeAws_restJson1Addon(data.addon, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteAddonCommandError = 
async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ClientException": + case "com.amazonaws.eks#ClientException": + response = { + ...(await deserializeAws_restJson1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.eks#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.eks#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.eks#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.eks#ServerException": + response = { + ...(await deserializeAws_restJson1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; 
+ } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_restJson1DeleteClusterCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -1403,7 +1827,177 @@ const deserializeAws_restJson1DeleteNodegroupCommandError = async ( case "ResourceInUseException": case "com.amazonaws.eks#ResourceInUseException": response = { - ...(await deserializeAws_restJson1ResourceInUseExceptionResponse(parsedOutput, context)), + ...(await deserializeAws_restJson1ResourceInUseExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.eks#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.eks#ServerException": + response = { + ...(await deserializeAws_restJson1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.eks#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete 
response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DescribeAddonCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DescribeAddonCommandError(output, context); + } + const contents: DescribeAddonCommandOutput = { + $metadata: deserializeMetadata(output), + addon: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.addon !== undefined && data.addon !== null) { + contents.addon = deserializeAws_restJson1Addon(data.addon, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DescribeAddonCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ClientException": + case "com.amazonaws.eks#ClientException": + response = { + ...(await deserializeAws_restJson1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.eks#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.eks#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case 
"com.amazonaws.eks#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.eks#ServerException": + response = { + ...(await deserializeAws_restJson1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DescribeAddonVersionsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DescribeAddonVersionsCommandError(output, context); + } + const contents: DescribeAddonVersionsCommandOutput = { + $metadata: deserializeMetadata(output), + addons: undefined, + nextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.addons !== undefined && data.addons !== null) { + contents.addons = deserializeAws_restJson1Addons(data.addons, context); + } + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.nextToken = data.nextToken; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DescribeAddonVersionsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await 
parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "InvalidParameterException": + case "com.amazonaws.eks#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -1424,14 +2018,6 @@ const deserializeAws_restJson1DeleteNodegroupCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "ServiceUnavailableException": - case "com.amazonaws.eks#ServiceUnavailableException": - response = { - ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -1773,6 +2359,97 @@ const deserializeAws_restJson1DescribeUpdateCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_restJson1ListAddonsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListAddonsCommandError(output, context); + } + const contents: ListAddonsCommandOutput = { + $metadata: deserializeMetadata(output), + addons: undefined, + nextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.addons !== undefined && data.addons !== null) { + contents.addons = deserializeAws_restJson1StringList(data.addons, context); + } + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.nextToken = data.nextToken; + } + return Promise.resolve(contents); +}; + +const 
deserializeAws_restJson1ListAddonsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ClientException": + case "com.amazonaws.eks#ClientException": + response = { + ...(await deserializeAws_restJson1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.eks#InvalidParameterException": + response = { + ...(await deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.eks#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.eks#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.eks#ServerException": + response = { + ...(await deserializeAws_restJson1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + 
$metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_restJson1ListClustersCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -2294,6 +2971,101 @@ const deserializeAws_restJson1UntagResourceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_restJson1UpdateAddonCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateAddonCommandError(output, context); + } + const contents: UpdateAddonCommandOutput = { + $metadata: deserializeMetadata(output), + update: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.update !== undefined && data.update !== null) { + contents.update = deserializeAws_restJson1Update(data.update, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateAddonCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "ClientException": + case "com.amazonaws.eks#ClientException": + response = { + ...(await deserializeAws_restJson1ClientExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidParameterException": + case "com.amazonaws.eks#InvalidParameterException": + response = { + ...(await 
deserializeAws_restJson1InvalidParameterExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InvalidRequestException": + case "com.amazonaws.eks#InvalidRequestException": + response = { + ...(await deserializeAws_restJson1InvalidRequestExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceInUseException": + case "com.amazonaws.eks#ResourceInUseException": + response = { + ...(await deserializeAws_restJson1ResourceInUseExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.eks#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServerException": + case "com.amazonaws.eks#ServerException": + response = { + ...(await deserializeAws_restJson1ServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_restJson1UpdateClusterConfigCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -2699,11 +3471,15 @@ const deserializeAws_restJson1ClientExceptionResponse = async ( name: "ClientException", $fault: "client", $metadata: 
deserializeMetadata(parsedOutput), + addonName: undefined, clusterName: undefined, message: undefined, nodegroupName: undefined, }; const data: any = parsedOutput.body; + if (data.addonName !== undefined && data.addonName !== null) { + contents.addonName = data.addonName; + } if (data.clusterName !== undefined && data.clusterName !== null) { contents.clusterName = data.clusterName; } @@ -2724,12 +3500,16 @@ const deserializeAws_restJson1InvalidParameterExceptionResponse = async ( name: "InvalidParameterException", $fault: "client", $metadata: deserializeMetadata(parsedOutput), + addonName: undefined, clusterName: undefined, fargateProfileName: undefined, message: undefined, nodegroupName: undefined, }; const data: any = parsedOutput.body; + if (data.addonName !== undefined && data.addonName !== null) { + contents.addonName = data.addonName; + } if (data.clusterName !== undefined && data.clusterName !== null) { contents.clusterName = data.clusterName; } @@ -2753,11 +3533,15 @@ const deserializeAws_restJson1InvalidRequestExceptionResponse = async ( name: "InvalidRequestException", $fault: "client", $metadata: deserializeMetadata(parsedOutput), + addonName: undefined, clusterName: undefined, message: undefined, nodegroupName: undefined, }; const data: any = parsedOutput.body; + if (data.addonName !== undefined && data.addonName !== null) { + contents.addonName = data.addonName; + } if (data.clusterName !== undefined && data.clusterName !== null) { contents.clusterName = data.clusterName; } @@ -2795,11 +3579,15 @@ const deserializeAws_restJson1ResourceInUseExceptionResponse = async ( name: "ResourceInUseException", $fault: "client", $metadata: deserializeMetadata(parsedOutput), + addonName: undefined, clusterName: undefined, message: undefined, nodegroupName: undefined, }; const data: any = parsedOutput.body; + if (data.addonName !== undefined && data.addonName !== null) { + contents.addonName = data.addonName; + } if (data.clusterName !== undefined && data.clusterName 
!== null) { contents.clusterName = data.clusterName; } @@ -2845,12 +3633,16 @@ const deserializeAws_restJson1ResourceNotFoundExceptionResponse = async ( name: "ResourceNotFoundException", $fault: "client", $metadata: deserializeMetadata(parsedOutput), + addonName: undefined, clusterName: undefined, fargateProfileName: undefined, message: undefined, nodegroupName: undefined, }; const data: any = parsedOutput.body; + if (data.addonName !== undefined && data.addonName !== null) { + contents.addonName = data.addonName; + } if (data.clusterName !== undefined && data.clusterName !== null) { contents.clusterName = data.clusterName; } @@ -2874,11 +3666,15 @@ const deserializeAws_restJson1ServerExceptionResponse = async ( name: "ServerException", $fault: "server", $metadata: deserializeMetadata(parsedOutput), + addonName: undefined, clusterName: undefined, message: undefined, nodegroupName: undefined, }; const data: any = parsedOutput.body; + if (data.addonName !== undefined && data.addonName !== null) { + contents.addonName = data.addonName; + } if (data.clusterName !== undefined && data.clusterName !== null) { contents.clusterName = data.clusterName; } @@ -3091,6 +3887,93 @@ const serializeAws_restJson1VpcConfigRequest = (input: VpcConfigRequest, context }; }; +const deserializeAws_restJson1Addon = (output: any, context: __SerdeContext): Addon => { + return { + addonArn: output.addonArn !== undefined && output.addonArn !== null ? output.addonArn : undefined, + addonName: output.addonName !== undefined && output.addonName !== null ? output.addonName : undefined, + addonVersion: output.addonVersion !== undefined && output.addonVersion !== null ? output.addonVersion : undefined, + clusterName: output.clusterName !== undefined && output.clusterName !== null ? output.clusterName : undefined, + createdAt: + output.createdAt !== undefined && output.createdAt !== null + ? 
new Date(Math.round(output.createdAt * 1000)) + : undefined, + health: + output.health !== undefined && output.health !== null + ? deserializeAws_restJson1AddonHealth(output.health, context) + : undefined, + modifiedAt: + output.modifiedAt !== undefined && output.modifiedAt !== null + ? new Date(Math.round(output.modifiedAt * 1000)) + : undefined, + serviceAccountRoleArn: + output.serviceAccountRoleArn !== undefined && output.serviceAccountRoleArn !== null + ? output.serviceAccountRoleArn + : undefined, + status: output.status !== undefined && output.status !== null ? output.status : undefined, + tags: + output.tags !== undefined && output.tags !== null + ? deserializeAws_restJson1TagMap(output.tags, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1AddonHealth = (output: any, context: __SerdeContext): AddonHealth => { + return { + issues: + output.issues !== undefined && output.issues !== null + ? deserializeAws_restJson1AddonIssueList(output.issues, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1AddonInfo = (output: any, context: __SerdeContext): AddonInfo => { + return { + addonName: output.addonName !== undefined && output.addonName !== null ? output.addonName : undefined, + addonVersions: + output.addonVersions !== undefined && output.addonVersions !== null + ? deserializeAws_restJson1AddonVersionInfoList(output.addonVersions, context) + : undefined, + type: output.type !== undefined && output.type !== null ? output.type : undefined, + } as any; +}; + +const deserializeAws_restJson1AddonIssue = (output: any, context: __SerdeContext): AddonIssue => { + return { + code: output.code !== undefined && output.code !== null ? output.code : undefined, + message: output.message !== undefined && output.message !== null ? output.message : undefined, + resourceIds: + output.resourceIds !== undefined && output.resourceIds !== null + ? 
deserializeAws_restJson1StringList(output.resourceIds, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1AddonIssueList = (output: any, context: __SerdeContext): AddonIssue[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1AddonIssue(entry, context)); +}; + +const deserializeAws_restJson1Addons = (output: any, context: __SerdeContext): AddonInfo[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1AddonInfo(entry, context)); +}; + +const deserializeAws_restJson1AddonVersionInfo = (output: any, context: __SerdeContext): AddonVersionInfo => { + return { + addonVersion: output.addonVersion !== undefined && output.addonVersion !== null ? output.addonVersion : undefined, + architecture: + output.architecture !== undefined && output.architecture !== null + ? deserializeAws_restJson1StringList(output.architecture, context) + : undefined, + compatibilities: + output.compatibilities !== undefined && output.compatibilities !== null + ? deserializeAws_restJson1Compatibilities(output.compatibilities, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1AddonVersionInfoList = (output: any, context: __SerdeContext): AddonVersionInfo[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1AddonVersionInfo(entry, context)); +}; + const deserializeAws_restJson1AutoScalingGroup = (output: any, context: __SerdeContext): AutoScalingGroup => { return { name: output.name !== undefined && output.name !== null ? 
output.name : undefined, @@ -3156,6 +4039,23 @@ const deserializeAws_restJson1Cluster = (output: any, context: __SerdeContext): } as any; }; +const deserializeAws_restJson1Compatibilities = (output: any, context: __SerdeContext): Compatibility[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1Compatibility(entry, context)); +}; + +const deserializeAws_restJson1Compatibility = (output: any, context: __SerdeContext): Compatibility => { + return { + clusterVersion: + output.clusterVersion !== undefined && output.clusterVersion !== null ? output.clusterVersion : undefined, + defaultVersion: + output.defaultVersion !== undefined && output.defaultVersion !== null ? output.defaultVersion : undefined, + platformVersions: + output.platformVersions !== undefined && output.platformVersions !== null + ? deserializeAws_restJson1StringList(output.platformVersions, context) + : undefined, + } as any; +}; + const deserializeAws_restJson1EncryptionConfig = (output: any, context: __SerdeContext): EncryptionConfig => { return { provider: @@ -3341,6 +4241,7 @@ const deserializeAws_restJson1LogTypes = (output: any, context: __SerdeContext): const deserializeAws_restJson1Nodegroup = (output: any, context: __SerdeContext): Nodegroup => { return { amiType: output.amiType !== undefined && output.amiType !== null ? output.amiType : undefined, + capacityType: output.capacityType !== undefined && output.capacityType !== null ? output.capacityType : undefined, clusterName: output.clusterName !== undefined && output.clusterName !== null ? 
output.clusterName : undefined, createdAt: output.createdAt !== undefined && output.createdAt !== null diff --git a/clients/client-honeycode/Honeycode.ts b/clients/client-honeycode/Honeycode.ts index 7b38410b1bf6..0d648c9bccec 100644 --- a/clients/client-honeycode/Honeycode.ts +++ b/clients/client-honeycode/Honeycode.ts @@ -1,4 +1,29 @@ import { HoneycodeClient } from "./HoneycodeClient"; +import { + BatchCreateTableRowsCommand, + BatchCreateTableRowsCommandInput, + BatchCreateTableRowsCommandOutput, +} from "./commands/BatchCreateTableRowsCommand"; +import { + BatchDeleteTableRowsCommand, + BatchDeleteTableRowsCommandInput, + BatchDeleteTableRowsCommandOutput, +} from "./commands/BatchDeleteTableRowsCommand"; +import { + BatchUpdateTableRowsCommand, + BatchUpdateTableRowsCommandInput, + BatchUpdateTableRowsCommandOutput, +} from "./commands/BatchUpdateTableRowsCommand"; +import { + BatchUpsertTableRowsCommand, + BatchUpsertTableRowsCommandInput, + BatchUpsertTableRowsCommandOutput, +} from "./commands/BatchUpsertTableRowsCommand"; +import { + DescribeTableDataImportJobCommand, + DescribeTableDataImportJobCommandInput, + DescribeTableDataImportJobCommandOutput, +} from "./commands/DescribeTableDataImportJobCommand"; import { GetScreenDataCommand, GetScreenDataCommandInput, @@ -9,6 +34,27 @@ import { InvokeScreenAutomationCommandInput, InvokeScreenAutomationCommandOutput, } from "./commands/InvokeScreenAutomationCommand"; +import { + ListTableColumnsCommand, + ListTableColumnsCommandInput, + ListTableColumnsCommandOutput, +} from "./commands/ListTableColumnsCommand"; +import { + ListTableRowsCommand, + ListTableRowsCommandInput, + ListTableRowsCommandOutput, +} from "./commands/ListTableRowsCommand"; +import { ListTablesCommand, ListTablesCommandInput, ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { + QueryTableRowsCommand, + QueryTableRowsCommandInput, + QueryTableRowsCommandOutput, +} from "./commands/QueryTableRowsCommand"; +import { + 
StartTableDataImportJobCommand, + StartTableDataImportJobCommandInput, + StartTableDataImportJobCommandOutput, +} from "./commands/StartTableDataImportJobCommand"; import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; /** @@ -19,6 +65,201 @@ import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; *

        */ export class Honeycode extends HoneycodeClient { + /** + *

        + * The BatchCreateTableRows API allows you to create one or more rows at the end of a table in a workbook. + * The API allows you to specify the values to set in some or all of the columns in the new rows. + *

        + *

        + * If a column is not explicitly set in a specific row, then the column level formula specified in the table + * will be applied to the new row. If there is no column level formula but the last row of the table has a + * formula, then that formula will be copied down to the new row. If there is no column level formula and + * no formula in the last row of the table, then that column will be left blank for the new rows. + *

        + */ + public batchCreateTableRows( + args: BatchCreateTableRowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public batchCreateTableRows( + args: BatchCreateTableRowsCommandInput, + cb: (err: any, data?: BatchCreateTableRowsCommandOutput) => void + ): void; + public batchCreateTableRows( + args: BatchCreateTableRowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchCreateTableRowsCommandOutput) => void + ): void; + public batchCreateTableRows( + args: BatchCreateTableRowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: BatchCreateTableRowsCommandOutput) => void), + cb?: (err: any, data?: BatchCreateTableRowsCommandOutput) => void + ): Promise | void { + const command = new BatchCreateTableRowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The BatchDeleteTableRows API allows you to delete one or more rows from a table in a workbook. + * You need to specify the ids of the rows that you want to delete from the table. + *

        + */ + public batchDeleteTableRows( + args: BatchDeleteTableRowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public batchDeleteTableRows( + args: BatchDeleteTableRowsCommandInput, + cb: (err: any, data?: BatchDeleteTableRowsCommandOutput) => void + ): void; + public batchDeleteTableRows( + args: BatchDeleteTableRowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchDeleteTableRowsCommandOutput) => void + ): void; + public batchDeleteTableRows( + args: BatchDeleteTableRowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: BatchDeleteTableRowsCommandOutput) => void), + cb?: (err: any, data?: BatchDeleteTableRowsCommandOutput) => void + ): Promise | void { + const command = new BatchDeleteTableRowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The BatchUpdateTableRows API allows you to update one or more rows in a table in a workbook. + *

        + *

        + * You can specify the values to set in some or all of the columns in the table for the specified + * rows. + * If a column is not explicitly specified in a particular row, then that column will not be updated + * for that row. To clear out the data in a specific cell, you need to set the value as an empty string + * (""). + *

        + */ + public batchUpdateTableRows( + args: BatchUpdateTableRowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public batchUpdateTableRows( + args: BatchUpdateTableRowsCommandInput, + cb: (err: any, data?: BatchUpdateTableRowsCommandOutput) => void + ): void; + public batchUpdateTableRows( + args: BatchUpdateTableRowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchUpdateTableRowsCommandOutput) => void + ): void; + public batchUpdateTableRows( + args: BatchUpdateTableRowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: BatchUpdateTableRowsCommandOutput) => void), + cb?: (err: any, data?: BatchUpdateTableRowsCommandOutput) => void + ): Promise | void { + const command = new BatchUpdateTableRowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The BatchUpsertTableRows API allows you to upsert one or more rows in a table. The upsert + * operation takes a filter expression as input and evaluates it to find matching rows on the destination + * table. If matching rows are found, it will update the cells in the matching rows to new values specified + * in the request. If no matching rows are found, a new row is added at the end of the table and the cells in + * that row are set to the new values specified in the request. + *

        + *

        + * You can specify the values to set in some or all of the columns in the table for the + * matching or newly appended rows. If a column is not explicitly specified for a particular row, then that + * column will not be updated for that row. To clear out the data in a specific cell, you need to set the value + * as an empty string (""). + *

        + */ + public batchUpsertTableRows( + args: BatchUpsertTableRowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public batchUpsertTableRows( + args: BatchUpsertTableRowsCommandInput, + cb: (err: any, data?: BatchUpsertTableRowsCommandOutput) => void + ): void; + public batchUpsertTableRows( + args: BatchUpsertTableRowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: BatchUpsertTableRowsCommandOutput) => void + ): void; + public batchUpsertTableRows( + args: BatchUpsertTableRowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: BatchUpsertTableRowsCommandOutput) => void), + cb?: (err: any, data?: BatchUpsertTableRowsCommandOutput) => void + ): Promise | void { + const command = new BatchUpsertTableRowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The DescribeTableDataImportJob API allows you to retrieve the status and details of a table data import job. + *

        + */ + public describeTableDataImportJob( + args: DescribeTableDataImportJobCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeTableDataImportJob( + args: DescribeTableDataImportJobCommandInput, + cb: (err: any, data?: DescribeTableDataImportJobCommandOutput) => void + ): void; + public describeTableDataImportJob( + args: DescribeTableDataImportJobCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeTableDataImportJobCommandOutput) => void + ): void; + public describeTableDataImportJob( + args: DescribeTableDataImportJobCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeTableDataImportJobCommandOutput) => void), + cb?: (err: any, data?: DescribeTableDataImportJobCommandOutput) => void + ): Promise | void { + const command = new DescribeTableDataImportJobCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

        * The GetScreenData API allows retrieval of data from a screen in a Honeycode app. @@ -90,4 +331,170 @@ export class Honeycode extends HoneycodeClient { return this.send(command, optionsOrCb); } } + + /** + *

        + * The ListTableColumns API allows you to retrieve a list of all the columns in a table in a workbook. + *

        + */ + public listTableColumns( + args: ListTableColumnsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listTableColumns( + args: ListTableColumnsCommandInput, + cb: (err: any, data?: ListTableColumnsCommandOutput) => void + ): void; + public listTableColumns( + args: ListTableColumnsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTableColumnsCommandOutput) => void + ): void; + public listTableColumns( + args: ListTableColumnsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListTableColumnsCommandOutput) => void), + cb?: (err: any, data?: ListTableColumnsCommandOutput) => void + ): Promise | void { + const command = new ListTableColumnsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The ListTableRows API allows you to retrieve a list of all the rows in a table in a workbook. + *

        + */ + public listTableRows( + args: ListTableRowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listTableRows( + args: ListTableRowsCommandInput, + cb: (err: any, data?: ListTableRowsCommandOutput) => void + ): void; + public listTableRows( + args: ListTableRowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTableRowsCommandOutput) => void + ): void; + public listTableRows( + args: ListTableRowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListTableRowsCommandOutput) => void), + cb?: (err: any, data?: ListTableRowsCommandOutput) => void + ): Promise | void { + const command = new ListTableRowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The ListTables API allows you to retrieve a list of all the tables in a workbook. + *

        + */ + public listTables(args: ListTablesCommandInput, options?: __HttpHandlerOptions): Promise; + public listTables(args: ListTablesCommandInput, cb: (err: any, data?: ListTablesCommandOutput) => void): void; + public listTables( + args: ListTablesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListTablesCommandOutput) => void + ): void; + public listTables( + args: ListTablesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListTablesCommandOutput) => void), + cb?: (err: any, data?: ListTablesCommandOutput) => void + ): Promise | void { + const command = new ListTablesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The QueryTableRows API allows you to use a filter formula to query for specific rows in a table. + *

        + */ + public queryTableRows( + args: QueryTableRowsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public queryTableRows( + args: QueryTableRowsCommandInput, + cb: (err: any, data?: QueryTableRowsCommandOutput) => void + ): void; + public queryTableRows( + args: QueryTableRowsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: QueryTableRowsCommandOutput) => void + ): void; + public queryTableRows( + args: QueryTableRowsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: QueryTableRowsCommandOutput) => void), + cb?: (err: any, data?: QueryTableRowsCommandOutput) => void + ): Promise | void { + const command = new QueryTableRowsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        + * The StartTableDataImportJob API allows you to start an import job on a table. This API will only return + * the id of the job that was started. To find out the status of the import request, you need to call the + * DescribeTableDataImportJob API. + *

        + */ + public startTableDataImportJob( + args: StartTableDataImportJobCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public startTableDataImportJob( + args: StartTableDataImportJobCommandInput, + cb: (err: any, data?: StartTableDataImportJobCommandOutput) => void + ): void; + public startTableDataImportJob( + args: StartTableDataImportJobCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: StartTableDataImportJobCommandOutput) => void + ): void; + public startTableDataImportJob( + args: StartTableDataImportJobCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: StartTableDataImportJobCommandOutput) => void), + cb?: (err: any, data?: StartTableDataImportJobCommandOutput) => void + ): Promise | void { + const command = new StartTableDataImportJobCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } } diff --git a/clients/client-honeycode/HoneycodeClient.ts b/clients/client-honeycode/HoneycodeClient.ts index 0ef2dff88e49..950884a88ae4 100644 --- a/clients/client-honeycode/HoneycodeClient.ts +++ b/clients/client-honeycode/HoneycodeClient.ts @@ -1,8 +1,36 @@ +import { + BatchCreateTableRowsCommandInput, + BatchCreateTableRowsCommandOutput, +} from "./commands/BatchCreateTableRowsCommand"; +import { + BatchDeleteTableRowsCommandInput, + BatchDeleteTableRowsCommandOutput, +} from "./commands/BatchDeleteTableRowsCommand"; +import { + BatchUpdateTableRowsCommandInput, + BatchUpdateTableRowsCommandOutput, +} from "./commands/BatchUpdateTableRowsCommand"; +import { + BatchUpsertTableRowsCommandInput, + BatchUpsertTableRowsCommandOutput, +} from "./commands/BatchUpsertTableRowsCommand"; +import { + 
DescribeTableDataImportJobCommandInput, + DescribeTableDataImportJobCommandOutput, +} from "./commands/DescribeTableDataImportJobCommand"; import { GetScreenDataCommandInput, GetScreenDataCommandOutput } from "./commands/GetScreenDataCommand"; import { InvokeScreenAutomationCommandInput, InvokeScreenAutomationCommandOutput, } from "./commands/InvokeScreenAutomationCommand"; +import { ListTableColumnsCommandInput, ListTableColumnsCommandOutput } from "./commands/ListTableColumnsCommand"; +import { ListTableRowsCommandInput, ListTableRowsCommandOutput } from "./commands/ListTableRowsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "./commands/ListTablesCommand"; +import { QueryTableRowsCommandInput, QueryTableRowsCommandOutput } from "./commands/QueryTableRowsCommand"; +import { + StartTableDataImportJobCommandInput, + StartTableDataImportJobCommandOutput, +} from "./commands/StartTableDataImportJobCommand"; import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; import { EndpointsInputConfig, @@ -52,9 +80,33 @@ import { UrlParser as __UrlParser, } from "@aws-sdk/types"; -export type ServiceInputTypes = GetScreenDataCommandInput | InvokeScreenAutomationCommandInput; +export type ServiceInputTypes = + | BatchCreateTableRowsCommandInput + | BatchDeleteTableRowsCommandInput + | BatchUpdateTableRowsCommandInput + | BatchUpsertTableRowsCommandInput + | DescribeTableDataImportJobCommandInput + | GetScreenDataCommandInput + | InvokeScreenAutomationCommandInput + | ListTableColumnsCommandInput + | ListTableRowsCommandInput + | ListTablesCommandInput + | QueryTableRowsCommandInput + | StartTableDataImportJobCommandInput; -export type ServiceOutputTypes = GetScreenDataCommandOutput | InvokeScreenAutomationCommandOutput; +export type ServiceOutputTypes = + | BatchCreateTableRowsCommandOutput + | BatchDeleteTableRowsCommandOutput + | BatchUpdateTableRowsCommandOutput + | BatchUpsertTableRowsCommandOutput + | 
DescribeTableDataImportJobCommandOutput + | GetScreenDataCommandOutput + | InvokeScreenAutomationCommandOutput + | ListTableColumnsCommandOutput + | ListTableRowsCommandOutput + | ListTablesCommandOutput + | QueryTableRowsCommandOutput + | StartTableDataImportJobCommandOutput; export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { /** diff --git a/clients/client-honeycode/commands/BatchCreateTableRowsCommand.ts b/clients/client-honeycode/commands/BatchCreateTableRowsCommand.ts new file mode 100644 index 000000000000..faa8cda32a54 --- /dev/null +++ b/clients/client-honeycode/commands/BatchCreateTableRowsCommand.ts @@ -0,0 +1,97 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { BatchCreateTableRowsRequest, BatchCreateTableRowsResult } from "../models/models_0"; +import { + deserializeAws_restJson1BatchCreateTableRowsCommand, + serializeAws_restJson1BatchCreateTableRowsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type BatchCreateTableRowsCommandInput = BatchCreateTableRowsRequest; +export type BatchCreateTableRowsCommandOutput = BatchCreateTableRowsResult & __MetadataBearer; + +/** + *

        + * The BatchCreateTableRows API allows you to create one or more rows at the end of a table in a workbook. + * The API allows you to specify the values to set in some or all of the columns in the new rows. + *

        + *

        + * If a column is not explicitly set in a specific row, then the column level formula specified in the table + * will be applied to the new row. If there is no column level formula but the last row of the table has a + * formula, then that formula will be copied down to the new row. If there is no column level formula and + * no formula in the last row of the table, then that column will be left blank for the new rows. + *

        + */ +export class BatchCreateTableRowsCommand extends $Command< + BatchCreateTableRowsCommandInput, + BatchCreateTableRowsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: BatchCreateTableRowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "BatchCreateTableRowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: BatchCreateTableRowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: BatchCreateTableRowsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: BatchCreateTableRowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1BatchCreateTableRowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1BatchCreateTableRowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/BatchDeleteTableRowsCommand.ts 
b/clients/client-honeycode/commands/BatchDeleteTableRowsCommand.ts new file mode 100644 index 000000000000..6fc55534ac7e --- /dev/null +++ b/clients/client-honeycode/commands/BatchDeleteTableRowsCommand.ts @@ -0,0 +1,91 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { BatchDeleteTableRowsRequest, BatchDeleteTableRowsResult } from "../models/models_0"; +import { + deserializeAws_restJson1BatchDeleteTableRowsCommand, + serializeAws_restJson1BatchDeleteTableRowsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type BatchDeleteTableRowsCommandInput = BatchDeleteTableRowsRequest; +export type BatchDeleteTableRowsCommandOutput = BatchDeleteTableRowsResult & __MetadataBearer; + +/** + *

        + * The BatchDeleteTableRows API allows you to delete one or more rows from a table in a workbook. + * You need to specify the ids of the rows that you want to delete from the table. + *

        + */ +export class BatchDeleteTableRowsCommand extends $Command< + BatchDeleteTableRowsCommandInput, + BatchDeleteTableRowsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: BatchDeleteTableRowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "BatchDeleteTableRowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: BatchDeleteTableRowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: BatchDeleteTableRowsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: BatchDeleteTableRowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1BatchDeleteTableRowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1BatchDeleteTableRowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/BatchUpdateTableRowsCommand.ts 
b/clients/client-honeycode/commands/BatchUpdateTableRowsCommand.ts new file mode 100644 index 000000000000..1b0995693daf --- /dev/null +++ b/clients/client-honeycode/commands/BatchUpdateTableRowsCommand.ts @@ -0,0 +1,97 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { BatchUpdateTableRowsRequest, BatchUpdateTableRowsResult } from "../models/models_0"; +import { + deserializeAws_restJson1BatchUpdateTableRowsCommand, + serializeAws_restJson1BatchUpdateTableRowsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type BatchUpdateTableRowsCommandInput = BatchUpdateTableRowsRequest; +export type BatchUpdateTableRowsCommandOutput = BatchUpdateTableRowsResult & __MetadataBearer; + +/** + *

        + * The BatchUpdateTableRows API allows you to update one or more rows in a table in a workbook. + *

        + *

        + * You can specify the values to set in some or all of the columns in the table for the specified + * rows. + * If a column is not explicitly specified in a particular row, then that column will not be updated + * for that row. To clear out the data in a specific cell, you need to set the value as an empty string + * (""). + *

        + */ +export class BatchUpdateTableRowsCommand extends $Command< + BatchUpdateTableRowsCommandInput, + BatchUpdateTableRowsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: BatchUpdateTableRowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "BatchUpdateTableRowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: BatchUpdateTableRowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: BatchUpdateTableRowsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: BatchUpdateTableRowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1BatchUpdateTableRowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1BatchUpdateTableRowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/BatchUpsertTableRowsCommand.ts 
b/clients/client-honeycode/commands/BatchUpsertTableRowsCommand.ts new file mode 100644 index 000000000000..080d167b1b76 --- /dev/null +++ b/clients/client-honeycode/commands/BatchUpsertTableRowsCommand.ts @@ -0,0 +1,100 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { BatchUpsertTableRowsRequest, BatchUpsertTableRowsResult } from "../models/models_0"; +import { + deserializeAws_restJson1BatchUpsertTableRowsCommand, + serializeAws_restJson1BatchUpsertTableRowsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type BatchUpsertTableRowsCommandInput = BatchUpsertTableRowsRequest; +export type BatchUpsertTableRowsCommandOutput = BatchUpsertTableRowsResult & __MetadataBearer; + +/** + *

        + * The BatchUpsertTableRows API allows you to upsert one or more rows in a table. The upsert + * operation takes a filter expression as input and evaluates it to find matching rows on the destination + * table. If matching rows are found, it will update the cells in the matching rows to new values specified + * in the request. If no matching rows are found, a new row is added at the end of the table and the cells in + * that row are set to the new values specified in the request. + *

        + *

        + * You can specify the values to set in some or all of the columns in the table for the + * matching or newly appended rows. If a column is not explicitly specified for a particular row, then that + * column will not be updated for that row. To clear out the data in a specific cell, you need to set the value + * as an empty string (""). + *

        + */ +export class BatchUpsertTableRowsCommand extends $Command< + BatchUpsertTableRowsCommandInput, + BatchUpsertTableRowsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: BatchUpsertTableRowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "BatchUpsertTableRowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: BatchUpsertTableRowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: BatchUpsertTableRowsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: BatchUpsertTableRowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1BatchUpsertTableRowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1BatchUpsertTableRowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/DescribeTableDataImportJobCommand.ts 
b/clients/client-honeycode/commands/DescribeTableDataImportJobCommand.ts new file mode 100644 index 000000000000..1256b20cefc4 --- /dev/null +++ b/clients/client-honeycode/commands/DescribeTableDataImportJobCommand.ts @@ -0,0 +1,93 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { DescribeTableDataImportJobRequest, DescribeTableDataImportJobResult } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeTableDataImportJobCommand, + serializeAws_restJson1DescribeTableDataImportJobCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeTableDataImportJobCommandInput = DescribeTableDataImportJobRequest; +export type DescribeTableDataImportJobCommandOutput = DescribeTableDataImportJobResult & __MetadataBearer; + +/** + *

        + * The DescribeTableDataImportJob API allows you to retrieve the status and details of a table data import job. + *

        + */ +export class DescribeTableDataImportJobCommand extends $Command< + DescribeTableDataImportJobCommandInput, + DescribeTableDataImportJobCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeTableDataImportJobCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "DescribeTableDataImportJobCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeTableDataImportJobRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeTableDataImportJobResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeTableDataImportJobCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeTableDataImportJobCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_restJson1DescribeTableDataImportJobCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-honeycode/commands/ListTableColumnsCommand.ts b/clients/client-honeycode/commands/ListTableColumnsCommand.ts new file mode 100644 index 000000000000..f5a47591b2c7 --- /dev/null +++ b/clients/client-honeycode/commands/ListTableColumnsCommand.ts @@ -0,0 +1,90 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { ListTableColumnsRequest, ListTableColumnsResult } from "../models/models_0"; +import { + deserializeAws_restJson1ListTableColumnsCommand, + serializeAws_restJson1ListTableColumnsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListTableColumnsCommandInput = ListTableColumnsRequest; +export type ListTableColumnsCommandOutput = ListTableColumnsResult & __MetadataBearer; + +/** + *

        + * The ListTableColumns API allows you to retrieve a list of all the columns in a table in a workbook. + *

        + */ +export class ListTableColumnsCommand extends $Command< + ListTableColumnsCommandInput, + ListTableColumnsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListTableColumnsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "ListTableColumnsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListTableColumnsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListTableColumnsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListTableColumnsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListTableColumnsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListTableColumnsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/ListTableRowsCommand.ts b/clients/client-honeycode/commands/ListTableRowsCommand.ts new file 
mode 100644 index 000000000000..74ed39a5bc8f --- /dev/null +++ b/clients/client-honeycode/commands/ListTableRowsCommand.ts @@ -0,0 +1,90 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { ListTableRowsRequest, ListTableRowsResult } from "../models/models_0"; +import { + deserializeAws_restJson1ListTableRowsCommand, + serializeAws_restJson1ListTableRowsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListTableRowsCommandInput = ListTableRowsRequest; +export type ListTableRowsCommandOutput = ListTableRowsResult & __MetadataBearer; + +/** + *

        + * The ListTableRows API allows you to retrieve a list of all the rows in a table in a workbook. + *

        + */ +export class ListTableRowsCommand extends $Command< + ListTableRowsCommandInput, + ListTableRowsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListTableRowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "ListTableRowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListTableRowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListTableRowsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListTableRowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListTableRowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListTableRowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/ListTablesCommand.ts b/clients/client-honeycode/commands/ListTablesCommand.ts new file mode 100644 index 
000000000000..fcae066eb886 --- /dev/null +++ b/clients/client-honeycode/commands/ListTablesCommand.ts @@ -0,0 +1,90 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { ListTablesRequest, ListTablesResult } from "../models/models_0"; +import { + deserializeAws_restJson1ListTablesCommand, + serializeAws_restJson1ListTablesCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListTablesCommandInput = ListTablesRequest; +export type ListTablesCommandOutput = ListTablesResult & __MetadataBearer; + +/** + *

        + * The ListTables API allows you to retrieve a list of all the tables in a workbook. + *

        + */ +export class ListTablesCommand extends $Command< + ListTablesCommandInput, + ListTablesCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListTablesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "ListTablesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListTablesRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListTablesResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListTablesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListTablesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListTablesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/QueryTableRowsCommand.ts b/clients/client-honeycode/commands/QueryTableRowsCommand.ts new file mode 100644 index 000000000000..41c1daf10589 --- /dev/null 
+++ b/clients/client-honeycode/commands/QueryTableRowsCommand.ts @@ -0,0 +1,90 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { QueryTableRowsRequest, QueryTableRowsResult } from "../models/models_0"; +import { + deserializeAws_restJson1QueryTableRowsCommand, + serializeAws_restJson1QueryTableRowsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type QueryTableRowsCommandInput = QueryTableRowsRequest; +export type QueryTableRowsCommandOutput = QueryTableRowsResult & __MetadataBearer; + +/** + *

        + * The QueryTableRows API allows you to use a filter formula to query for specific rows in a table. + *

        + */ +export class QueryTableRowsCommand extends $Command< + QueryTableRowsCommandInput, + QueryTableRowsCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: QueryTableRowsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "QueryTableRowsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: QueryTableRowsRequest.filterSensitiveLog, + outputFilterSensitiveLog: QueryTableRowsResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: QueryTableRowsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1QueryTableRowsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1QueryTableRowsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/commands/StartTableDataImportJobCommand.ts b/clients/client-honeycode/commands/StartTableDataImportJobCommand.ts new file 
mode 100644 index 000000000000..89c2f4a5c05c --- /dev/null +++ b/clients/client-honeycode/commands/StartTableDataImportJobCommand.ts @@ -0,0 +1,92 @@ +import { HoneycodeClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../HoneycodeClient"; +import { StartTableDataImportJobRequest, StartTableDataImportJobResult } from "../models/models_0"; +import { + deserializeAws_restJson1StartTableDataImportJobCommand, + serializeAws_restJson1StartTableDataImportJobCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type StartTableDataImportJobCommandInput = StartTableDataImportJobRequest; +export type StartTableDataImportJobCommandOutput = StartTableDataImportJobResult & __MetadataBearer; + +/** + *

        + * The StartTableDataImportJob API allows you to start an import job on a table. This API will only return + * the id of the job that was started. To find out the status of the import request, you need to call the + * DescribeTableDataImportJob API. + *

        + */ +export class StartTableDataImportJobCommand extends $Command< + StartTableDataImportJobCommandInput, + StartTableDataImportJobCommandOutput, + HoneycodeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: StartTableDataImportJobCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: HoneycodeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "HoneycodeClient"; + const commandName = "StartTableDataImportJobCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: StartTableDataImportJobRequest.filterSensitiveLog, + outputFilterSensitiveLog: StartTableDataImportJobResult.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: StartTableDataImportJobCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1StartTableDataImportJobCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1StartTableDataImportJobCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-honeycode/index.ts 
b/clients/client-honeycode/index.ts index f8fa033b7082..2d232be50101 100644 --- a/clients/client-honeycode/index.ts +++ b/clients/client-honeycode/index.ts @@ -1,5 +1,20 @@ export * from "./HoneycodeClient"; export * from "./Honeycode"; +export * from "./commands/BatchCreateTableRowsCommand"; +export * from "./commands/BatchDeleteTableRowsCommand"; +export * from "./commands/BatchUpdateTableRowsCommand"; +export * from "./commands/BatchUpsertTableRowsCommand"; +export * from "./commands/DescribeTableDataImportJobCommand"; export * from "./commands/GetScreenDataCommand"; export * from "./commands/InvokeScreenAutomationCommand"; +export * from "./commands/ListTableColumnsCommand"; +export * from "./pagination/ListTableColumnsPaginator"; +export * from "./commands/ListTableRowsCommand"; +export * from "./pagination/ListTableRowsPaginator"; +export * from "./commands/ListTablesCommand"; +export * from "./pagination/ListTablesPaginator"; +export * from "./commands/QueryTableRowsCommand"; +export * from "./pagination/QueryTableRowsPaginator"; +export * from "./commands/StartTableDataImportJobCommand"; +export * from "./pagination/Interfaces"; export * from "./models/index"; diff --git a/clients/client-honeycode/models/models_0.ts b/clients/client-honeycode/models/models_0.ts index 0c075107321d..99284e7d7db0 100644 --- a/clients/client-honeycode/models/models_0.ts +++ b/clients/client-honeycode/models/models_0.ts @@ -4,7 +4,7 @@ import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; /** *

        * You do not have sufficient access to perform this action. Check that the workbook is owned by you and your - * IAM policy allows access to the screen/automation in the request. + * IAM policy allows access to the resource in the request. *

        */ export interface AccessDeniedException extends __SmithyException, $MetadataBearer { @@ -49,229 +49,174 @@ export namespace AutomationExecutionTimeoutException { }); } -export enum Format { - Accounting = "ACCOUNTING", - Auto = "AUTO", - Contact = "CONTACT", - Currency = "CURRENCY", - Date = "DATE", - DateTime = "DATE_TIME", - Number = "NUMBER", - Percentage = "PERCENTAGE", - Rowlink = "ROWLINK", - Text = "TEXT", - Time = "TIME", -} - /** - *

        Metadata for column in the table.

        + *

        + * CellInput object contains the data needed to create or update cells in a table. + *

        */ -export interface ColumnMetadata { - /** - *

        The name of the column.

        - */ - name: string | undefined; - +export interface CellInput { /** - *

        The format of the column.

        + *

        + * Fact represents the data that is entered into a cell. This data can be free text or a formula. Formulas need + * to start with the equals (=) sign. + *

        */ - format: Format | string | undefined; + fact?: string; } -export namespace ColumnMetadata { - export const filterSensitiveLog = (obj: ColumnMetadata): any => ({ +export namespace CellInput { + export const filterSensitiveLog = (obj: CellInput): any => ({ ...obj, - ...(obj.name && { name: SENSITIVE_STRING }), + ...(obj.fact && { fact: SENSITIVE_STRING }), }); } /** - *

        The data in a particular data cell defined on the screen.

        + *

        + * Data needed to create a single row in a table as part of the BatchCreateTableRows request. + *

        */ -export interface DataItem { - /** - *

        The formatted value of the data. e.g. John Smith.

        - */ - formattedValue?: string; - +export interface CreateRowData { /** *

        - * The overrideFormat is optional and is specified only if a particular row of data has a different format for - * the data than the default format defined on the screen or the table. + * An external identifier that represents the single row that is being created as part of the + * BatchCreateTableRows request. This can be any string that you can use to identify the row in the request. + * The BatchCreateTableRows API puts the batch item id in the results to allow you to link data in the + * request to data in the results. *

        */ - overrideFormat?: Format | string; + batchItemId: string | undefined; /** - *

        The raw value of the data. e.g. jsmith@example.com

        + *

        + * A map representing the cells to create in the new row. The key is the column id of the + * cell and the value is the CellInput object that represents the data to set in that cell. + *

        */ - rawValue?: string; + cellsToCreate: { [key: string]: CellInput } | undefined; } -export namespace DataItem { - export const filterSensitiveLog = (obj: DataItem): any => ({ +export namespace CreateRowData { + export const filterSensitiveLog = (obj: CreateRowData): any => ({ ...obj, + ...(obj.cellsToCreate && { + cellsToCreate: Object.entries(obj.cellsToCreate).reduce( + (acc: any, [key, value]: [string, CellInput]) => ({ + ...acc, + [key]: CellInput.filterSensitiveLog(value), + }), + {} + ), + }), }); } -/** - *

        The input variables to the app to be used by the InvokeScreenAutomation action request.

        - */ -export interface VariableValue { +export interface BatchCreateTableRowsRequest { /** - *

        Raw value of the variable.

        + *

        The ID of the workbook where the new rows are being added.

        + *

        + * If a workbook with the specified ID could not be found, this API throws ResourceNotFoundException. + *

        */ - rawValue: string | undefined; -} - -export namespace VariableValue { - export const filterSensitiveLog = (obj: VariableValue): any => ({ - ...obj, - }); -} + workbookId: string | undefined; -export interface GetScreenDataRequest { /** - *

        - * This parameter is optional. If a nextToken is not specified, the API returns the first page of data. - *

        + *

        The ID of the table where the new rows are being added.

        *

        - * Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API - * will throw ValidationException. + * If a table with the specified ID could not be found, this API throws ResourceNotFoundException. *

        */ - nextToken?: string; + tableId: string | undefined; /** *

        - * Variables are optional and are needed only if the screen requires them to render correctly. Variables are - * specified as a map where the key is the name of the variable as defined on the screen. The value is an - * object which currently has only one property, rawValue, which holds the value of the variable to be passed - * to the screen. + * The list of rows to create at the end of the table. Each item in this list needs to have a batch item id + * to uniquely identify the element in the request and the cells to create for that row. + * You need to specify at least one item in this list. + *

        + *

        + * Note that if one of the column ids in any of the rows in the request does not exist in the table, then the + * request fails and no updates are made to the table. *

        */ - variables?: { [key: string]: VariableValue }; - - /** - *

        The ID of the app that contains the screem.

        - */ - appId: string | undefined; + rowsToCreate: CreateRowData[] | undefined; /** *

        - * The number of results to be returned on a single page. - * Specify a number between 1 and 100. The maximum value is 100. + * The request token for performing the batch create operation. + * Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error + * like a failed network connection, you can retry the call with the same request token. The service ensures + * that if the first call using that request token is successfully performed, the second call will not perform + * the operation again. *

        *

        - * This parameter is optional. If you don't specify this parameter, the default page size is 100. + * Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests + * spanning hours or days. *

        */ - maxResults?: number; - - /** - *

        The ID of the screen.

        - */ - screenId: string | undefined; - - /** - *

        The ID of the workbook that contains the screen.

        - */ - workbookId: string | undefined; -} - -export namespace GetScreenDataRequest { - export const filterSensitiveLog = (obj: GetScreenDataRequest): any => ({ - ...obj, - ...(obj.variables && { variables: SENSITIVE_STRING }), - }); -} - -/** - *

        A single row in the ResultSet.

        - */ -export interface ResultRow { - /** - *

        The ID for a particular row.

        - */ - rowId?: string; - - /** - *

        List of all the data cells in a row.

        - */ - dataItems: DataItem[] | undefined; + clientRequestToken?: string; } -export namespace ResultRow { - export const filterSensitiveLog = (obj: ResultRow): any => ({ +export namespace BatchCreateTableRowsRequest { + export const filterSensitiveLog = (obj: BatchCreateTableRowsRequest): any => ({ ...obj, - ...(obj.dataItems && { dataItems: SENSITIVE_STRING }), + ...(obj.rowsToCreate && { rowsToCreate: obj.rowsToCreate.map((item) => CreateRowData.filterSensitiveLog(item)) }), }); } /** *

        - * ResultSet contains the results of the request for a single block or list defined on the screen. + * A single item in a batch that failed to perform the intended action because of an error preventing it from + * succeeding. *

        */ -export interface ResultSet { +export interface FailedBatchItem { /** *

        - * List of rows returned by the request. Each row has a row Id and a list of data cells in that row. The data - * cells will be present in the same order as they are defined in the header. + * The id of the batch item that failed. This is the batch item id for the BatchCreateTableRows and + * BatchUpsertTableRows operations and the row id for the BatchUpdateTableRows and BatchDeleteTableRows + * operations. *

        */ - rows: ResultRow[] | undefined; + id: string | undefined; /** *

        - * List of headers for all the data cells in the block. The header identifies the name and default format of - * the data cell. Data cells appear in the same order in all rows as defined in the header. The names and - * formats are not repeated in the rows. If a particular row does not have a value for a data cell, a blank - * value is used. - *

        - *

        - * For example, a task list that displays the task name, due date and assigned person might have headers - * [ { "name": "Task Name"}, {"name": "Due Date", "format": "DATE"}, {"name": "Assigned", "format": "CONTACT"} ]. - * Every row in the result will have the task name as the first item, due date as the second item and assigned - * person as the third item. If a particular task does not have a due date, that row will still have a blank - * value in the second element and the assigned person will still be in the third element. + * The error message that indicates why the batch item failed. *

        */ - headers: ColumnMetadata[] | undefined; + errorMessage: string | undefined; } -export namespace ResultSet { - export const filterSensitiveLog = (obj: ResultSet): any => ({ +export namespace FailedBatchItem { + export const filterSensitiveLog = (obj: FailedBatchItem): any => ({ ...obj, - ...(obj.headers && { headers: obj.headers.map((item) => ColumnMetadata.filterSensitiveLog(item)) }), }); } -export interface GetScreenDataResult { +export interface BatchCreateTableRowsResult { /** - *

        - * Provides the pagination token to load the next page if there are more results matching the request. If a - * pagination token is not present in the response, it means that all data matching the query has been loaded. - *

        + *

        The updated workbook cursor after adding the new rows at the end of the table.

        */ - nextToken?: string; + workbookCursor: number | undefined; /** - *

        - * Indicates the cursor of the workbook at which the data returned by this workbook is read. Workbook cursor - * keeps increasing with every update and the increments are not sequential. - *

        + *

        The map of batch item id to the row id that was created for that item.

        */ - workbookCursor: number | undefined; + createdRows: { [key: string]: string } | undefined; /** - *

        A map of all the rows on the screen keyed by block name.

        + *

        + * The list of batch items in the request that could not be added to the table. Each element in this list + * contains one item from the request that could not be added to the table along with the reason why + * that item could not be added. + *

        */ - results: { [key: string]: ResultSet } | undefined; + failedBatchItems?: FailedBatchItem[]; } -export namespace GetScreenDataResult { - export const filterSensitiveLog = (obj: GetScreenDataResult): any => ({ +export namespace BatchCreateTableRowsResult { + export const filterSensitiveLog = (obj: BatchCreateTableRowsResult): any => ({ ...obj, }); } @@ -307,7 +252,7 @@ export namespace RequestTimeoutException { } /** - *

        A Workbook, App, Screen or Screen Automation was not found with the given ID.

        + *

        A Workbook, Table, App, Screen or Screen Automation was not found with the given ID.

        */ export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { name: "ResourceNotFoundException"; @@ -321,6 +266,23 @@ export namespace ResourceNotFoundException { }); } +/** + *

        + * The request caused service quota to be breached. + *

        + */ +export interface ServiceQuotaExceededException extends __SmithyException, $MetadataBearer { + name: "ServiceQuotaExceededException"; + $fault: "client"; + message: string | undefined; +} + +export namespace ServiceQuotaExceededException { + export const filterSensitiveLog = (obj: ServiceQuotaExceededException): any => ({ + ...obj, + }); +} + /** *

        Remote service is unreachable.

        */ @@ -368,26 +330,41 @@ export namespace ValidationException { }); } -export interface InvokeScreenAutomationRequest { +export interface BatchDeleteTableRowsRequest { /** - *

        The ID of the workbook that contains the screen automation.

        + *

        The ID of the workbook where the rows are being deleted.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        */ workbookId: string | undefined; + /** + *

        The ID of the table where the rows are being deleted.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + /** *

        - * The row ID for the automation if the automation is defined inside a block with source or list. + * The list of row ids to delete from the table. You need to specify at least one row id in this list. + *

        + *

        + * Note that if one of the row ids provided in the request does not exist in the table, then the request fails + * and no rows are deleted from the table. *

        */ - rowId?: string; + rowIds: string[] | undefined; /** *

        - * The request token for performing the automation action. + * The request token for performing the delete action. * Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error * like a failed network connection, you can retry the call with the same request token. The service ensures - * that if the first call using that request token is successfully performed, the second call will return the - * response of the previous call rather than performing the action again. + * that if the first call using that request token is successfully performed, the second call will not perform + * the action again. *

        *

        * Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests @@ -395,45 +372,1001 @@ export interface InvokeScreenAutomationRequest { *

        */ clientRequestToken?: string; +} + +export namespace BatchDeleteTableRowsRequest { + export const filterSensitiveLog = (obj: BatchDeleteTableRowsRequest): any => ({ + ...obj, + }); +} +export interface BatchDeleteTableRowsResult { /** - *

        The ID of the screen that contains the screen automation.

        + *

        The updated workbook cursor after deleting the rows from the table.

        */ - screenId: string | undefined; + workbookCursor: number | undefined; /** - *

        The ID of the app that contains the screen automation.

        + *

        + * The list of row ids in the request that could not be deleted from the table. Each element in this list + * contains one row id from the request that could not be deleted along with the reason why that item could not + * be deleted. + *

        */ - appId: string | undefined; + failedBatchItems?: FailedBatchItem[]; +} + +export namespace BatchDeleteTableRowsResult { + export const filterSensitiveLog = (obj: BatchDeleteTableRowsResult): any => ({ + ...obj, + }); +} +/** + *

        + * Data needed to create a single row in a table as part of the BatchCreateTableRows request. + *

        + */ +export interface UpdateRowData { /** - *

        The ID of the automation action to be performed.

        + *

        + * The id of the row that needs to be updated. + *

        */ - screenAutomationId: string | undefined; + rowId: string | undefined; /** *

        - * Variables are optional and are needed only if the screen requires them to render correctly. Variables are - * specified as a map where the key is the name of the variable as defined on the screen. The value is an - * object which currently has only one property, rawValue, which holds the value of the variable to be passed - * to the screen. + * A map representing the cells to update in the given row. The key is the column id of the + * cell and the value is the CellInput object that represents the data to set in that cell. *

        */ - variables?: { [key: string]: VariableValue }; + cellsToUpdate: { [key: string]: CellInput } | undefined; } -export namespace InvokeScreenAutomationRequest { - export const filterSensitiveLog = (obj: InvokeScreenAutomationRequest): any => ({ +export namespace UpdateRowData { + export const filterSensitiveLog = (obj: UpdateRowData): any => ({ ...obj, - ...(obj.variables && { variables: SENSITIVE_STRING }), + ...(obj.cellsToUpdate && { + cellsToUpdate: Object.entries(obj.cellsToUpdate).reduce( + (acc: any, [key, value]: [string, CellInput]) => ({ + ...acc, + [key]: CellInput.filterSensitiveLog(value), + }), + {} + ), + }), }); } -export interface InvokeScreenAutomationResult { +export interface BatchUpdateTableRowsRequest { /** - *

        The updated workbook cursor after performing the automation action.

        + *

        The ID of the workbook where the rows are being updated.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        */ - workbookCursor: number | undefined; + workbookId: string | undefined; + + /** + *

        The ID of the table where the rows are being updated.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + + /** + *

        + * The list of rows to update in the table. Each item in this list needs to contain the row id to update + * along with the map of column id to cell values for each column in that row that needs to be updated. + * You need to specify at least one row in this list, and for each row, you need to specify at least one + * column to update. + *

        + *

        + * Note that if one of the row or column ids in the request does not exist in the table, then the request fails + * and no updates are made to the table. + *

        + */ + rowsToUpdate: UpdateRowData[] | undefined; + + /** + *

        + * The request token for performing the update action. + * Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error + * like a failed network connection, you can retry the call with the same request token. The service ensures + * that if the first call using that request token is successfully performed, the second call will not perform + * the action again. + *

        + *

        + * Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests + * spanning hours or days. + *

        + */ + clientRequestToken?: string; +} + +export namespace BatchUpdateTableRowsRequest { + export const filterSensitiveLog = (obj: BatchUpdateTableRowsRequest): any => ({ + ...obj, + ...(obj.rowsToUpdate && { rowsToUpdate: obj.rowsToUpdate.map((item) => UpdateRowData.filterSensitiveLog(item)) }), + }); +} + +export interface BatchUpdateTableRowsResult { + /** + *

        The updated workbook cursor after adding the new rows at the end of the table.

        + */ + workbookCursor: number | undefined; + + /** + *

        + * The list of batch items in the request that could not be updated in the table. Each element in this list + * contains one item from the request that could not be updated in the table along with the reason why + * that item could not be updated. + *

        + */ + failedBatchItems?: FailedBatchItem[]; +} + +export namespace BatchUpdateTableRowsResult { + export const filterSensitiveLog = (obj: BatchUpdateTableRowsResult): any => ({ + ...obj, + }); +} + +/** + *

        + * An object that represents a filter formula along with the id of the context row under which the filter + * function needs to evaluate. + *

        + */ +export interface Filter { + /** + *

        + * A formula representing a filter function that returns zero or more matching rows from a table. Valid + * formulas in this field return a list of rows from a table. The most common ways of writing a formula to + * return a list of rows are to use the FindRow() or Filter() functions. Any other formula that returns zero or + * more rows is also acceptable. For example, you can use a formula that points to a cell that contains a + * filter function. + *

        + */ + formula: string | undefined; + + /** + *

        + * The optional contextRowId attribute can be used to specify the row id of the context row if the filter + * formula contains unqualified references to table columns and needs a context row to evaluate them + * successfully. + *

        + */ + contextRowId?: string; +} + +export namespace Filter { + export const filterSensitiveLog = (obj: Filter): any => ({ + ...obj, + ...(obj.formula && { formula: SENSITIVE_STRING }), + }); +} + +/** + *

        + * Data needed to upsert rows in a table as part of a single item in the BatchUpsertTableRows request. + *

        + */ +export interface UpsertRowData { + /** + *

        + * An external identifier that represents a single item in the request that is being upserted as part of the + * BatchUpsertTableRows request. This can be any string that you can use to identify the item in the request. + * The BatchUpsertTableRows API puts the batch item id in the results to allow you to link data in the + * request to data in the results. + *

        + */ + batchItemId: string | undefined; + + /** + *

        + * The filter formula to use to find existing matching rows to update. The formula needs to return zero or more + * rows. If the formula returns 0 rows, then a new row will be appended in the target table. If the formula + * returns one or more rows, then the returned rows will be updated. + *

        + *

        + * Note that the filter formula needs to return rows from the target table for the upsert operation to succeed. + * If the filter formula has a syntax error or it doesn't evaluate to zero or more rows in the target table + * for any one item in the input list, then the entire BatchUpsertTableRows request fails and no updates are + * made to the table. + *

        + */ + filter: Filter | undefined; + + /** + *

        + * A map representing the cells to update for the matching rows or an appended row. The key is the column id + * of the cell and the value is the CellInput object that represents the data to set in that cell. + *

        + */ + cellsToUpdate: { [key: string]: CellInput } | undefined; +} + +export namespace UpsertRowData { + export const filterSensitiveLog = (obj: UpsertRowData): any => ({ + ...obj, + ...(obj.filter && { filter: Filter.filterSensitiveLog(obj.filter) }), + ...(obj.cellsToUpdate && { + cellsToUpdate: Object.entries(obj.cellsToUpdate).reduce( + (acc: any, [key, value]: [string, CellInput]) => ({ + ...acc, + [key]: CellInput.filterSensitiveLog(value), + }), + {} + ), + }), + }); +} + +export interface BatchUpsertTableRowsRequest { + /** + *

        The ID of the workbook where the rows are being upserted.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the table where the rows are being upserted.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + + /** + *

        + * The list of rows to upsert in the table. Each item in this list needs to have a batch item id to uniquely + * identify the element in the request, a filter expression to find the rows to update for that element + * and the cell values to set for each column in the upserted rows. You need to specify + * at least one item in this list. + *

        + *

        + * Note that if one of the filter formulas in the request fails to evaluate because of an error or one of the + * column ids in any of the rows does not exist in the table, then the request fails + * and no updates are made to the table. + *

        + */ + rowsToUpsert: UpsertRowData[] | undefined; + + /** + *

        + * The request token for performing the update action. + * Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error + * like a failed network connection, you can retry the call with the same request token. The service ensures + * that if the first call using that request token is successfully performed, the second call will not perform + * the action again. + *

        + *

        + * Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests + * spanning hours or days. + *

        + */ + clientRequestToken?: string; +} + +export namespace BatchUpsertTableRowsRequest { + export const filterSensitiveLog = (obj: BatchUpsertTableRowsRequest): any => ({ + ...obj, + ...(obj.rowsToUpsert && { rowsToUpsert: obj.rowsToUpsert.map((item) => UpsertRowData.filterSensitiveLog(item)) }), + }); +} + +export enum UpsertAction { + Appended = "APPENDED", + Updated = "UPDATED", +} + +/** + *

        + * An object that represents the result of a single upsert row request. + *

        + */ +export interface UpsertRowsResult { + /** + *

        + * The list of row ids that were changed as part of an upsert row operation. If the upsert resulted in an + * update, this list could potentially contain multiple rows that matched the filter and hence got updated. + * If the upsert resulted in an append, this list would only have the single row that was appended. + *

        + */ + rowIds: string[] | undefined; + + /** + *

        + * The result of the upsert action. + *

        + */ + upsertAction: UpsertAction | string | undefined; +} + +export namespace UpsertRowsResult { + export const filterSensitiveLog = (obj: UpsertRowsResult): any => ({ + ...obj, + }); +} + +export interface BatchUpsertTableRowsResult { + /** + *

        + * A map with the batch item id as the key and the result of the upsert operation as the value. The + * result of the upsert operation specifies whether existing rows were updated or a new row was appended, along + * with the list of row ids that were affected. + *

        + */ + rows: { [key: string]: UpsertRowsResult } | undefined; + + /** + *

        The updated workbook cursor after updating or appending rows in the table.

        + */ + workbookCursor: number | undefined; + + /** + *

        + * The list of batch items in the request that could not be updated or appended in the table. Each element in + * this list contains one item from the request that could not be updated in the table along with the reason + * why that item could not be updated or appended. + *

        + */ + failedBatchItems?: FailedBatchItem[]; +} + +export namespace BatchUpsertTableRowsResult { + export const filterSensitiveLog = (obj: BatchUpsertTableRowsResult): any => ({ + ...obj, + }); +} + +export enum Format { + Accounting = "ACCOUNTING", + Auto = "AUTO", + Contact = "CONTACT", + Currency = "CURRENCY", + Date = "DATE", + DateTime = "DATE_TIME", + Number = "NUMBER", + Percentage = "PERCENTAGE", + Rowlink = "ROWLINK", + Text = "TEXT", + Time = "TIME", +} + +/** + *

        An object that represents a single cell in a table.

        + */ +export interface Cell { + /** + *

        + * The formula contained in the cell. This field is empty if a cell does not have a formula. + *

        + */ + formula?: string; + + /** + *

        The format of the cell. If this field is empty, then the format is either not specified in the + * workbook or the format is set to AUTO.

        + */ + format?: Format | string; + + /** + *

        + * The raw value of the data contained in the cell. The raw value depends on the format of the data in the + * cell. However the attribute in the API return value is always a string containing the raw value. + *

        + *

        + * Cells with format DATE, DATE_TIME or TIME have the raw value as a floating point + * number where the whole number represents the number of days since 1/1/1900 and the fractional part + * represents the fraction of the day since midnight. For example, a cell with date 11/3/2020 has the raw value + * "44138". A cell with the time 9:00 AM has the raw value "0.375" and a cell with date/time value of + * 11/3/2020 9:00 AM has the raw value "44138.375". Notice that even though the raw value is a number in all + * three cases, it is still represented as a string. + *

        + *

        + * Cells with format NUMBER, CURRENCY, PERCENTAGE and ACCOUNTING have the raw value of the data as the number + * representing the data being displayed. For example, the number 1.325 with two decimal places in the format + * will have it's raw value as "1.325" and formatted value as "1.33". A currency value for + * $10 will have the raw value as "10" and formatted value as "$10.00". A value representing 20% with two + * decimal places in the format will have its raw value as "0.2" and the formatted value as "20.00%". An + * accounting value of -$25 will have "-25" as the raw value and "$ (25.00)" as the formatted value. + *

        + *

        + * Cells with format TEXT will have the raw text as the raw value. For example, a cell with text "John Smith" + * will have "John Smith" as both the raw value and the formatted value. + *

        + *

        + * Cells with format CONTACT will have the name of the contact as a formatted value and the email address of + * the contact as the raw value. For example, a contact for John Smith will have "John Smith" as the + * formatted value and "john.smith@example.com" as the raw value. + *

        + *

        + * Cells with format ROWLINK (aka picklist) will have the first column of the linked row as the formatted value + * and the row id of the linked row as the raw value. For example, a cell containing a picklist to a table + * that displays task status might have "Completed" as the formatted value and + * "row:dfcefaee-5b37-4355-8f28-40c3e4ff5dd4/ca432b2f-b8eb-431d-9fb5-cbe0342f9f03" as the raw value. + *

        + *

        + * Cells with format AUTO or cells without any format that are auto-detected as one of the formats above will + * contain the raw and formatted values as mentioned above, based on the auto-detected formats. If there is no + * auto-detected format, the raw and formatted values will be the same as the data in the cell. + *

        + */ + rawValue?: string; + + /** + *

        + * The formatted value of the cell. This is the value that you see displayed in the cell in the UI. + *

        + *

        + * Note that the formatted value of a cell is always represented as a string irrespective of the data that is + * stored in the cell. For example, if a cell contains a date, the formatted value of the cell is the string + * representation of the formatted date being shown in the cell in the UI. See details in the rawValue field + * below for how cells of different formats will have different raw and formatted values. + *

        + */ + formattedValue?: string; +} + +export namespace Cell { + export const filterSensitiveLog = (obj: Cell): any => ({ + ...obj, + ...(obj.formula && { formula: SENSITIVE_STRING }), + }); +} + +/** + *

        Metadata for column in the table.

        + */ +export interface ColumnMetadata { + /** + *

        The name of the column.

        + */ + name: string | undefined; + + /** + *

        The format of the column.

        + */ + format: Format | string | undefined; +} + +export namespace ColumnMetadata { + export const filterSensitiveLog = (obj: ColumnMetadata): any => ({ + ...obj, + ...(obj.name && { name: SENSITIVE_STRING }), + }); +} + +/** + *

        The data in a particular data cell defined on the screen.

        + */ +export interface DataItem { + /** + *

        + * The overrideFormat is optional and is specified only if a particular row of data has a different format for + * the data than the default format defined on the screen or the table. + *

        + */ + overrideFormat?: Format | string; + + /** + *

        The raw value of the data. e.g. jsmith@example.com

        + */ + rawValue?: string; + + /** + *

        The formatted value of the data. e.g. John Smith.

        + */ + formattedValue?: string; +} + +export namespace DataItem { + export const filterSensitiveLog = (obj: DataItem): any => ({ + ...obj, + }); +} + +export enum ImportDataCharacterEncoding { + ISO_8859_1 = "ISO-8859-1", + US_ASCII = "US-ASCII", + UTF_16 = "UTF-16", + UTF_16BE = "UTF-16BE", + UTF_16LE = "UTF-16LE", + UTF_8 = "UTF-8", +} + +/** + *

        + * An object that contains the options relating to parsing delimited text as part of an import request. + *

        + */ +export interface DelimitedTextImportOptions { + /** + *

        The delimiter to use for separating columns in a single row of the input.

        + */ + delimiter: string | undefined; + + /** + *

        Indicates whether the input file has a header row at the top containing the column names.

        + */ + hasHeaderRow?: boolean; + + /** + *

        A parameter to indicate whether empty rows should be ignored or be included in the import.

        + */ + ignoreEmptyRows?: boolean; + + /** + *

        The encoding of the data in the input file.

        + */ + dataCharacterEncoding?: ImportDataCharacterEncoding | string; +} + +export namespace DelimitedTextImportOptions { + export const filterSensitiveLog = (obj: DelimitedTextImportOptions): any => ({ + ...obj, + }); +} + +export interface DescribeTableDataImportJobRequest { + /** + *

        The ID of the workbook into which data was imported.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the table into which data was imported.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + + /** + *

        The ID of the job that was returned by the StartTableDataImportJob request.

        + *

        + * If a job with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + jobId: string | undefined; +} + +export namespace DescribeTableDataImportJobRequest { + export const filterSensitiveLog = (obj: DescribeTableDataImportJobRequest): any => ({ + ...obj, + }); +} + +/** + *

        + * An object that contains the configuration parameters for the data source of an import request. + *

        + */ +export interface ImportDataSourceConfig { + /** + *

        + * The URL from which source data will be downloaded for the import request. + *

        + */ + dataSourceUrl?: string; +} + +export namespace ImportDataSourceConfig { + export const filterSensitiveLog = (obj: ImportDataSourceConfig): any => ({ + ...obj, + }); +} + +/** + *

        An object that has details about the source of the data that was submitted for import.

        + */ +export interface ImportDataSource { + /** + *

        The configuration parameters for the data source of the import

        + */ + dataSourceConfig: ImportDataSourceConfig | undefined; +} + +export namespace ImportDataSource { + export const filterSensitiveLog = (obj: ImportDataSource): any => ({ + ...obj, + }); +} + +/** + *

        An object that contains the properties for importing data to a specific column in a table.

        + */ +export interface SourceDataColumnProperties { + /** + *

        The index of the column in the input file.

        + */ + columnIndex?: number; +} + +export namespace SourceDataColumnProperties { + export const filterSensitiveLog = (obj: SourceDataColumnProperties): any => ({ + ...obj, + }); +} + +/** + *

        An object that contains the options relating to the destination of the import request.

        + */ +export interface DestinationOptions { + /** + *

        A map of the column id to the import properties for each column.

        + */ + columnMap?: { [key: string]: SourceDataColumnProperties }; +} + +export namespace DestinationOptions { + export const filterSensitiveLog = (obj: DestinationOptions): any => ({ + ...obj, + }); +} + +/** + *

        An object that contains the options specified by the sumitter of the import request.

        + */ +export interface ImportOptions { + /** + *

        Options relating to the destination of the import request.

        + */ + destinationOptions?: DestinationOptions; + + /** + *

        Options relating to parsing delimited text. Required if dataFormat is DELIMITED_TEXT.

        + */ + delimitedTextOptions?: DelimitedTextImportOptions; +} + +export namespace ImportOptions { + export const filterSensitiveLog = (obj: ImportOptions): any => ({ + ...obj, + }); +} + +/** + *

        An object that contains the attributes of the submitter of the import job.

        + */ +export interface ImportJobSubmitter { + /** + *

        The email id of the submitter of the import job, if available.

        + */ + email?: string; + + /** + *

        The AWS user ARN of the submitter of the import job, if available.

        + */ + userArn?: string; +} + +export namespace ImportJobSubmitter { + export const filterSensitiveLog = (obj: ImportJobSubmitter): any => ({ + ...obj, + ...(obj.email && { email: SENSITIVE_STRING }), + }); +} + +/** + *

        The metadata associated with the table data import job that was submitted.

        + */ +export interface TableDataImportJobMetadata { + /** + *

        Details about the submitter of the import request.

        + */ + submitter: ImportJobSubmitter | undefined; + + /** + *

        The timestamp when the job was submitted for import.

        + */ + submitTime: Date | undefined; + + /** + *

        The options that was specified at the time of submitting the import request.

        + */ + importOptions: ImportOptions | undefined; + + /** + *

        The source of the data that was submitted for import.

        + */ + dataSource: ImportDataSource | undefined; +} + +export namespace TableDataImportJobMetadata { + export const filterSensitiveLog = (obj: TableDataImportJobMetadata): any => ({ + ...obj, + ...(obj.submitter && { submitter: ImportJobSubmitter.filterSensitiveLog(obj.submitter) }), + }); +} + +export enum TableDataImportJobStatus { + Completed = "COMPLETED", + Failed = "FAILED", + InProgress = "IN_PROGRESS", + Submitted = "SUBMITTED", +} + +export interface DescribeTableDataImportJobResult { + /** + *

        + * The current status of the import job. + *

        + */ + jobStatus: TableDataImportJobStatus | string | undefined; + + /** + *

        + * A message providing more details about the current status of the import job. + *

        + */ + message: string | undefined; + + /** + *

        + * The metadata about the job that was submitted for import. + *

        + */ + jobMetadata: TableDataImportJobMetadata | undefined; +} + +export namespace DescribeTableDataImportJobResult { + export const filterSensitiveLog = (obj: DescribeTableDataImportJobResult): any => ({ + ...obj, + ...(obj.jobMetadata && { jobMetadata: TableDataImportJobMetadata.filterSensitiveLog(obj.jobMetadata) }), + }); +} + +/** + *

        The input variables to the app to be used by the InvokeScreenAutomation action request.

        + */ +export interface VariableValue { + /** + *

        Raw value of the variable.

        + */ + rawValue: string | undefined; +} + +export namespace VariableValue { + export const filterSensitiveLog = (obj: VariableValue): any => ({ + ...obj, + }); +} + +export interface GetScreenDataRequest { + /** + *

        The ID of the workbook that contains the screen.

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the app that contains the screem.

        + */ + appId: string | undefined; + + /** + *

        The ID of the screen.

        + */ + screenId: string | undefined; + + /** + *

        + * Variables are optional and are needed only if the screen requires them to render correctly. Variables are + * specified as a map where the key is the name of the variable as defined on the screen. The value is an + * object which currently has only one property, rawValue, which holds the value of the variable to be passed + * to the screen. + *

        + */ + variables?: { [key: string]: VariableValue }; + + /** + *

        + * The number of results to be returned on a single page. + * Specify a number between 1 and 100. The maximum value is 100. + *

        + *

        + * This parameter is optional. If you don't specify this parameter, the default page size is 100. + *

        + */ + maxResults?: number; + + /** + *

        + * This parameter is optional. If a nextToken is not specified, the API returns the first page of data. + *

        + *

        + * Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API + * will throw ValidationException. + *

        + */ + nextToken?: string; +} + +export namespace GetScreenDataRequest { + export const filterSensitiveLog = (obj: GetScreenDataRequest): any => ({ + ...obj, + ...(obj.variables && { variables: SENSITIVE_STRING }), + }); +} + +/** + *

        A single row in the ResultSet.

        + */ +export interface ResultRow { + /** + *

        The ID for a particular row.

        + */ + rowId?: string; + + /** + *

        List of all the data cells in a row.

        + */ + dataItems: DataItem[] | undefined; +} + +export namespace ResultRow { + export const filterSensitiveLog = (obj: ResultRow): any => ({ + ...obj, + ...(obj.dataItems && { dataItems: SENSITIVE_STRING }), + }); +} + +/** + *

        + * ResultSet contains the results of the request for a single block or list defined on the screen. + *

        + */ +export interface ResultSet { + /** + *

        + * List of headers for all the data cells in the block. The header identifies the name and default format of + * the data cell. Data cells appear in the same order in all rows as defined in the header. The names and + * formats are not repeated in the rows. If a particular row does not have a value for a data cell, a blank + * value is used. + *

        + *

        + * For example, a task list that displays the task name, due date and assigned person might have headers + * [ { "name": "Task Name"}, {"name": "Due Date", "format": "DATE"}, {"name": "Assigned", "format": "CONTACT"} ]. + * Every row in the result will have the task name as the first item, due date as the second item and assigned + * person as the third item. If a particular task does not have a due date, that row will still have a blank + * value in the second element and the assigned person will still be in the third element. + *

        + */ + headers: ColumnMetadata[] | undefined; + + /** + *

        + * List of rows returned by the request. Each row has a row Id and a list of data cells in that row. The data + * cells will be present in the same order as they are defined in the header. + *

        + */ + rows: ResultRow[] | undefined; +} + +export namespace ResultSet { + export const filterSensitiveLog = (obj: ResultSet): any => ({ + ...obj, + ...(obj.headers && { headers: obj.headers.map((item) => ColumnMetadata.filterSensitiveLog(item)) }), + }); +} + +export interface GetScreenDataResult { + /** + *

        A map of all the rows on the screen keyed by block name.

        + */ + results: { [key: string]: ResultSet } | undefined; + + /** + *

        + * Indicates the cursor of the workbook at which the data returned by this workbook is read. Workbook cursor + * keeps increasing with every update and the increments are not sequential. + *

        + */ + workbookCursor: number | undefined; + + /** + *

        + * Provides the pagination token to load the next page if there are more results matching the request. If a + * pagination token is not present in the response, it means that all data matching the query has been loaded. + *

        + */ + nextToken?: string; +} + +export namespace GetScreenDataResult { + export const filterSensitiveLog = (obj: GetScreenDataResult): any => ({ + ...obj, + ...(obj.results && { + results: Object.entries(obj.results).reduce( + (acc: any, [key, value]: [string, ResultSet]) => ({ + ...acc, + [key]: ResultSet.filterSensitiveLog(value), + }), + {} + ), + }), + }); +} + +export enum ImportSourceDataFormat { + DelimitedText = "DELIMITED_TEXT", +} + +export interface InvokeScreenAutomationRequest { + /** + *

        The ID of the workbook that contains the screen automation.

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the app that contains the screen automation.

        + */ + appId: string | undefined; + + /** + *

        The ID of the screen that contains the screen automation.

        + */ + screenId: string | undefined; + + /** + *

        The ID of the automation action to be performed.

        + */ + screenAutomationId: string | undefined; + + /** + *

        + * Variables are specified as a map where the key is the name of the variable as defined on the screen. The value is an + * object which currently has only one property, rawValue, which holds the value of the variable to be passed + * to the screen. Any variables defined in a screen are required to be passed in the call. + *

        + */ + variables?: { [key: string]: VariableValue }; + + /** + *

        + * The row ID for the automation if the automation is defined inside a block with source or list. + *

        + */ + rowId?: string; + + /** + *

        + * The request token for performing the automation action. + * Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error + * like a failed network connection, you can retry the call with the same request token. The service ensures + * that if the first call using that request token is successfully performed, the second call will return the + * response of the previous call rather than performing the action again. + *

        + *

        + * Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests + * spanning hours or days. + *

        + */ + clientRequestToken?: string; +} + +export namespace InvokeScreenAutomationRequest { + export const filterSensitiveLog = (obj: InvokeScreenAutomationRequest): any => ({ + ...obj, + ...(obj.variables && { variables: SENSITIVE_STRING }), + }); +} + +export interface InvokeScreenAutomationResult { + /** + *

        The updated workbook cursor after performing the automation action.

        + */ + workbookCursor: number | undefined; } export namespace InvokeScreenAutomationResult { @@ -441,3 +1374,473 @@ export namespace InvokeScreenAutomationResult { ...obj, }); } + +export interface ListTableColumnsRequest { + /** + *

        The ID of the workbook that contains the table whose columns are being retrieved.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the table whose columns are being retrieved.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + + /** + *

        + * This parameter is optional. If a nextToken is not specified, the API returns the first page of data. + *

        + *

        + * Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API + * will throw ValidationException. + *

        + */ + nextToken?: string; +} + +export namespace ListTableColumnsRequest { + export const filterSensitiveLog = (obj: ListTableColumnsRequest): any => ({ + ...obj, + }); +} + +/** + *

        An object that contains attributes about a single column in a table

        + */ +export interface TableColumn { + /** + *

        The id of the column in the table.

        + */ + tableColumnId?: string; + + /** + *

        The name of the column in the table.

        + */ + tableColumnName?: string; + + /** + *

        + * The column level format that is applied in the table. An empty value in this field means that the + * column format is the default value 'AUTO'. + *

        + */ + format?: Format | string; +} + +export namespace TableColumn { + export const filterSensitiveLog = (obj: TableColumn): any => ({ + ...obj, + }); +} + +export interface ListTableColumnsResult { + /** + *

        + * The list of columns in the table. + *

        + */ + tableColumns: TableColumn[] | undefined; + + /** + *

        + * Provides the pagination token to load the next page if there are more results matching the request. If a + * pagination token is not present in the response, it means that all data matching the request has been + * loaded. + *

        + */ + nextToken?: string; + + /** + *

        + * Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor + * keeps increasing with every update and the increments are not sequential. + *

        + */ + workbookCursor?: number; +} + +export namespace ListTableColumnsResult { + export const filterSensitiveLog = (obj: ListTableColumnsResult): any => ({ + ...obj, + }); +} + +export interface ListTableRowsRequest { + /** + *

        The ID of the workbook that contains the table whose rows are being retrieved.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the table whose rows are being retrieved.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + + /** + *

        + * This parameter is optional. If one or more row ids are specified in this list, then only the specified + * row ids are returned in the result. If no row ids are specified here, then all the rows in the table are + * returned. + *

        + */ + rowIds?: string[]; + + /** + *

        The maximum number of rows to return in each page of the results.

        + */ + maxResults?: number; + + /** + *

        + * This parameter is optional. If a nextToken is not specified, the API returns the first page of data. + *

        + *

        + * Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API + * will throw ValidationException. + *

        + */ + nextToken?: string; +} + +export namespace ListTableRowsRequest { + export const filterSensitiveLog = (obj: ListTableRowsRequest): any => ({ + ...obj, + }); +} + +/** + *

        An object that contains attributes about a single row in a table

        + */ +export interface TableRow { + /** + *

        The id of the row in the table.

        + */ + rowId: string | undefined; + + /** + *

        A list of cells in the table row. The cells appear in the same order as the columns of the table. + *

        + */ + cells: Cell[] | undefined; +} + +export namespace TableRow { + export const filterSensitiveLog = (obj: TableRow): any => ({ + ...obj, + ...(obj.cells && { cells: SENSITIVE_STRING }), + }); +} + +export interface ListTableRowsResult { + /** + *

        + * The list of columns in the table whose row data is returned in the result. + *

        + */ + columnIds: string[] | undefined; + + /** + *

        + * The list of rows in the table. Note that this result is paginated, so this list contains a maximum of 100 + * rows. + *

        + */ + rows: TableRow[] | undefined; + + /** + *

        + * The list of row ids included in the request that were not found in the table. + *

        + */ + rowIdsNotFound?: string[]; + + /** + *

        + * Provides the pagination token to load the next page if there are more results matching the request. If a + * pagination token is not present in the response, it means that all data matching the request has been + * loaded. + *

        + */ + nextToken?: string; + + /** + *

        + * Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor + * keeps increasing with every update and the increments are not sequential. + *

        + */ + workbookCursor: number | undefined; +} + +export namespace ListTableRowsResult { + export const filterSensitiveLog = (obj: ListTableRowsResult): any => ({ + ...obj, + }); +} + +export interface ListTablesRequest { + /** + *

        The ID of the workbook whose tables are being retrieved.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        The maximum number of tables to return in each page of the results.

        + */ + maxResults?: number; + + /** + *

        + * This parameter is optional. If a nextToken is not specified, the API returns the first page of data. + *

        + *

        + * Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API + * will throw ValidationException. + *

        + */ + nextToken?: string; +} + +export namespace ListTablesRequest { + export const filterSensitiveLog = (obj: ListTablesRequest): any => ({ + ...obj, + }); +} + +/** + *

        An object representing the properties of a table in a workbook.

        + */ +export interface Table { + /** + *

        The id of the table.

        + */ + tableId?: string; + + /** + *

        The name of the table.

        + */ + tableName?: string; +} + +export namespace Table { + export const filterSensitiveLog = (obj: Table): any => ({ + ...obj, + }); +} + +export interface ListTablesResult { + /** + *

        + * The list of tables in the workbook. + *

        + */ + tables: Table[] | undefined; + + /** + *

        + * Provides the pagination token to load the next page if there are more results matching the request. If a + * pagination token is not present in the response, it means that all data matching the request has been + * loaded. + *

        + */ + nextToken?: string; + + /** + *

        + * Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor + * keeps increasing with every update and the increments are not sequential. + *

        + */ + workbookCursor?: number; +} + +export namespace ListTablesResult { + export const filterSensitiveLog = (obj: ListTablesResult): any => ({ + ...obj, + }); +} + +export interface QueryTableRowsRequest { + /** + *

        The ID of the workbook whose table rows are being queried.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        The ID of the table whose rows are being queried.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + tableId: string | undefined; + + /** + *

        An object that represents a filter formula along with the id of the context row under which the filter + * function needs to evaluate.

        + */ + filterFormula: Filter | undefined; + + /** + *

        The maximum number of rows to return in each page of the results.

        + */ + maxResults?: number; + + /** + *

        + * This parameter is optional. If a nextToken is not specified, the API returns the first page of data. + *

        + *

        + * Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API + * will throw ValidationException. + *

        + */ + nextToken?: string; +} + +export namespace QueryTableRowsRequest { + export const filterSensitiveLog = (obj: QueryTableRowsRequest): any => ({ + ...obj, + ...(obj.filterFormula && { filterFormula: Filter.filterSensitiveLog(obj.filterFormula) }), + }); +} + +export interface QueryTableRowsResult { + /** + *

        + * The list of columns in the table whose row data is returned in the result. + *

        + */ + columnIds: string[] | undefined; + + /** + *

        + * The list of rows in the table that match the query filter. + *

        + */ + rows: TableRow[] | undefined; + + /** + *

        + * Provides the pagination token to load the next page if there are more results matching the request. If a + * pagination token is not present in the response, it means that all data matching the request has been + * loaded. + *

        + */ + nextToken?: string; + + /** + *

        + * Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor + * keeps increasing with every update and the increments are not sequential. + *

        + */ + workbookCursor: number | undefined; +} + +export namespace QueryTableRowsResult { + export const filterSensitiveLog = (obj: QueryTableRowsResult): any => ({ + ...obj, + }); +} + +export interface StartTableDataImportJobRequest { + /** + *

        The ID of the workbook where the rows are being imported.

        + *

        + * If a workbook with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + workbookId: string | undefined; + + /** + *

        + * The source of the data that is being imported. The size of source must be no larger than 100 MB. + * Source must have no more than 100,000 cells and no more than 1,000 rows. + *

        + */ + dataSource: ImportDataSource | undefined; + + /** + *

        + * The format of the data that is being imported. Currently the only option supported is "DELIMITED_TEXT". + *

        + */ + dataFormat: ImportSourceDataFormat | string | undefined; + + /** + *

        The ID of the table where the rows are being imported.

        + *

        + * If a table with the specified id could not be found, this API throws ResourceNotFoundException. + *

        + */ + destinationTableId: string | undefined; + + /** + *

        + * The options for customizing this import request. + *

        + */ + importOptions: ImportOptions | undefined; + + /** + *

        + * The request token for performing the update action. + * Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error + * like a failed network connection, you can retry the call with the same request token. The service ensures + * that if the first call using that request token is successfully performed, the second call will not perform + * the action again. + *

        + *

        + * Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests + * spanning hours or days. + *

        + */ + clientRequestToken: string | undefined; +} + +export namespace StartTableDataImportJobRequest { + export const filterSensitiveLog = (obj: StartTableDataImportJobRequest): any => ({ + ...obj, + }); +} + +export interface StartTableDataImportJobResult { + /** + *

        + * The id that is assigned to this import job. Future requests to find out the status of this import job + * need to send this id in the appropriate parameter in the request. + *

        + */ + jobId: string | undefined; + + /** + *

        + * The status of the import job immediately after submitting the request. + *

        + */ + jobStatus: TableDataImportJobStatus | string | undefined; +} + +export namespace StartTableDataImportJobResult { + export const filterSensitiveLog = (obj: StartTableDataImportJobResult): any => ({ + ...obj, + }); +} diff --git a/clients/client-honeycode/pagination/Interfaces.ts b/clients/client-honeycode/pagination/Interfaces.ts new file mode 100644 index 000000000000..879717299f6d --- /dev/null +++ b/clients/client-honeycode/pagination/Interfaces.ts @@ -0,0 +1,7 @@ +import { Honeycode } from "../Honeycode"; +import { HoneycodeClient } from "../HoneycodeClient"; +import { PaginationConfiguration } from "@aws-sdk/types"; + +export interface HoneycodePaginationConfiguration extends PaginationConfiguration { + client: Honeycode | HoneycodeClient; +} diff --git a/clients/client-honeycode/pagination/ListTableColumnsPaginator.ts b/clients/client-honeycode/pagination/ListTableColumnsPaginator.ts new file mode 100644 index 000000000000..2f45d10f6a23 --- /dev/null +++ b/clients/client-honeycode/pagination/ListTableColumnsPaginator.ts @@ -0,0 +1,56 @@ +import { Honeycode } from "../Honeycode"; +import { HoneycodeClient } from "../HoneycodeClient"; +import { + ListTableColumnsCommand, + ListTableColumnsCommandInput, + ListTableColumnsCommandOutput, +} from "../commands/ListTableColumnsCommand"; +import { HoneycodePaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: HoneycodeClient, + input: ListTableColumnsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListTableColumnsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: Honeycode, + input: ListTableColumnsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listTableColumns(input, ...args); +}; +export async function* paginateListTableColumns( + config: 
HoneycodePaginationConfiguration, + input: ListTableColumnsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListTableColumnsCommandOutput; + while (hasNext) { + input.nextToken = token; + if (config.client instanceof Honeycode) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof HoneycodeClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected Honeycode | HoneycodeClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-honeycode/pagination/ListTableRowsPaginator.ts b/clients/client-honeycode/pagination/ListTableRowsPaginator.ts new file mode 100644 index 000000000000..43de81b473fd --- /dev/null +++ b/clients/client-honeycode/pagination/ListTableRowsPaginator.ts @@ -0,0 +1,57 @@ +import { Honeycode } from "../Honeycode"; +import { HoneycodeClient } from "../HoneycodeClient"; +import { + ListTableRowsCommand, + ListTableRowsCommandInput, + ListTableRowsCommandOutput, +} from "../commands/ListTableRowsCommand"; +import { HoneycodePaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: HoneycodeClient, + input: ListTableRowsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListTableRowsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: Honeycode, + input: ListTableRowsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listTableRows(input, ...args); +}; +export async function* paginateListTableRows( + config: HoneycodePaginationConfiguration, + input: ListTableRowsCommandInput, + 
...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListTableRowsCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof Honeycode) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof HoneycodeClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected Honeycode | HoneycodeClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-honeycode/pagination/ListTablesPaginator.ts b/clients/client-honeycode/pagination/ListTablesPaginator.ts new file mode 100644 index 000000000000..d5795969c5d8 --- /dev/null +++ b/clients/client-honeycode/pagination/ListTablesPaginator.ts @@ -0,0 +1,53 @@ +import { Honeycode } from "../Honeycode"; +import { HoneycodeClient } from "../HoneycodeClient"; +import { ListTablesCommand, ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { HoneycodePaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: HoneycodeClient, + input: ListTablesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListTablesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: Honeycode, + input: ListTablesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listTables(input, ...args); +}; +export async function* paginateListTables( + config: HoneycodePaginationConfiguration, + input: ListTablesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = 
config.startingToken || undefined; + let hasNext = true; + let page: ListTablesCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof Honeycode) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof HoneycodeClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected Honeycode | HoneycodeClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-honeycode/pagination/QueryTableRowsPaginator.ts b/clients/client-honeycode/pagination/QueryTableRowsPaginator.ts new file mode 100644 index 000000000000..97bb827c7d71 --- /dev/null +++ b/clients/client-honeycode/pagination/QueryTableRowsPaginator.ts @@ -0,0 +1,57 @@ +import { Honeycode } from "../Honeycode"; +import { HoneycodeClient } from "../HoneycodeClient"; +import { + QueryTableRowsCommand, + QueryTableRowsCommandInput, + QueryTableRowsCommandOutput, +} from "../commands/QueryTableRowsCommand"; +import { HoneycodePaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: HoneycodeClient, + input: QueryTableRowsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new QueryTableRowsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: Honeycode, + input: QueryTableRowsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.queryTableRows(input, ...args); +}; +export async function* paginateQueryTableRows( + config: HoneycodePaginationConfiguration, + input: QueryTableRowsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || 
undefined; + let hasNext = true; + let page: QueryTableRowsCommandOutput; + while (hasNext) { + input.nextToken = token; + input["maxResults"] = config.pageSize; + if (config.client instanceof Honeycode) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof HoneycodeClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected Honeycode | HoneycodeClient"); + } + yield page; + token = page.nextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-honeycode/protocols/Aws_restJson1.ts b/clients/client-honeycode/protocols/Aws_restJson1.ts index 31f11975c4a1..324b1ae6fa84 100644 --- a/clients/client-honeycode/protocols/Aws_restJson1.ts +++ b/clients/client-honeycode/protocols/Aws_restJson1.ts @@ -1,21 +1,69 @@ +import { + BatchCreateTableRowsCommandInput, + BatchCreateTableRowsCommandOutput, +} from "../commands/BatchCreateTableRowsCommand"; +import { + BatchDeleteTableRowsCommandInput, + BatchDeleteTableRowsCommandOutput, +} from "../commands/BatchDeleteTableRowsCommand"; +import { + BatchUpdateTableRowsCommandInput, + BatchUpdateTableRowsCommandOutput, +} from "../commands/BatchUpdateTableRowsCommand"; +import { + BatchUpsertTableRowsCommandInput, + BatchUpsertTableRowsCommandOutput, +} from "../commands/BatchUpsertTableRowsCommand"; +import { + DescribeTableDataImportJobCommandInput, + DescribeTableDataImportJobCommandOutput, +} from "../commands/DescribeTableDataImportJobCommand"; import { GetScreenDataCommandInput, GetScreenDataCommandOutput } from "../commands/GetScreenDataCommand"; import { InvokeScreenAutomationCommandInput, InvokeScreenAutomationCommandOutput, } from "../commands/InvokeScreenAutomationCommand"; +import { ListTableColumnsCommandInput, ListTableColumnsCommandOutput } from "../commands/ListTableColumnsCommand"; +import { ListTableRowsCommandInput, 
ListTableRowsCommandOutput } from "../commands/ListTableRowsCommand"; +import { ListTablesCommandInput, ListTablesCommandOutput } from "../commands/ListTablesCommand"; +import { QueryTableRowsCommandInput, QueryTableRowsCommandOutput } from "../commands/QueryTableRowsCommand"; +import { + StartTableDataImportJobCommandInput, + StartTableDataImportJobCommandOutput, +} from "../commands/StartTableDataImportJobCommand"; import { AccessDeniedException, AutomationExecutionException, AutomationExecutionTimeoutException, + Cell, + CellInput, ColumnMetadata, + CreateRowData, DataItem, + DelimitedTextImportOptions, + DestinationOptions, + FailedBatchItem, + Filter, + ImportDataSource, + ImportDataSourceConfig, + ImportJobSubmitter, + ImportOptions, InternalServerException, RequestTimeoutException, ResourceNotFoundException, ResultRow, ResultSet, + ServiceQuotaExceededException, ServiceUnavailableException, + SourceDataColumnProperties, + Table, + TableColumn, + TableDataImportJobMetadata, + TableRow, ThrottlingException, + UpdateRowData, + UpsertRowData, + UpsertRowsResult, ValidationException, VariableValue, } from "../models/models_0"; @@ -31,6 +79,232 @@ import { SerdeContext as __SerdeContext, } from "@aws-sdk/types"; +export const serializeAws_restJson1BatchCreateTableRowsCommand = async ( + input: BatchCreateTableRowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/rows/batchcreate"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const 
labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + let body: any; + body = JSON.stringify({ + ...(input.clientRequestToken !== undefined && { clientRequestToken: input.clientRequestToken }), + ...(input.rowsToCreate !== undefined && { + rowsToCreate: serializeAws_restJson1CreateRowDataList(input.rowsToCreate, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1BatchDeleteTableRowsCommand = async ( + input: BatchDeleteTableRowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/rows/batchdelete"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + let body: any; + body = JSON.stringify({ + ...(input.clientRequestToken !== undefined && { 
clientRequestToken: input.clientRequestToken }), + ...(input.rowIds !== undefined && { rowIds: serializeAws_restJson1RowIdList(input.rowIds, context) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1BatchUpdateTableRowsCommand = async ( + input: BatchUpdateTableRowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/rows/batchupdate"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + let body: any; + body = JSON.stringify({ + ...(input.clientRequestToken !== undefined && { clientRequestToken: input.clientRequestToken }), + ...(input.rowsToUpdate !== undefined && { + rowsToUpdate: serializeAws_restJson1UpdateRowDataList(input.rowsToUpdate, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const 
serializeAws_restJson1BatchUpsertTableRowsCommand = async ( + input: BatchUpsertTableRowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/rows/batchupsert"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + let body: any; + body = JSON.stringify({ + ...(input.clientRequestToken !== undefined && { clientRequestToken: input.clientRequestToken }), + ...(input.rowsToUpsert !== undefined && { + rowsToUpsert: serializeAws_restJson1UpsertRowDataList(input.rowsToUpsert, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeTableDataImportJobCommand = async ( + input: DescribeTableDataImportJobCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/import/{jobId}"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + 
throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + if (input.jobId !== undefined) { + const labelValue: string = input.jobId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: jobId."); + } + resolvedPath = resolvedPath.replace("{jobId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: jobId."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + export const serializeAws_restJson1GetScreenDataCommand = async ( input: GetScreenDataCommandInput, context: __SerdeContext @@ -79,15 +353,6 @@ export const serializeAws_restJson1InvokeScreenAutomationCommand = async ( } else { throw new Error("No value provided for input HTTP label: workbookId."); } - if (input.screenId !== undefined) { - const labelValue: string = input.screenId; - if (labelValue.length <= 0) { - throw new Error("Empty value provided for input HTTP label: screenId."); - } - resolvedPath = resolvedPath.replace("{screenId}", __extendedEncodeURIComponent(labelValue)); - } else { - throw new Error("No value provided for input HTTP label: screenId."); - } if (input.appId !== undefined) { const labelValue: string = input.appId; if (labelValue.length <= 0) 
{ @@ -97,6 +362,15 @@ export const serializeAws_restJson1InvokeScreenAutomationCommand = async ( } else { throw new Error("No value provided for input HTTP label: appId."); } + if (input.screenId !== undefined) { + const labelValue: string = input.screenId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: screenId."); + } + resolvedPath = resolvedPath.replace("{screenId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: screenId."); + } if (input.screenAutomationId !== undefined) { const labelValue: string = input.screenAutomationId; if (labelValue.length <= 0) { @@ -126,25 +400,1372 @@ export const serializeAws_restJson1InvokeScreenAutomationCommand = async ( }); }; -export const deserializeAws_restJson1GetScreenDataCommand = async ( +export const serializeAws_restJson1ListTableColumnsCommand = async ( + input: ListTableColumnsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/columns"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + const query: any = { + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), 
+ }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1ListTableRowsCommand = async ( + input: ListTableRowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/rows/list"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + let body: any; + body = JSON.stringify({ + ...(input.maxResults !== undefined && { maxResults: input.maxResults }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + ...(input.rowIds !== undefined && { rowIds: serializeAws_restJson1RowIdList(input.rowIds, context) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1ListTablesCommand = async ( + input: ListTablesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + 
"Content-Type": "", + }; + let resolvedPath = "/workbooks/{workbookId}/tables"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + const query: any = { + ...(input.maxResults !== undefined && { maxResults: input.maxResults.toString() }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1QueryTableRowsCommand = async ( + input: QueryTableRowsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{tableId}/rows/query"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.tableId !== undefined) { + const labelValue: string = input.tableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: tableId."); + } + resolvedPath = resolvedPath.replace("{tableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: tableId."); + } + let body: any; + body = JSON.stringify({ + 
...(input.filterFormula !== undefined && { + filterFormula: serializeAws_restJson1Filter(input.filterFormula, context), + }), + ...(input.maxResults !== undefined && { maxResults: input.maxResults }), + ...(input.nextToken !== undefined && { nextToken: input.nextToken }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1StartTableDataImportJobCommand = async ( + input: StartTableDataImportJobCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/workbooks/{workbookId}/tables/{destinationTableId}/import"; + if (input.workbookId !== undefined) { + const labelValue: string = input.workbookId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: workbookId."); + } + resolvedPath = resolvedPath.replace("{workbookId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: workbookId."); + } + if (input.destinationTableId !== undefined) { + const labelValue: string = input.destinationTableId; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: destinationTableId."); + } + resolvedPath = resolvedPath.replace("{destinationTableId}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: destinationTableId."); + } + let body: any; + body = JSON.stringify({ + ...(input.clientRequestToken !== undefined && { clientRequestToken: input.clientRequestToken }), + ...(input.dataFormat !== undefined && { dataFormat: input.dataFormat }), + ...(input.dataSource !== undefined && { + dataSource: serializeAws_restJson1ImportDataSource(input.dataSource, context), + }), + 
...(input.importOptions !== undefined && { + importOptions: serializeAws_restJson1ImportOptions(input.importOptions, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const deserializeAws_restJson1BatchCreateTableRowsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1BatchCreateTableRowsCommandError(output, context); + } + const contents: BatchCreateTableRowsCommandOutput = { + $metadata: deserializeMetadata(output), + createdRows: undefined, + failedBatchItems: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.createdRows !== undefined && data.createdRows !== null) { + contents.createdRows = deserializeAws_restJson1CreatedRowsMap(data.createdRows, context); + } + if (data.failedBatchItems !== undefined && data.failedBatchItems !== null) { + contents.failedBatchItems = deserializeAws_restJson1FailedBatchItems(data.failedBatchItems, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1BatchCreateTableRowsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await 
deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.honeycode#ServiceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await 
deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1BatchDeleteTableRowsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1BatchDeleteTableRowsCommandError(output, context); + } + const contents: BatchDeleteTableRowsCommandOutput = { + $metadata: deserializeMetadata(output), + failedBatchItems: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.failedBatchItems !== undefined && data.failedBatchItems !== null) { + contents.failedBatchItems = deserializeAws_restJson1FailedBatchItems(data.failedBatchItems, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1BatchDeleteTableRowsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case 
"AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || 
parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1BatchUpdateTableRowsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1BatchUpdateTableRowsCommandError(output, context); + } + const contents: BatchUpdateTableRowsCommandOutput = { + $metadata: deserializeMetadata(output), + failedBatchItems: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.failedBatchItems !== undefined && data.failedBatchItems !== null) { + contents.failedBatchItems = deserializeAws_restJson1FailedBatchItems(data.failedBatchItems, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1BatchUpdateTableRowsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + 
} + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1BatchUpsertTableRowsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1BatchUpsertTableRowsCommandError(output, context); + } + const contents: BatchUpsertTableRowsCommandOutput = { + $metadata: deserializeMetadata(output), + failedBatchItems: undefined, + rows: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.failedBatchItems !== undefined && data.failedBatchItems !== null) { + contents.failedBatchItems = deserializeAws_restJson1FailedBatchItems(data.failedBatchItems, context); + } + if (data.rows !== undefined && data.rows !== null) { + contents.rows = deserializeAws_restJson1UpsertRowsResultMap(data.rows, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1BatchUpsertTableRowsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case 
"com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.honeycode#ServiceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || 
errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DescribeTableDataImportJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DescribeTableDataImportJobCommandError(output, context); + } + const contents: DescribeTableDataImportJobCommandOutput = { + $metadata: deserializeMetadata(output), + jobMetadata: undefined, + jobStatus: undefined, + message: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.jobMetadata !== undefined && data.jobMetadata !== null) { + contents.jobMetadata = deserializeAws_restJson1TableDataImportJobMetadata(data.jobMetadata, context); + } + if (data.jobStatus !== undefined && data.jobStatus !== null) { + contents.jobStatus = data.jobStatus; + } + if (data.message !== undefined && data.message !== null) { + contents.message = data.message; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DescribeTableDataImportJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await 
deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return 
Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetScreenDataCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetScreenDataCommandError(output, context); + } + const contents: GetScreenDataCommandOutput = { + $metadata: deserializeMetadata(output), + nextToken: undefined, + results: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.nextToken = data.nextToken; + } + if (data.results !== undefined && data.results !== null) { + contents.results = deserializeAws_restJson1ResultSetMap(data.results, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetScreenDataCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case 
"RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1InvokeScreenAutomationCommand = async ( + output: __HttpResponse, + context: 
__SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1InvokeScreenAutomationCommandError(output, context); + } + const contents: InvokeScreenAutomationCommandOutput = { + $metadata: deserializeMetadata(output), + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1InvokeScreenAutomationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "AutomationExecutionException": + case "com.amazonaws.honeycode#AutomationExecutionException": + response = { + ...(await deserializeAws_restJson1AutomationExecutionExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "AutomationExecutionTimeoutException": + case "com.amazonaws.honeycode#AutomationExecutionTimeoutException": + response = { + ...(await deserializeAws_restJson1AutomationExecutionTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await 
deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return 
Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListTableColumnsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListTableColumnsCommandError(output, context); + } + const contents: ListTableColumnsCommandOutput = { + $metadata: deserializeMetadata(output), + nextToken: undefined, + tableColumns: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.nextToken = data.nextToken; + } + if (data.tableColumns !== undefined && data.tableColumns !== null) { + contents.tableColumns = deserializeAws_restJson1TableColumns(data.tableColumns, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListTableColumnsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListTableRowsCommand = async ( + 
output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListTableRowsCommandError(output, context); + } + const contents: ListTableRowsCommandOutput = { + $metadata: deserializeMetadata(output), + columnIds: undefined, + nextToken: undefined, + rowIdsNotFound: undefined, + rows: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.columnIds !== undefined && data.columnIds !== null) { + contents.columnIds = deserializeAws_restJson1ResourceIds(data.columnIds, context); + } + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.nextToken = data.nextToken; + } + if (data.rowIdsNotFound !== undefined && data.rowIdsNotFound !== null) { + contents.rowIdsNotFound = deserializeAws_restJson1RowIdList(data.rowIdsNotFound, context); + } + if (data.rows !== undefined && data.rows !== null) { + contents.rows = deserializeAws_restJson1TableRows(data.rows, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListTableRowsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case 
"com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + 
response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListTablesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListTablesCommandError(output, context); + } + const contents: ListTablesCommandOutput = { + $metadata: deserializeMetadata(output), + nextToken: undefined, + tables: undefined, + workbookCursor: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.nextToken !== undefined && data.nextToken !== null) { + contents.nextToken = data.nextToken; + } + if (data.tables !== undefined && data.tables !== null) { + contents.tables = deserializeAws_restJson1Tables(data.tables, context); + } + if (data.workbookCursor !== undefined && data.workbookCursor !== null) { + contents.workbookCursor = data.workbookCursor; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListTablesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.honeycode#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.honeycode#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "RequestTimeoutException": + case "com.amazonaws.honeycode#RequestTimeoutException": + response = { + ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.honeycode#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailableException": + case "com.amazonaws.honeycode#ServiceUnavailableException": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.honeycode#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.honeycode#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1QueryTableRowsCommand = async ( 
output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return deserializeAws_restJson1GetScreenDataCommandError(output, context); + return deserializeAws_restJson1QueryTableRowsCommandError(output, context); } - const contents: GetScreenDataCommandOutput = { + const contents: QueryTableRowsCommandOutput = { $metadata: deserializeMetadata(output), + columnIds: undefined, nextToken: undefined, - results: undefined, + rows: undefined, workbookCursor: undefined, }; const data: any = await parseBody(output.body, context); + if (data.columnIds !== undefined && data.columnIds !== null) { + contents.columnIds = deserializeAws_restJson1ResourceIds(data.columnIds, context); + } if (data.nextToken !== undefined && data.nextToken !== null) { contents.nextToken = data.nextToken; } - if (data.results !== undefined && data.results !== null) { - contents.results = deserializeAws_restJson1ResultSetMap(data.results, context); + if (data.rows !== undefined && data.rows !== null) { + contents.rows = deserializeAws_restJson1TableRows(data.rows, context); } if (data.workbookCursor !== undefined && data.workbookCursor !== null) { contents.workbookCursor = data.workbookCursor; @@ -152,10 +1773,10 @@ export const deserializeAws_restJson1GetScreenDataCommand = async ( return Promise.resolve(contents); }; -const deserializeAws_restJson1GetScreenDataCommandError = async ( +const deserializeAws_restJson1QueryTableRowsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -237,28 +1858,32 @@ const deserializeAws_restJson1GetScreenDataCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_restJson1InvokeScreenAutomationCommand = async ( +export const deserializeAws_restJson1StartTableDataImportJobCommand = 
async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode !== 200 && output.statusCode >= 300) { - return deserializeAws_restJson1InvokeScreenAutomationCommandError(output, context); + return deserializeAws_restJson1StartTableDataImportJobCommandError(output, context); } - const contents: InvokeScreenAutomationCommandOutput = { + const contents: StartTableDataImportJobCommandOutput = { $metadata: deserializeMetadata(output), - workbookCursor: undefined, + jobId: undefined, + jobStatus: undefined, }; const data: any = await parseBody(output.body, context); - if (data.workbookCursor !== undefined && data.workbookCursor !== null) { - contents.workbookCursor = data.workbookCursor; + if (data.jobId !== undefined && data.jobId !== null) { + contents.jobId = data.jobId; + } + if (data.jobStatus !== undefined && data.jobStatus !== null) { + contents.jobStatus = data.jobStatus; } return Promise.resolve(contents); }; -const deserializeAws_restJson1InvokeScreenAutomationCommandError = async ( +const deserializeAws_restJson1StartTableDataImportJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -275,22 +1900,6 @@ const deserializeAws_restJson1InvokeScreenAutomationCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "AutomationExecutionException": - case "com.amazonaws.honeycode#AutomationExecutionException": - response = { - ...(await deserializeAws_restJson1AutomationExecutionExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; - case "AutomationExecutionTimeoutException": - case "com.amazonaws.honeycode#AutomationExecutionTimeoutException": - response = { - ...(await deserializeAws_restJson1AutomationExecutionTimeoutExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: 
deserializeMetadata(output), - }; - break; case "InternalServerException": case "com.amazonaws.honeycode#InternalServerException": response = { @@ -299,14 +1908,6 @@ const deserializeAws_restJson1InvokeScreenAutomationCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "RequestTimeoutException": - case "com.amazonaws.honeycode#RequestTimeoutException": - response = { - ...(await deserializeAws_restJson1RequestTimeoutExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; case "ResourceNotFoundException": case "com.amazonaws.honeycode#ResourceNotFoundException": response = { @@ -458,6 +2059,23 @@ const deserializeAws_restJson1ResourceNotFoundExceptionResponse = async ( return contents; }; +const deserializeAws_restJson1ServiceQuotaExceededExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ServiceQuotaExceededException = { + name: "ServiceQuotaExceededException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + message: undefined, + }; + const data: any = parsedOutput.body; + if (data.message !== undefined && data.message !== null) { + contents.message = data.message; + } + return contents; +}; + const deserializeAws_restJson1ServiceUnavailableExceptionResponse = async ( parsedOutput: any, context: __SerdeContext @@ -509,6 +2127,140 @@ const deserializeAws_restJson1ValidationExceptionResponse = async ( return contents; }; +const serializeAws_restJson1CellInput = (input: CellInput, context: __SerdeContext): any => { + return { + ...(input.fact !== undefined && { fact: input.fact }), + }; +}; + +const serializeAws_restJson1CreateRowData = (input: CreateRowData, context: __SerdeContext): any => { + return { + ...(input.batchItemId !== undefined && { batchItemId: input.batchItemId }), + ...(input.cellsToCreate !== undefined && { + cellsToCreate: serializeAws_restJson1RowDataInput(input.cellsToCreate, 
context), + }), + }; +}; + +const serializeAws_restJson1CreateRowDataList = (input: CreateRowData[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_restJson1CreateRowData(entry, context)); +}; + +const serializeAws_restJson1DelimitedTextImportOptions = ( + input: DelimitedTextImportOptions, + context: __SerdeContext +): any => { + return { + ...(input.dataCharacterEncoding !== undefined && { dataCharacterEncoding: input.dataCharacterEncoding }), + ...(input.delimiter !== undefined && { delimiter: input.delimiter }), + ...(input.hasHeaderRow !== undefined && { hasHeaderRow: input.hasHeaderRow }), + ...(input.ignoreEmptyRows !== undefined && { ignoreEmptyRows: input.ignoreEmptyRows }), + }; +}; + +const serializeAws_restJson1DestinationOptions = (input: DestinationOptions, context: __SerdeContext): any => { + return { + ...(input.columnMap !== undefined && { + columnMap: serializeAws_restJson1ImportColumnMap(input.columnMap, context), + }), + }; +}; + +const serializeAws_restJson1Filter = (input: Filter, context: __SerdeContext): any => { + return { + ...(input.contextRowId !== undefined && { contextRowId: input.contextRowId }), + ...(input.formula !== undefined && { formula: input.formula }), + }; +}; + +const serializeAws_restJson1ImportColumnMap = ( + input: { [key: string]: SourceDataColumnProperties }, + context: __SerdeContext +): any => { + return Object.entries(input).reduce( + (acc: { [key: string]: SourceDataColumnProperties }, [key, value]: [string, any]) => ({ + ...acc, + [key]: serializeAws_restJson1SourceDataColumnProperties(value, context), + }), + {} + ); +}; + +const serializeAws_restJson1ImportDataSource = (input: ImportDataSource, context: __SerdeContext): any => { + return { + ...(input.dataSourceConfig !== undefined && { + dataSourceConfig: serializeAws_restJson1ImportDataSourceConfig(input.dataSourceConfig, context), + }), + }; +}; + +const serializeAws_restJson1ImportDataSourceConfig = (input: 
ImportDataSourceConfig, context: __SerdeContext): any => { + return { + ...(input.dataSourceUrl !== undefined && { dataSourceUrl: input.dataSourceUrl }), + }; +}; + +const serializeAws_restJson1ImportOptions = (input: ImportOptions, context: __SerdeContext): any => { + return { + ...(input.delimitedTextOptions !== undefined && { + delimitedTextOptions: serializeAws_restJson1DelimitedTextImportOptions(input.delimitedTextOptions, context), + }), + ...(input.destinationOptions !== undefined && { + destinationOptions: serializeAws_restJson1DestinationOptions(input.destinationOptions, context), + }), + }; +}; + +const serializeAws_restJson1RowDataInput = (input: { [key: string]: CellInput }, context: __SerdeContext): any => { + return Object.entries(input).reduce( + (acc: { [key: string]: CellInput }, [key, value]: [string, any]) => ({ + ...acc, + [key]: serializeAws_restJson1CellInput(value, context), + }), + {} + ); +}; + +const serializeAws_restJson1RowIdList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_restJson1SourceDataColumnProperties = ( + input: SourceDataColumnProperties, + context: __SerdeContext +): any => { + return { + ...(input.columnIndex !== undefined && { columnIndex: input.columnIndex }), + }; +}; + +const serializeAws_restJson1UpdateRowData = (input: UpdateRowData, context: __SerdeContext): any => { + return { + ...(input.cellsToUpdate !== undefined && { + cellsToUpdate: serializeAws_restJson1RowDataInput(input.cellsToUpdate, context), + }), + ...(input.rowId !== undefined && { rowId: input.rowId }), + }; +}; + +const serializeAws_restJson1UpdateRowDataList = (input: UpdateRowData[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_restJson1UpdateRowData(entry, context)); +}; + +const serializeAws_restJson1UpsertRowData = (input: UpsertRowData, context: __SerdeContext): any => { + return { + ...(input.batchItemId !== undefined && { batchItemId: 
input.batchItemId }), + ...(input.cellsToUpdate !== undefined && { + cellsToUpdate: serializeAws_restJson1RowDataInput(input.cellsToUpdate, context), + }), + ...(input.filter !== undefined && { filter: serializeAws_restJson1Filter(input.filter, context) }), + }; +}; + +const serializeAws_restJson1UpsertRowDataList = (input: UpsertRowData[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_restJson1UpsertRowData(entry, context)); +}; + const serializeAws_restJson1VariableValue = (input: VariableValue, context: __SerdeContext): any => { return { ...(input.rawValue !== undefined && { rawValue: input.rawValue }), @@ -528,6 +2280,20 @@ const serializeAws_restJson1VariableValueMap = ( ); }; +const deserializeAws_restJson1Cell = (output: any, context: __SerdeContext): Cell => { + return { + format: output.format !== undefined && output.format !== null ? output.format : undefined, + formattedValue: + output.formattedValue !== undefined && output.formattedValue !== null ? output.formattedValue : undefined, + formula: output.formula !== undefined && output.formula !== null ? output.formula : undefined, + rawValue: output.rawValue !== undefined && output.rawValue !== null ? output.rawValue : undefined, + } as any; +}; + +const deserializeAws_restJson1Cells = (output: any, context: __SerdeContext): Cell[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1Cell(entry, context)); +}; + const deserializeAws_restJson1ColumnMetadata = (output: any, context: __SerdeContext): ColumnMetadata => { return { format: output.format !== undefined && output.format !== null ? 
output.format : undefined, @@ -535,6 +2301,16 @@ const deserializeAws_restJson1ColumnMetadata = (output: any, context: __SerdeCon } as any; }; +const deserializeAws_restJson1CreatedRowsMap = (output: any, context: __SerdeContext): { [key: string]: string } => { + return Object.entries(output).reduce( + (acc: { [key: string]: string }, [key, value]: [string, any]) => ({ + ...acc, + [key]: value, + }), + {} + ); +}; + const deserializeAws_restJson1DataItem = (output: any, context: __SerdeContext): DataItem => { return { formattedValue: @@ -549,6 +2325,98 @@ const deserializeAws_restJson1DataItems = (output: any, context: __SerdeContext) return (output || []).map((entry: any) => deserializeAws_restJson1DataItem(entry, context)); }; +const deserializeAws_restJson1DelimitedTextImportOptions = ( + output: any, + context: __SerdeContext +): DelimitedTextImportOptions => { + return { + dataCharacterEncoding: + output.dataCharacterEncoding !== undefined && output.dataCharacterEncoding !== null + ? output.dataCharacterEncoding + : undefined, + delimiter: output.delimiter !== undefined && output.delimiter !== null ? output.delimiter : undefined, + hasHeaderRow: output.hasHeaderRow !== undefined && output.hasHeaderRow !== null ? output.hasHeaderRow : undefined, + ignoreEmptyRows: + output.ignoreEmptyRows !== undefined && output.ignoreEmptyRows !== null ? output.ignoreEmptyRows : undefined, + } as any; +}; + +const deserializeAws_restJson1DestinationOptions = (output: any, context: __SerdeContext): DestinationOptions => { + return { + columnMap: + output.columnMap !== undefined && output.columnMap !== null + ? deserializeAws_restJson1ImportColumnMap(output.columnMap, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1FailedBatchItem = (output: any, context: __SerdeContext): FailedBatchItem => { + return { + errorMessage: output.errorMessage !== undefined && output.errorMessage !== null ? 
output.errorMessage : undefined, + id: output.id !== undefined && output.id !== null ? output.id : undefined, + } as any; +}; + +const deserializeAws_restJson1FailedBatchItems = (output: any, context: __SerdeContext): FailedBatchItem[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1FailedBatchItem(entry, context)); +}; + +const deserializeAws_restJson1ImportColumnMap = ( + output: any, + context: __SerdeContext +): { [key: string]: SourceDataColumnProperties } => { + return Object.entries(output).reduce( + (acc: { [key: string]: SourceDataColumnProperties }, [key, value]: [string, any]) => ({ + ...acc, + [key]: deserializeAws_restJson1SourceDataColumnProperties(value, context), + }), + {} + ); +}; + +const deserializeAws_restJson1ImportDataSource = (output: any, context: __SerdeContext): ImportDataSource => { + return { + dataSourceConfig: + output.dataSourceConfig !== undefined && output.dataSourceConfig !== null + ? deserializeAws_restJson1ImportDataSourceConfig(output.dataSourceConfig, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ImportDataSourceConfig = ( + output: any, + context: __SerdeContext +): ImportDataSourceConfig => { + return { + dataSourceUrl: + output.dataSourceUrl !== undefined && output.dataSourceUrl !== null ? output.dataSourceUrl : undefined, + } as any; +}; + +const deserializeAws_restJson1ImportJobSubmitter = (output: any, context: __SerdeContext): ImportJobSubmitter => { + return { + email: output.email !== undefined && output.email !== null ? output.email : undefined, + userArn: output.userArn !== undefined && output.userArn !== null ? output.userArn : undefined, + } as any; +}; + +const deserializeAws_restJson1ImportOptions = (output: any, context: __SerdeContext): ImportOptions => { + return { + delimitedTextOptions: + output.delimitedTextOptions !== undefined && output.delimitedTextOptions !== null + ? 
deserializeAws_restJson1DelimitedTextImportOptions(output.delimitedTextOptions, context) + : undefined, + destinationOptions: + output.destinationOptions !== undefined && output.destinationOptions !== null + ? deserializeAws_restJson1DestinationOptions(output.destinationOptions, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ResourceIds = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + const deserializeAws_restJson1ResultHeader = (output: any, context: __SerdeContext): ColumnMetadata[] => { return (output || []).map((entry: any) => deserializeAws_restJson1ColumnMetadata(entry, context)); }; @@ -590,6 +2458,105 @@ const deserializeAws_restJson1ResultSetMap = (output: any, context: __SerdeConte ); }; +const deserializeAws_restJson1RowIdList = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1SourceDataColumnProperties = ( + output: any, + context: __SerdeContext +): SourceDataColumnProperties => { + return { + columnIndex: output.columnIndex !== undefined && output.columnIndex !== null ? output.columnIndex : undefined, + } as any; +}; + +const deserializeAws_restJson1Table = (output: any, context: __SerdeContext): Table => { + return { + tableId: output.tableId !== undefined && output.tableId !== null ? output.tableId : undefined, + tableName: output.tableName !== undefined && output.tableName !== null ? output.tableName : undefined, + } as any; +}; + +const deserializeAws_restJson1TableColumn = (output: any, context: __SerdeContext): TableColumn => { + return { + format: output.format !== undefined && output.format !== null ? output.format : undefined, + tableColumnId: + output.tableColumnId !== undefined && output.tableColumnId !== null ? output.tableColumnId : undefined, + tableColumnName: + output.tableColumnName !== undefined && output.tableColumnName !== null ? 
output.tableColumnName : undefined, + } as any; +}; + +const deserializeAws_restJson1TableColumns = (output: any, context: __SerdeContext): TableColumn[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1TableColumn(entry, context)); +}; + +const deserializeAws_restJson1TableDataImportJobMetadata = ( + output: any, + context: __SerdeContext +): TableDataImportJobMetadata => { + return { + dataSource: + output.dataSource !== undefined && output.dataSource !== null + ? deserializeAws_restJson1ImportDataSource(output.dataSource, context) + : undefined, + importOptions: + output.importOptions !== undefined && output.importOptions !== null + ? deserializeAws_restJson1ImportOptions(output.importOptions, context) + : undefined, + submitTime: + output.submitTime !== undefined && output.submitTime !== null + ? new Date(Math.round(output.submitTime * 1000)) + : undefined, + submitter: + output.submitter !== undefined && output.submitter !== null + ? deserializeAws_restJson1ImportJobSubmitter(output.submitter, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1TableRow = (output: any, context: __SerdeContext): TableRow => { + return { + cells: + output.cells !== undefined && output.cells !== null + ? deserializeAws_restJson1Cells(output.cells, context) + : undefined, + rowId: output.rowId !== undefined && output.rowId !== null ? 
output.rowId : undefined, + } as any; +}; + +const deserializeAws_restJson1TableRows = (output: any, context: __SerdeContext): TableRow[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1TableRow(entry, context)); +}; + +const deserializeAws_restJson1Tables = (output: any, context: __SerdeContext): Table[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1Table(entry, context)); +}; + +const deserializeAws_restJson1UpsertRowsResult = (output: any, context: __SerdeContext): UpsertRowsResult => { + return { + rowIds: + output.rowIds !== undefined && output.rowIds !== null + ? deserializeAws_restJson1RowIdList(output.rowIds, context) + : undefined, + upsertAction: output.upsertAction !== undefined && output.upsertAction !== null ? output.upsertAction : undefined, + } as any; +}; + +const deserializeAws_restJson1UpsertRowsResultMap = ( + output: any, + context: __SerdeContext +): { [key: string]: UpsertRowsResult } => { + return Object.entries(output).reduce( + (acc: { [key: string]: UpsertRowsResult }, [key, value]: [string, any]) => ({ + ...acc, + [key]: deserializeAws_restJson1UpsertRowsResult(value, context), + }), + {} + ); +}; + const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ httpStatusCode: output.statusCode, httpHeaders: output.headers, diff --git a/clients/client-lambda/Lambda.ts b/clients/client-lambda/Lambda.ts index 211d42c21f06..270dc4527d6a 100644 --- a/clients/client-lambda/Lambda.ts +++ b/clients/client-lambda/Lambda.ts @@ -488,8 +488,8 @@ export class Lambda extends LambdaClient { } /** - *

        Creates a Lambda function. To create a function, you need a deployment package and an execution role. The - * deployment package contains your function code. The execution role grants the function permission to use AWS + *

        Creates a Lambda function. To create a function, you need a deployment package and an execution role. The + * deployment package is a ZIP archive or image container that contains your function code. The execution role grants the function permission to use AWS * services, such as Amazon CloudWatch Logs for log streaming and AWS X-Ray for request tracing.

        * *

        When you create a function, Lambda provisions an instance of the function and its supporting resources. If @@ -510,7 +510,8 @@ export class Lambda extends LambdaClient { * to both the unpublished and published versions of the function, and include tags (TagResource) * and per-function concurrency limits (PutFunctionConcurrency).

        * - *

        To enable code signing for this function, specify the ARN of a code-signing configuration. When a user + *

        You can use code signing if your deployment package is a ZIP archive. To enable code signing for this function, + * specify the ARN of a code-signing configuration. When a user * attempts to deploy a code package with UpdateFunctionCode, Lambda checks that the code * package has a valid signature from a trusted publisher. The code-signing configuration * includes set set of signing profiles, which define the trusted publishers for this function.

        @@ -1398,7 +1399,7 @@ export class Lambda extends LambdaClient { /** *

        Returns a list of code - * signing configurations for the specified function. A request returns up to 10,000 configurations per + * signing configurations. A request returns up to 10,000 configurations per * call. You can use the MaxItems parameter to return fewer configurations per call.

        */ public listCodeSigningConfigs( diff --git a/clients/client-lambda/commands/CreateFunctionCommand.ts b/clients/client-lambda/commands/CreateFunctionCommand.ts index 26efdb72e449..13ea233597cf 100644 --- a/clients/client-lambda/commands/CreateFunctionCommand.ts +++ b/clients/client-lambda/commands/CreateFunctionCommand.ts @@ -21,8 +21,8 @@ export type CreateFunctionCommandInput = CreateFunctionRequest; export type CreateFunctionCommandOutput = FunctionConfiguration & __MetadataBearer; /** - *

        Creates a Lambda function. To create a function, you need a deployment package and an execution role. The - * deployment package contains your function code. The execution role grants the function permission to use AWS + *

        Creates a Lambda function. To create a function, you need a deployment package and an execution role. The + * deployment package is a ZIP archive or image container that contains your function code. The execution role grants the function permission to use AWS * services, such as Amazon CloudWatch Logs for log streaming and AWS X-Ray for request tracing.

        * *

        When you create a function, Lambda provisions an instance of the function and its supporting resources. If @@ -43,7 +43,8 @@ export type CreateFunctionCommandOutput = FunctionConfiguration & __MetadataBear * to both the unpublished and published versions of the function, and include tags (TagResource) * and per-function concurrency limits (PutFunctionConcurrency).

        * - *

        To enable code signing for this function, specify the ARN of a code-signing configuration. When a user + *

        You can use code signing if your deployment package is a ZIP archive. To enable code signing for this function, + * specify the ARN of a code-signing configuration. When a user * attempts to deploy a code package with UpdateFunctionCode, Lambda checks that the code * package has a valid signature from a trusted publisher. The code-signing configuration * includes set set of signing profiles, which define the trusted publishers for this function.

        diff --git a/clients/client-lambda/commands/ListCodeSigningConfigsCommand.ts b/clients/client-lambda/commands/ListCodeSigningConfigsCommand.ts index 96d2a8b0f80b..00e1999a8385 100644 --- a/clients/client-lambda/commands/ListCodeSigningConfigsCommand.ts +++ b/clients/client-lambda/commands/ListCodeSigningConfigsCommand.ts @@ -22,7 +22,7 @@ export type ListCodeSigningConfigsCommandOutput = ListCodeSigningConfigsResponse /** *

        Returns a list of code - * signing configurations for the specified function. A request returns up to 10,000 configurations per + * signing configurations. A request returns up to 10,000 configurations per * call. You can use the MaxItems parameter to return fewer configurations per call.

        */ export class ListCodeSigningConfigsCommand extends $Command< diff --git a/clients/client-lambda/models/models_0.ts b/clients/client-lambda/models/models_0.ts index 894728858398..e3b0cf88f3d0 100644 --- a/clients/client-lambda/models/models_0.ts +++ b/clients/client-lambda/models/models_0.ts @@ -500,12 +500,12 @@ export enum CodeSigningPolicy { /** *

        Code signing configuration policies specifies the validation failure action for signature mismatch or - * expiry.

        + * expiry.

        */ export interface CodeSigningPolicies { /** *

        Code signing configuration policy for deployment validation failure. If you set the policy to - * Enforce, Lambda blocks the deployment request if code-signing validation checks fail. If you set the + * Enforce, Lambda blocks the deployment request if signature validation checks fail. If you set the * policy to Warn, Lambda allows the deployment and creates a CloudWatch log.

        *

        Default value: Warn *

        @@ -1053,8 +1053,8 @@ export namespace CodeVerificationFailedException { } /** - *

        The code for the Lambda function. You can specify either an object in Amazon S3, or upload a deployment - * package directly.

        + *

        The code for the Lambda function. You can specify either an object in Amazon S3, upload a ZIP archive deployment + * package directly, or specify the URI of a container image.

        */ export interface FunctionCode { /** @@ -1077,6 +1077,11 @@ export interface FunctionCode { *

        For versioned objects, the version of the deployment package object to use.

        */ S3ObjectVersion?: string; + + /** + *

        URI of a container image in the Amazon ECR registry.

        + */ + ImageUri?: string; } export namespace FunctionCode { @@ -1141,6 +1146,39 @@ export namespace FileSystemConfig { }); } +/** + *

        Configuration values that override the container image Dockerfile. See + * Override Container settings.

        + */ +export interface ImageConfig { + /** + *

        Specifies the entry point to their application, which is typically the location of the runtime + * executable.

        + */ + EntryPoint?: string[]; + + /** + *

        Specifies parameters that you want to pass in with ENTRYPOINT.

        + */ + Command?: string[]; + + /** + *

        Specifies the working directory.

        + */ + WorkingDirectory?: string; +} + +export namespace ImageConfig { + export const filterSensitiveLog = (obj: ImageConfig): any => ({ + ...obj, + }); +} + +export enum PackageType { + Image = "Image", + Zip = "Zip", +} + export enum Runtime { dotnetcore10 = "dotnetcore1.0", dotnetcore20 = "dotnetcore2.0", @@ -1238,7 +1276,7 @@ export interface CreateFunctionRequest { /** *

        The identifier of the function's runtime.

        */ - Runtime: Runtime | string | undefined; + Runtime?: Runtime | string; /** *

        The Amazon Resource Name (ARN) of the function's execution role.

        @@ -1250,7 +1288,7 @@ export interface CreateFunctionRequest { * file name. It can also include namespaces and other qualifiers, depending on the runtime. For more information, * see Programming Model.

        */ - Handler: string | undefined; + Handler?: string; /** *

        The code for the function.

        @@ -1286,6 +1324,11 @@ export interface CreateFunctionRequest { */ VpcConfig?: VpcConfig; + /** + *

        The type of deployment package. Set to Image for container image and set Zip for ZIP archive.

        + */ + PackageType?: PackageType | string; + /** *

        A dead letter queue configuration that specifies the queue or topic where Lambda sends asynchronous events * when they fail processing. For more information, see Dead Letter Queues.

        @@ -1326,9 +1369,14 @@ export interface CreateFunctionRequest { */ FileSystemConfigs?: FileSystemConfig[]; + /** + *

        Configuration values that override the container image Dockerfile.

        + */ + ImageConfig?: ImageConfig; + /** *

        To enable code signing for this function, specify the ARN of a code-signing configuration. A code-signing configuration - * includes set set of signing profiles, which define the trusted publishers for this function.

        + * includes a set of signing profiles, which define the trusted publishers for this function.

        */ CodeSigningConfigArn?: string; } @@ -1387,6 +1435,50 @@ export namespace EnvironmentResponse { }); } +/** + *

        Error response to GetFunctionConfiguration.

        + */ +export interface ImageConfigError { + /** + *

        Error code.

        + */ + ErrorCode?: string; + + /** + *

        Error message.

        + */ + Message?: string; +} + +export namespace ImageConfigError { + export const filterSensitiveLog = (obj: ImageConfigError): any => ({ + ...obj, + ...(obj.Message && { Message: SENSITIVE_STRING }), + }); +} + +/** + *

        Response to GetFunctionConfiguration request.

        + */ +export interface ImageConfigResponse { + /** + *

        Configuration values that override the container image Dockerfile.

        + */ + ImageConfig?: ImageConfig; + + /** + *

        Error response to GetFunctionConfiguration.

        + */ + Error?: ImageConfigError; +} + +export namespace ImageConfigResponse { + export const filterSensitiveLog = (obj: ImageConfigResponse): any => ({ + ...obj, + ...(obj.Error && { Error: ImageConfigError.filterSensitiveLog(obj.Error) }), + }); +} + export enum LastUpdateStatus { Failed = "Failed", InProgress = "InProgress", @@ -1395,6 +1487,8 @@ export enum LastUpdateStatus { export enum LastUpdateStatusReasonCode { EniLimitExceeded = "EniLimitExceeded", + ImageAccessDenied = "ImageAccessDenied", + ImageDeleted = "ImageDeleted", InsufficientRolePermissions = "InsufficientRolePermissions", InternalError = "InternalError", InvalidConfiguration = "InvalidConfiguration", @@ -1446,6 +1540,8 @@ export enum StateReasonCode { Creating = "Creating", EniLimitExceeded = "EniLimitExceeded", Idle = "Idle", + ImageAccessDenied = "ImageAccessDenied", + ImageDeleted = "ImageDeleted", InsufficientRolePermissions = "InsufficientRolePermissions", InternalError = "InternalError", InvalidConfiguration = "InvalidConfiguration", @@ -1641,6 +1737,16 @@ export interface FunctionConfiguration { */ FileSystemConfigs?: FileSystemConfig[]; + /** + *

        The type of deployment package. Set to Image for container image and set Zip for ZIP archive.

        + */ + PackageType?: PackageType | string; + + /** + *

        The function's image configuration values.

        + */ + ImageConfigResponse?: ImageConfigResponse; + /** *

        The ARN of the signing profile version.

        */ @@ -1656,6 +1762,9 @@ export namespace FunctionConfiguration { export const filterSensitiveLog = (obj: FunctionConfiguration): any => ({ ...obj, ...(obj.Environment && { Environment: EnvironmentResponse.filterSensitiveLog(obj.Environment) }), + ...(obj.ImageConfigResponse && { + ImageConfigResponse: ImageConfigResponse.filterSensitiveLog(obj.ImageConfigResponse), + }), }); } @@ -2109,6 +2218,16 @@ export interface FunctionCodeLocation { *

        A presigned URL that you can use to download the deployment package.

        */ Location?: string; + + /** + *

        URI of a container image in the Amazon ECR registry.

        + */ + ImageUri?: string; + + /** + *

        The resolved URI for the image.

        + */ + ResolvedImageUri?: string; } export namespace FunctionCodeLocation { @@ -4764,6 +4883,11 @@ export interface UpdateFunctionCodeRequest { */ S3ObjectVersion?: string; + /** + *

        URI of a container image in the Amazon ECR registry.

        + */ + ImageUri?: string; + /** *

        Set to true to publish a new version of the function after updating the code. This has the same effect as * calling PublishVersion separately.

        @@ -4895,6 +5019,11 @@ export interface UpdateFunctionConfigurationRequest { *

        Connection settings for an Amazon EFS file system.

        */ FileSystemConfigs?: FileSystemConfig[]; + + /** + *

        Configuration values that override the container image Dockerfile.

        + */ + ImageConfig?: ImageConfig; } export namespace UpdateFunctionConfigurationRequest { diff --git a/clients/client-lambda/protocols/Aws_restJson1.ts b/clients/client-lambda/protocols/Aws_restJson1.ts index 1104e4588d7a..73b50cf7c994 100644 --- a/clients/client-lambda/protocols/Aws_restJson1.ts +++ b/clients/client-lambda/protocols/Aws_restJson1.ts @@ -189,6 +189,9 @@ import { FunctionCodeLocation, FunctionConfiguration, FunctionEventInvokeConfig, + ImageConfig, + ImageConfigError, + ImageConfigResponse, InvalidCodeSignatureException, InvalidParameterValueException, InvalidRequestContentException, @@ -476,9 +479,13 @@ export const serializeAws_restJson1CreateFunctionCommand = async ( }), ...(input.FunctionName !== undefined && { FunctionName: input.FunctionName }), ...(input.Handler !== undefined && { Handler: input.Handler }), + ...(input.ImageConfig !== undefined && { + ImageConfig: serializeAws_restJson1ImageConfig(input.ImageConfig, context), + }), ...(input.KMSKeyArn !== undefined && { KMSKeyArn: input.KMSKeyArn }), ...(input.Layers !== undefined && { Layers: serializeAws_restJson1LayerList(input.Layers, context) }), ...(input.MemorySize !== undefined && { MemorySize: input.MemorySize }), + ...(input.PackageType !== undefined && { PackageType: input.PackageType }), ...(input.Publish !== undefined && { Publish: input.Publish }), ...(input.Role !== undefined && { Role: input.Role }), ...(input.Runtime !== undefined && { Runtime: input.Runtime }), @@ -2228,6 +2235,7 @@ export const serializeAws_restJson1UpdateFunctionCodeCommand = async ( let body: any; body = JSON.stringify({ ...(input.DryRun !== undefined && { DryRun: input.DryRun }), + ...(input.ImageUri !== undefined && { ImageUri: input.ImageUri }), ...(input.Publish !== undefined && { Publish: input.Publish }), ...(input.RevisionId !== undefined && { RevisionId: input.RevisionId }), ...(input.S3Bucket !== undefined && { S3Bucket: input.S3Bucket }), @@ -2277,6 +2285,9 @@ export const 
serializeAws_restJson1UpdateFunctionConfigurationCommand = async ( FileSystemConfigs: serializeAws_restJson1FileSystemConfigList(input.FileSystemConfigs, context), }), ...(input.Handler !== undefined && { Handler: input.Handler }), + ...(input.ImageConfig !== undefined && { + ImageConfig: serializeAws_restJson1ImageConfig(input.ImageConfig, context), + }), ...(input.KMSKeyArn !== undefined && { KMSKeyArn: input.KMSKeyArn }), ...(input.Layers !== undefined && { Layers: serializeAws_restJson1LayerList(input.Layers, context) }), ...(input.MemorySize !== undefined && { MemorySize: input.MemorySize }), @@ -2902,6 +2913,7 @@ export const deserializeAws_restJson1CreateFunctionCommand = async ( FunctionArn: undefined, FunctionName: undefined, Handler: undefined, + ImageConfigResponse: undefined, KMSKeyArn: undefined, LastModified: undefined, LastUpdateStatus: undefined, @@ -2910,6 +2922,7 @@ export const deserializeAws_restJson1CreateFunctionCommand = async ( Layers: undefined, MasterArn: undefined, MemorySize: undefined, + PackageType: undefined, RevisionId: undefined, Role: undefined, Runtime: undefined, @@ -2951,6 +2964,9 @@ export const deserializeAws_restJson1CreateFunctionCommand = async ( if (data.Handler !== undefined && data.Handler !== null) { contents.Handler = data.Handler; } + if (data.ImageConfigResponse !== undefined && data.ImageConfigResponse !== null) { + contents.ImageConfigResponse = deserializeAws_restJson1ImageConfigResponse(data.ImageConfigResponse, context); + } if (data.KMSKeyArn !== undefined && data.KMSKeyArn !== null) { contents.KMSKeyArn = data.KMSKeyArn; } @@ -2975,6 +2991,9 @@ export const deserializeAws_restJson1CreateFunctionCommand = async ( if (data.MemorySize !== undefined && data.MemorySize !== null) { contents.MemorySize = data.MemorySize; } + if (data.PackageType !== undefined && data.PackageType !== null) { + contents.PackageType = data.PackageType; + } if (data.RevisionId !== undefined && data.RevisionId !== null) { 
contents.RevisionId = data.RevisionId; } @@ -4563,6 +4582,7 @@ export const deserializeAws_restJson1GetFunctionConfigurationCommand = async ( FunctionArn: undefined, FunctionName: undefined, Handler: undefined, + ImageConfigResponse: undefined, KMSKeyArn: undefined, LastModified: undefined, LastUpdateStatus: undefined, @@ -4571,6 +4591,7 @@ export const deserializeAws_restJson1GetFunctionConfigurationCommand = async ( Layers: undefined, MasterArn: undefined, MemorySize: undefined, + PackageType: undefined, RevisionId: undefined, Role: undefined, Runtime: undefined, @@ -4612,6 +4633,9 @@ export const deserializeAws_restJson1GetFunctionConfigurationCommand = async ( if (data.Handler !== undefined && data.Handler !== null) { contents.Handler = data.Handler; } + if (data.ImageConfigResponse !== undefined && data.ImageConfigResponse !== null) { + contents.ImageConfigResponse = deserializeAws_restJson1ImageConfigResponse(data.ImageConfigResponse, context); + } if (data.KMSKeyArn !== undefined && data.KMSKeyArn !== null) { contents.KMSKeyArn = data.KMSKeyArn; } @@ -4636,6 +4660,9 @@ export const deserializeAws_restJson1GetFunctionConfigurationCommand = async ( if (data.MemorySize !== undefined && data.MemorySize !== null) { contents.MemorySize = data.MemorySize; } + if (data.PackageType !== undefined && data.PackageType !== null) { + contents.PackageType = data.PackageType; + } if (data.RevisionId !== undefined && data.RevisionId !== null) { contents.RevisionId = data.RevisionId; } @@ -6685,6 +6712,7 @@ export const deserializeAws_restJson1PublishVersionCommand = async ( FunctionArn: undefined, FunctionName: undefined, Handler: undefined, + ImageConfigResponse: undefined, KMSKeyArn: undefined, LastModified: undefined, LastUpdateStatus: undefined, @@ -6693,6 +6721,7 @@ export const deserializeAws_restJson1PublishVersionCommand = async ( Layers: undefined, MasterArn: undefined, MemorySize: undefined, + PackageType: undefined, RevisionId: undefined, Role: undefined, Runtime: 
undefined, @@ -6734,6 +6763,9 @@ export const deserializeAws_restJson1PublishVersionCommand = async ( if (data.Handler !== undefined && data.Handler !== null) { contents.Handler = data.Handler; } + if (data.ImageConfigResponse !== undefined && data.ImageConfigResponse !== null) { + contents.ImageConfigResponse = deserializeAws_restJson1ImageConfigResponse(data.ImageConfigResponse, context); + } if (data.KMSKeyArn !== undefined && data.KMSKeyArn !== null) { contents.KMSKeyArn = data.KMSKeyArn; } @@ -6758,6 +6790,9 @@ export const deserializeAws_restJson1PublishVersionCommand = async ( if (data.MemorySize !== undefined && data.MemorySize !== null) { contents.MemorySize = data.MemorySize; } + if (data.PackageType !== undefined && data.PackageType !== null) { + contents.PackageType = data.PackageType; + } if (data.RevisionId !== undefined && data.RevisionId !== null) { contents.RevisionId = data.RevisionId; } @@ -7985,6 +8020,7 @@ export const deserializeAws_restJson1UpdateFunctionCodeCommand = async ( FunctionArn: undefined, FunctionName: undefined, Handler: undefined, + ImageConfigResponse: undefined, KMSKeyArn: undefined, LastModified: undefined, LastUpdateStatus: undefined, @@ -7993,6 +8029,7 @@ export const deserializeAws_restJson1UpdateFunctionCodeCommand = async ( Layers: undefined, MasterArn: undefined, MemorySize: undefined, + PackageType: undefined, RevisionId: undefined, Role: undefined, Runtime: undefined, @@ -8034,6 +8071,9 @@ export const deserializeAws_restJson1UpdateFunctionCodeCommand = async ( if (data.Handler !== undefined && data.Handler !== null) { contents.Handler = data.Handler; } + if (data.ImageConfigResponse !== undefined && data.ImageConfigResponse !== null) { + contents.ImageConfigResponse = deserializeAws_restJson1ImageConfigResponse(data.ImageConfigResponse, context); + } if (data.KMSKeyArn !== undefined && data.KMSKeyArn !== null) { contents.KMSKeyArn = data.KMSKeyArn; } @@ -8058,6 +8098,9 @@ export const 
deserializeAws_restJson1UpdateFunctionCodeCommand = async ( if (data.MemorySize !== undefined && data.MemorySize !== null) { contents.MemorySize = data.MemorySize; } + if (data.PackageType !== undefined && data.PackageType !== null) { + contents.PackageType = data.PackageType; + } if (data.RevisionId !== undefined && data.RevisionId !== null) { contents.RevisionId = data.RevisionId; } @@ -8224,6 +8267,7 @@ export const deserializeAws_restJson1UpdateFunctionConfigurationCommand = async FunctionArn: undefined, FunctionName: undefined, Handler: undefined, + ImageConfigResponse: undefined, KMSKeyArn: undefined, LastModified: undefined, LastUpdateStatus: undefined, @@ -8232,6 +8276,7 @@ export const deserializeAws_restJson1UpdateFunctionConfigurationCommand = async Layers: undefined, MasterArn: undefined, MemorySize: undefined, + PackageType: undefined, RevisionId: undefined, Role: undefined, Runtime: undefined, @@ -8273,6 +8318,9 @@ export const deserializeAws_restJson1UpdateFunctionConfigurationCommand = async if (data.Handler !== undefined && data.Handler !== null) { contents.Handler = data.Handler; } + if (data.ImageConfigResponse !== undefined && data.ImageConfigResponse !== null) { + contents.ImageConfigResponse = deserializeAws_restJson1ImageConfigResponse(data.ImageConfigResponse, context); + } if (data.KMSKeyArn !== undefined && data.KMSKeyArn !== null) { contents.KMSKeyArn = data.KMSKeyArn; } @@ -8297,6 +8345,9 @@ export const deserializeAws_restJson1UpdateFunctionConfigurationCommand = async if (data.MemorySize !== undefined && data.MemorySize !== null) { contents.MemorySize = data.MemorySize; } + if (data.PackageType !== undefined && data.PackageType !== null) { + contents.PackageType = data.PackageType; + } if (data.RevisionId !== undefined && data.RevisionId !== null) { contents.RevisionId = data.RevisionId; } @@ -9349,6 +9400,7 @@ const serializeAws_restJson1FileSystemConfigList = (input: FileSystemConfig[], c const serializeAws_restJson1FunctionCode = 
(input: FunctionCode, context: __SerdeContext): any => { return { + ...(input.ImageUri !== undefined && { ImageUri: input.ImageUri }), ...(input.S3Bucket !== undefined && { S3Bucket: input.S3Bucket }), ...(input.S3Key !== undefined && { S3Key: input.S3Key }), ...(input.S3ObjectVersion !== undefined && { S3ObjectVersion: input.S3ObjectVersion }), @@ -9356,6 +9408,14 @@ const serializeAws_restJson1FunctionCode = (input: FunctionCode, context: __Serd }; }; +const serializeAws_restJson1ImageConfig = (input: ImageConfig, context: __SerdeContext): any => { + return { + ...(input.Command !== undefined && { Command: serializeAws_restJson1StringList(input.Command, context) }), + ...(input.EntryPoint !== undefined && { EntryPoint: serializeAws_restJson1StringList(input.EntryPoint, context) }), + ...(input.WorkingDirectory !== undefined && { WorkingDirectory: input.WorkingDirectory }), + }; +}; + const serializeAws_restJson1LayerList = (input: string[], context: __SerdeContext): any => { return input.map((entry) => entry); }; @@ -9413,6 +9473,10 @@ const serializeAws_restJson1SourceAccessConfigurations = ( return input.map((entry) => serializeAws_restJson1SourceAccessConfiguration(entry, context)); }; +const serializeAws_restJson1StringList = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + const serializeAws_restJson1SubnetIds = (input: string[], context: __SerdeContext): any => { return input.map((entry) => entry); }; @@ -9721,9 +9785,12 @@ const deserializeAws_restJson1FunctionArnList = (output: any, context: __SerdeCo const deserializeAws_restJson1FunctionCodeLocation = (output: any, context: __SerdeContext): FunctionCodeLocation => { return { + ImageUri: output.ImageUri !== undefined && output.ImageUri !== null ? output.ImageUri : undefined, Location: output.Location !== undefined && output.Location !== null ? 
output.Location : undefined, RepositoryType: output.RepositoryType !== undefined && output.RepositoryType !== null ? output.RepositoryType : undefined, + ResolvedImageUri: + output.ResolvedImageUri !== undefined && output.ResolvedImageUri !== null ? output.ResolvedImageUri : undefined, } as any; }; @@ -9747,6 +9814,10 @@ const deserializeAws_restJson1FunctionConfiguration = (output: any, context: __S FunctionArn: output.FunctionArn !== undefined && output.FunctionArn !== null ? output.FunctionArn : undefined, FunctionName: output.FunctionName !== undefined && output.FunctionName !== null ? output.FunctionName : undefined, Handler: output.Handler !== undefined && output.Handler !== null ? output.Handler : undefined, + ImageConfigResponse: + output.ImageConfigResponse !== undefined && output.ImageConfigResponse !== null + ? deserializeAws_restJson1ImageConfigResponse(output.ImageConfigResponse, context) + : undefined, KMSKeyArn: output.KMSKeyArn !== undefined && output.KMSKeyArn !== null ? output.KMSKeyArn : undefined, LastModified: output.LastModified !== undefined && output.LastModified !== null ? output.LastModified : undefined, LastUpdateStatus: @@ -9765,6 +9836,7 @@ const deserializeAws_restJson1FunctionConfiguration = (output: any, context: __S : undefined, MasterArn: output.MasterArn !== undefined && output.MasterArn !== null ? output.MasterArn : undefined, MemorySize: output.MemorySize !== undefined && output.MemorySize !== null ? output.MemorySize : undefined, + PackageType: output.PackageType !== undefined && output.PackageType !== null ? output.PackageType : undefined, RevisionId: output.RevisionId !== undefined && output.RevisionId !== null ? output.RevisionId : undefined, Role: output.Role !== undefined && output.Role !== null ? output.Role : undefined, Runtime: output.Runtime !== undefined && output.Runtime !== null ? 
output.Runtime : undefined, @@ -9827,6 +9899,41 @@ const deserializeAws_restJson1FunctionList = (output: any, context: __SerdeConte return (output || []).map((entry: any) => deserializeAws_restJson1FunctionConfiguration(entry, context)); }; +const deserializeAws_restJson1ImageConfig = (output: any, context: __SerdeContext): ImageConfig => { + return { + Command: + output.Command !== undefined && output.Command !== null + ? deserializeAws_restJson1StringList(output.Command, context) + : undefined, + EntryPoint: + output.EntryPoint !== undefined && output.EntryPoint !== null + ? deserializeAws_restJson1StringList(output.EntryPoint, context) + : undefined, + WorkingDirectory: + output.WorkingDirectory !== undefined && output.WorkingDirectory !== null ? output.WorkingDirectory : undefined, + } as any; +}; + +const deserializeAws_restJson1ImageConfigError = (output: any, context: __SerdeContext): ImageConfigError => { + return { + ErrorCode: output.ErrorCode !== undefined && output.ErrorCode !== null ? output.ErrorCode : undefined, + Message: output.Message !== undefined && output.Message !== null ? output.Message : undefined, + } as any; +}; + +const deserializeAws_restJson1ImageConfigResponse = (output: any, context: __SerdeContext): ImageConfigResponse => { + return { + Error: + output.Error !== undefined && output.Error !== null + ? deserializeAws_restJson1ImageConfigError(output.Error, context) + : undefined, + ImageConfig: + output.ImageConfig !== undefined && output.ImageConfig !== null + ? deserializeAws_restJson1ImageConfig(output.ImageConfig, context) + : undefined, + } as any; +}; + const deserializeAws_restJson1Layer = (output: any, context: __SerdeContext): Layer => { return { Arn: output.Arn !== undefined && output.Arn !== null ? 
output.Arn : undefined, @@ -9972,6 +10079,10 @@ const deserializeAws_restJson1SourceAccessConfigurations = ( return (output || []).map((entry: any) => deserializeAws_restJson1SourceAccessConfiguration(entry, context)); }; +const deserializeAws_restJson1StringList = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + const deserializeAws_restJson1SubnetIds = (output: any, context: __SerdeContext): string[] => { return (output || []).map((entry: any) => entry); }; diff --git a/clients/client-lookoutvision/.gitignore b/clients/client-lookoutvision/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-lookoutvision/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-lookoutvision/.npmignore b/clients/client-lookoutvision/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-lookoutvision/.npmignore @@ -0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-lookoutvision/LICENSE b/clients/client-lookoutvision/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-lookoutvision/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-lookoutvision/LookoutVision.ts b/clients/client-lookoutvision/LookoutVision.ts new file mode 100644 index 000000000000..9712cd0777c5 --- /dev/null +++ b/clients/client-lookoutvision/LookoutVision.ts @@ -0,0 +1,612 @@ +import { LookoutVisionClient } from "./LookoutVisionClient"; +import { + CreateDatasetCommand, + CreateDatasetCommandInput, + CreateDatasetCommandOutput, +} from "./commands/CreateDatasetCommand"; +import { CreateModelCommand, CreateModelCommandInput, CreateModelCommandOutput } from "./commands/CreateModelCommand"; +import { + CreateProjectCommand, + CreateProjectCommandInput, + CreateProjectCommandOutput, +} from "./commands/CreateProjectCommand"; +import { + DeleteDatasetCommand, + DeleteDatasetCommandInput, + DeleteDatasetCommandOutput, +} from "./commands/DeleteDatasetCommand"; +import { DeleteModelCommand, DeleteModelCommandInput, DeleteModelCommandOutput } from "./commands/DeleteModelCommand"; +import { + DeleteProjectCommand, + DeleteProjectCommandInput, + DeleteProjectCommandOutput, +} from "./commands/DeleteProjectCommand"; +import { + DescribeDatasetCommand, + DescribeDatasetCommandInput, + DescribeDatasetCommandOutput, +} from "./commands/DescribeDatasetCommand"; +import { + DescribeModelCommand, + DescribeModelCommandInput, + DescribeModelCommandOutput, +} from "./commands/DescribeModelCommand"; 
+import { + DescribeProjectCommand, + DescribeProjectCommandInput, + DescribeProjectCommandOutput, +} from "./commands/DescribeProjectCommand"; +import { + DetectAnomaliesCommand, + DetectAnomaliesCommandInput, + DetectAnomaliesCommandOutput, +} from "./commands/DetectAnomaliesCommand"; +import { + ListDatasetEntriesCommand, + ListDatasetEntriesCommandInput, + ListDatasetEntriesCommandOutput, +} from "./commands/ListDatasetEntriesCommand"; +import { ListModelsCommand, ListModelsCommandInput, ListModelsCommandOutput } from "./commands/ListModelsCommand"; +import { + ListProjectsCommand, + ListProjectsCommandInput, + ListProjectsCommandOutput, +} from "./commands/ListProjectsCommand"; +import { StartModelCommand, StartModelCommandInput, StartModelCommandOutput } from "./commands/StartModelCommand"; +import { StopModelCommand, StopModelCommandInput, StopModelCommandOutput } from "./commands/StopModelCommand"; +import { + UpdateDatasetEntriesCommand, + UpdateDatasetEntriesCommandInput, + UpdateDatasetEntriesCommandOutput, +} from "./commands/UpdateDatasetEntriesCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + *

        This is the Amazon Lookout for Vision API Reference. It provides descriptions of actions, + * data types, common parameters, and common errors.

        + *

        Amazon Lookout for Vision enables you to find visual defects in industrial products, + * accurately and at scale. It uses computer vision to identify missing components in an industrial product, + * damage to vehicles or structures, irregularities in production lines, and even minuscule defects in + * silicon wafers — or any other physical item where quality is important such as a missing capacitor + * on printed circuit boards.

        + */ +export class LookoutVision extends LookoutVisionClient { + /** + *

        Creates a new dataset in an Amazon Lookout for Vision project. CreateDataset can create a + * training or a test dataset from a valid dataset source (DatasetSource).

        + *

        If you want a single dataset project, specify train for the value of + * DatasetType.

        + *

        To have a project with separate training and test datasets, call CreateDataset twice. + * On the first call, specify train for the value of + * DatasetType. On the second call, specify test for the value of + * DatasetType. + * + * of dataset with

        + */ + public createDataset( + args: CreateDatasetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createDataset( + args: CreateDatasetCommandInput, + cb: (err: any, data?: CreateDatasetCommandOutput) => void + ): void; + public createDataset( + args: CreateDatasetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateDatasetCommandOutput) => void + ): void; + public createDataset( + args: CreateDatasetCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateDatasetCommandOutput) => void), + cb?: (err: any, data?: CreateDatasetCommandOutput) => void + ): Promise | void { + const command = new CreateDatasetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Creates a new version of a model within an an Amazon Lookout for Vision project. + * CreateModel is an asynchronous operation in which Amazon Lookout for Vision trains, tests, + * and evaluates a new version of a model.

        + *

        To get the current status, check the Status field returned + * in the response from DescribeModel.

        + *

        If the project has a single dataset, Amazon Lookout for Vision internally splits the dataset + * to create a training and a test dataset. + * If the project has a training and a test dataset, Lookout for Vision uses the respective datasets to train and test + * the model.

        + *

        After training completes, the evaluation metrics are stored at the location specified in + * OutputConfig.

        + */ + public createModel(args: CreateModelCommandInput, options?: __HttpHandlerOptions): Promise; + public createModel(args: CreateModelCommandInput, cb: (err: any, data?: CreateModelCommandOutput) => void): void; + public createModel( + args: CreateModelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateModelCommandOutput) => void + ): void; + public createModel( + args: CreateModelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateModelCommandOutput) => void), + cb?: (err: any, data?: CreateModelCommandOutput) => void + ): Promise | void { + const command = new CreateModelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Creates an empty Amazon Lookout for Vision project. After you create the project, add a dataset by calling + * CreateDataset.

        + */ + public createProject( + args: CreateProjectCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createProject( + args: CreateProjectCommandInput, + cb: (err: any, data?: CreateProjectCommandOutput) => void + ): void; + public createProject( + args: CreateProjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateProjectCommandOutput) => void + ): void; + public createProject( + args: CreateProjectCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateProjectCommandOutput) => void), + cb?: (err: any, data?: CreateProjectCommandOutput) => void + ): Promise | void { + const command = new CreateProjectCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Deletes an existing Amazon Lookout for Vision dataset.

        + *

        If your the project has a single + * dataset, you must create a new dataset before you can create a model.

        + *

        If you project has a training dataset and a test dataset consider the following.

        + *
          + *
        • + *

          If you delete the test dataset, your project reverts to a single dataset project. If you then + * train the model, Amazon Lookout for Vision internally splits the remaining dataset into a training and test dataset.

          + *
        • + *
        • + *

          If you delete the training dataset, you must create a training dataset before you can create a model.

          + *
        • + *
        + *

        It might take a while to delete the dataset. To check the current status, check the Status field + * in the response from a call to DescribeDataset.

        + */ + public deleteDataset( + args: DeleteDatasetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteDataset( + args: DeleteDatasetCommandInput, + cb: (err: any, data?: DeleteDatasetCommandOutput) => void + ): void; + public deleteDataset( + args: DeleteDatasetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteDatasetCommandOutput) => void + ): void; + public deleteDataset( + args: DeleteDatasetCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteDatasetCommandOutput) => void), + cb?: (err: any, data?: DeleteDatasetCommandOutput) => void + ): Promise | void { + const command = new DeleteDatasetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Deletes an Amazon Lookout for Vision model. You can't delete a running model. To stop a running model, + * use the StopModel operation.

        + */ + public deleteModel(args: DeleteModelCommandInput, options?: __HttpHandlerOptions): Promise; + public deleteModel(args: DeleteModelCommandInput, cb: (err: any, data?: DeleteModelCommandOutput) => void): void; + public deleteModel( + args: DeleteModelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteModelCommandOutput) => void + ): void; + public deleteModel( + args: DeleteModelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteModelCommandOutput) => void), + cb?: (err: any, data?: DeleteModelCommandOutput) => void + ): Promise | void { + const command = new DeleteModelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Deletes an Amazon Lookout for Vision project.

        + *

        To delete a project, you must first delete each version of the model associated with + * the project. To delete a model use the DeleteModel operation.

        + *

        The training and test datasets are deleted automatically for you. + * The images referenced by the training and test datasets aren't deleted.

        + */ + public deleteProject( + args: DeleteProjectCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteProject( + args: DeleteProjectCommandInput, + cb: (err: any, data?: DeleteProjectCommandOutput) => void + ): void; + public deleteProject( + args: DeleteProjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteProjectCommandOutput) => void + ): void; + public deleteProject( + args: DeleteProjectCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteProjectCommandOutput) => void), + cb?: (err: any, data?: DeleteProjectCommandOutput) => void + ): Promise | void { + const command = new DeleteProjectCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Describe an Amazon Lookout for Vision dataset.

        + */ + public describeDataset( + args: DescribeDatasetCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeDataset( + args: DescribeDatasetCommandInput, + cb: (err: any, data?: DescribeDatasetCommandOutput) => void + ): void; + public describeDataset( + args: DescribeDatasetCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeDatasetCommandOutput) => void + ): void; + public describeDataset( + args: DescribeDatasetCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeDatasetCommandOutput) => void), + cb?: (err: any, data?: DescribeDatasetCommandOutput) => void + ): Promise | void { + const command = new DescribeDatasetCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Describes a version of an Amazon Lookout for Vision model.

        + */ + public describeModel( + args: DescribeModelCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeModel( + args: DescribeModelCommandInput, + cb: (err: any, data?: DescribeModelCommandOutput) => void + ): void; + public describeModel( + args: DescribeModelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeModelCommandOutput) => void + ): void; + public describeModel( + args: DescribeModelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeModelCommandOutput) => void), + cb?: (err: any, data?: DescribeModelCommandOutput) => void + ): Promise | void { + const command = new DescribeModelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Describes an Amazon Lookout for Vision project.

        + */ + public describeProject( + args: DescribeProjectCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeProject( + args: DescribeProjectCommandInput, + cb: (err: any, data?: DescribeProjectCommandOutput) => void + ): void; + public describeProject( + args: DescribeProjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeProjectCommandOutput) => void + ): void; + public describeProject( + args: DescribeProjectCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeProjectCommandOutput) => void), + cb?: (err: any, data?: DescribeProjectCommandOutput) => void + ): Promise | void { + const command = new DescribeProjectCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Detects anomalies in an image that you supply.

        + *

        The response from DetectAnomalies includes a boolean prediction + * that the image contains one or more anomalies and a confidence value for the prediction.

        + * + *

        Before calling DetectAnomalies, you must first start your model with the StartModel operation. + * You are charged for the amount of time, in minutes, that a model runs and for the number of anomaly detection units that your + * model uses. If you are not using a model, use the StopModel operation to stop your model.

        + *
        + */ + public detectAnomalies( + args: DetectAnomaliesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public detectAnomalies( + args: DetectAnomaliesCommandInput, + cb: (err: any, data?: DetectAnomaliesCommandOutput) => void + ): void; + public detectAnomalies( + args: DetectAnomaliesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DetectAnomaliesCommandOutput) => void + ): void; + public detectAnomalies( + args: DetectAnomaliesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DetectAnomaliesCommandOutput) => void), + cb?: (err: any, data?: DetectAnomaliesCommandOutput) => void + ): Promise | void { + const command = new DetectAnomaliesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Lists the JSON Lines within a dataset. An Amazon Lookout for Vision JSON Line contains the anomaly + * information for a single image, including the image location and the assigned label.

        + */ + public listDatasetEntries( + args: ListDatasetEntriesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listDatasetEntries( + args: ListDatasetEntriesCommandInput, + cb: (err: any, data?: ListDatasetEntriesCommandOutput) => void + ): void; + public listDatasetEntries( + args: ListDatasetEntriesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListDatasetEntriesCommandOutput) => void + ): void; + public listDatasetEntries( + args: ListDatasetEntriesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListDatasetEntriesCommandOutput) => void), + cb?: (err: any, data?: ListDatasetEntriesCommandOutput) => void + ): Promise | void { + const command = new ListDatasetEntriesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Lists the versions of a model in an Amazon Lookout for Vision project.

        + */ + public listModels(args: ListModelsCommandInput, options?: __HttpHandlerOptions): Promise; + public listModels(args: ListModelsCommandInput, cb: (err: any, data?: ListModelsCommandOutput) => void): void; + public listModels( + args: ListModelsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListModelsCommandOutput) => void + ): void; + public listModels( + args: ListModelsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListModelsCommandOutput) => void), + cb?: (err: any, data?: ListModelsCommandOutput) => void + ): Promise | void { + const command = new ListModelsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Lists the Amazon Lookout for Vision projects in your AWS account.

        + */ + public listProjects( + args: ListProjectsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listProjects(args: ListProjectsCommandInput, cb: (err: any, data?: ListProjectsCommandOutput) => void): void; + public listProjects( + args: ListProjectsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListProjectsCommandOutput) => void + ): void; + public listProjects( + args: ListProjectsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListProjectsCommandOutput) => void), + cb?: (err: any, data?: ListProjectsCommandOutput) => void + ): Promise | void { + const command = new ListProjectsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Starts the running of the version of an Amazon Lookout for Vision model. Starting a model takes a while + * to complete. To check the current state of the model, use DescribeModel.

        + *

        Once the model is running, you can detect custom labels in new images by calling + * DetectAnomalies.

        + * + *

        You are charged for the amount of time that the model is running. To stop a running + * model, call StopModel.

        + *
        + */ + public startModel(args: StartModelCommandInput, options?: __HttpHandlerOptions): Promise; + public startModel(args: StartModelCommandInput, cb: (err: any, data?: StartModelCommandOutput) => void): void; + public startModel( + args: StartModelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: StartModelCommandOutput) => void + ): void; + public startModel( + args: StartModelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: StartModelCommandOutput) => void), + cb?: (err: any, data?: StartModelCommandOutput) => void + ): Promise | void { + const command = new StartModelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Stops a running model. The operation might take a while to complete. To + * check the current status, call DescribeModel.

        + */ + public stopModel(args: StopModelCommandInput, options?: __HttpHandlerOptions): Promise; + public stopModel(args: StopModelCommandInput, cb: (err: any, data?: StopModelCommandOutput) => void): void; + public stopModel( + args: StopModelCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: StopModelCommandOutput) => void + ): void; + public stopModel( + args: StopModelCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: StopModelCommandOutput) => void), + cb?: (err: any, data?: StopModelCommandOutput) => void + ): Promise | void { + const command = new StopModelCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

        Adds one or more JSON Line entries to a dataset. A JSON Line includes information about an image + * used for training or testing an Amazon Lookout for Vision model. The following is an example JSON Line.

        + * + * + *

        Updating a dataset might take a while to complete. To check the current status, call DescribeDataset and + * check the Status field in the response.

        + */ + public updateDatasetEntries( + args: UpdateDatasetEntriesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateDatasetEntries( + args: UpdateDatasetEntriesCommandInput, + cb: (err: any, data?: UpdateDatasetEntriesCommandOutput) => void + ): void; + public updateDatasetEntries( + args: UpdateDatasetEntriesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateDatasetEntriesCommandOutput) => void + ): void; + public updateDatasetEntries( + args: UpdateDatasetEntriesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateDatasetEntriesCommandOutput) => void), + cb?: (err: any, data?: UpdateDatasetEntriesCommandOutput) => void + ): Promise | void { + const command = new UpdateDatasetEntriesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-lookoutvision/LookoutVisionClient.ts b/clients/client-lookoutvision/LookoutVisionClient.ts new file mode 100644 index 000000000000..bbcacd77addd --- /dev/null +++ b/clients/client-lookoutvision/LookoutVisionClient.ts @@ -0,0 +1,258 @@ +import { CreateDatasetCommandInput, CreateDatasetCommandOutput } from "./commands/CreateDatasetCommand"; +import { CreateModelCommandInput, CreateModelCommandOutput } from "./commands/CreateModelCommand"; +import { CreateProjectCommandInput, CreateProjectCommandOutput } from "./commands/CreateProjectCommand"; +import { DeleteDatasetCommandInput, DeleteDatasetCommandOutput } from "./commands/DeleteDatasetCommand"; +import { DeleteModelCommandInput, DeleteModelCommandOutput } from "./commands/DeleteModelCommand"; +import { DeleteProjectCommandInput, DeleteProjectCommandOutput } from 
"./commands/DeleteProjectCommand"; +import { DescribeDatasetCommandInput, DescribeDatasetCommandOutput } from "./commands/DescribeDatasetCommand"; +import { DescribeModelCommandInput, DescribeModelCommandOutput } from "./commands/DescribeModelCommand"; +import { DescribeProjectCommandInput, DescribeProjectCommandOutput } from "./commands/DescribeProjectCommand"; +import { DetectAnomaliesCommandInput, DetectAnomaliesCommandOutput } from "./commands/DetectAnomaliesCommand"; +import { ListDatasetEntriesCommandInput, ListDatasetEntriesCommandOutput } from "./commands/ListDatasetEntriesCommand"; +import { ListModelsCommandInput, ListModelsCommandOutput } from "./commands/ListModelsCommand"; +import { ListProjectsCommandInput, ListProjectsCommandOutput } from "./commands/ListProjectsCommand"; +import { StartModelCommandInput, StartModelCommandOutput } from "./commands/StartModelCommand"; +import { StopModelCommandInput, StopModelCommandOutput } from "./commands/StopModelCommand"; +import { + UpdateDatasetEntriesCommandInput, + UpdateDatasetEntriesCommandOutput, +} from "./commands/UpdateDatasetEntriesCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + 
UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = + | CreateDatasetCommandInput + | CreateModelCommandInput + | CreateProjectCommandInput + | DeleteDatasetCommandInput + | DeleteModelCommandInput + | DeleteProjectCommandInput + | DescribeDatasetCommandInput + | DescribeModelCommandInput + | DescribeProjectCommandInput + | DetectAnomaliesCommandInput + | ListDatasetEntriesCommandInput + | ListModelsCommandInput + | ListProjectsCommandInput + | StartModelCommandInput + | StopModelCommandInput + | UpdateDatasetEntriesCommandInput; + +export type ServiceOutputTypes = + | CreateDatasetCommandOutput + | CreateModelCommandOutput + | CreateProjectCommandOutput + | DeleteDatasetCommandOutput + | DeleteModelCommandOutput + | DeleteProjectCommandOutput + | DescribeDatasetCommandOutput + | DescribeModelCommandOutput + | DescribeProjectCommandOutput + | DetectAnomaliesCommandOutput + | ListDatasetEntriesCommandOutput + | ListModelsCommandOutput + | ListProjectsCommandOutput + | StartModelCommandOutput + | StopModelCommandOutput + | UpdateDatasetEntriesCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. 
+ */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. + */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. + */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. + */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dyanamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + + /** + * Optional logger for logging debug/info/warn/error. 
+ */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region. + */ + regionInfoProvider?: RegionInfoProvider; +} + +export type LookoutVisionClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type LookoutVisionClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + *

        This is the Amazon Lookout for Vision API Reference. It provides descriptions of actions, + * data types, common parameters, and common errors.

        + *

        Amazon Lookout for Vision enables you to find visual defects in industrial products, + * accurately and at scale. It uses computer vision to identify missing components in an industrial product, + * damage to vehicles or structures, irregularities in production lines, and even minuscule defects in + * silicon wafers — or any other physical item where quality is important such as a missing capacitor + * on printed circuit boards.

        + */ +export class LookoutVisionClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + LookoutVisionClientResolvedConfig +> { + readonly config: LookoutVisionClientResolvedConfig; + + constructor(configuration: LookoutVisionClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-lookoutvision/README.md b/clients/client-lookoutvision/README.md new file mode 100644 index 000000000000..6dc81bd5c3e1 --- /dev/null +++ b/clients/client-lookoutvision/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-lookoutvision + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-lookoutvision/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-lookoutvision) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-lookoutvision.svg)](https://www.npmjs.com/package/@aws-sdk/client-lookoutvision) + +For SDK usage, please step to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-lookoutvision/commands/CreateDatasetCommand.ts b/clients/client-lookoutvision/commands/CreateDatasetCommand.ts new file mode 100644 index 000000000000..77e1a0c5742c --- /dev/null +++ b/clients/client-lookoutvision/commands/CreateDatasetCommand.ts @@ -0,0 +1,97 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { CreateDatasetRequest, CreateDatasetResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateDatasetCommand, + serializeAws_restJson1CreateDatasetCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateDatasetCommandInput = CreateDatasetRequest; +export type CreateDatasetCommandOutput = CreateDatasetResponse & __MetadataBearer; + +/** + *

        Creates a new dataset in an Amazon Lookout for Vision project. CreateDataset can create a + * training or a test dataset from a valid dataset source (DatasetSource).

        + *

        If you want a single dataset project, specify train for the value of + * DatasetType.

        + *

        To have a project with separate training and test datasets, call CreateDataset twice. + * On the first call, specify train for the value of + * DatasetType. On the second call, specify test for the value of + * DatasetType. + * + * of dataset with

        + */ +export class CreateDatasetCommand extends $Command< + CreateDatasetCommandInput, + CreateDatasetCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateDatasetCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "CreateDatasetCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateDatasetRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateDatasetResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateDatasetCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateDatasetCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateDatasetCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/CreateModelCommand.ts b/clients/client-lookoutvision/commands/CreateModelCommand.ts new file mode 100644 
index 000000000000..f256ca1b26d1 --- /dev/null +++ b/clients/client-lookoutvision/commands/CreateModelCommand.ts @@ -0,0 +1,98 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { CreateModelRequest, CreateModelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateModelCommand, + serializeAws_restJson1CreateModelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateModelCommandInput = CreateModelRequest; +export type CreateModelCommandOutput = CreateModelResponse & __MetadataBearer; + +/** + *

        Creates a new version of a model within an Amazon Lookout for Vision project. + * CreateModel is an asynchronous operation in which Amazon Lookout for Vision trains, tests, + * and evaluates a new version of a model.

        + *

        To get the current status, check the Status field returned + * in the response from DescribeModel.

        + *

        If the project has a single dataset, Amazon Lookout for Vision internally splits the dataset + * to create a training and a test dataset. + * If the project has a training and a test dataset, Lookout for Vision uses the respective datasets to train and test + * the model.

        + *

        After training completes, the evaluation metrics are stored at the location specified in + * OutputConfig.

        + */ +export class CreateModelCommand extends $Command< + CreateModelCommandInput, + CreateModelCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateModelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "CreateModelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateModelRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateModelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateModelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateModelCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateModelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/CreateProjectCommand.ts b/clients/client-lookoutvision/commands/CreateProjectCommand.ts new file mode 100644 index 
000000000000..e83a3bb6344a --- /dev/null +++ b/clients/client-lookoutvision/commands/CreateProjectCommand.ts @@ -0,0 +1,89 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { CreateProjectRequest, CreateProjectResponse } from "../models/models_0"; +import { + deserializeAws_restJson1CreateProjectCommand, + serializeAws_restJson1CreateProjectCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateProjectCommandInput = CreateProjectRequest; +export type CreateProjectCommandOutput = CreateProjectResponse & __MetadataBearer; + +/** + *

        Creates an empty Amazon Lookout for Vision project. After you create the project, add a dataset by calling + * CreateDataset.

        + */ +export class CreateProjectCommand extends $Command< + CreateProjectCommandInput, + CreateProjectCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateProjectCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "CreateProjectCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateProjectRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateProjectResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateProjectCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1CreateProjectCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1CreateProjectCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DeleteDatasetCommand.ts b/clients/client-lookoutvision/commands/DeleteDatasetCommand.ts new file mode 
100644 index 000000000000..b8193fec635c --- /dev/null +++ b/clients/client-lookoutvision/commands/DeleteDatasetCommand.ts @@ -0,0 +1,102 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DeleteDatasetRequest, DeleteDatasetResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteDatasetCommand, + serializeAws_restJson1DeleteDatasetCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteDatasetCommandInput = DeleteDatasetRequest; +export type DeleteDatasetCommandOutput = DeleteDatasetResponse & __MetadataBearer; + +/** + *

        Deletes an existing Amazon Lookout for Vision dataset.

        + *

        If your project has a single + * dataset, you must create a new dataset before you can create a model.

        + *

        If your project has a training dataset and a test dataset, consider the following.

        + *
          + *
        • + *

          If you delete the test dataset, your project reverts to a single dataset project. If you then + * train the model, Amazon Lookout for Vision internally splits the remaining dataset into a training and test dataset.

          + *
        • + *
        • + *

          If you delete the training dataset, you must create a training dataset before you can create a model.

          + *
        • + *
        + *

        It might take a while to delete the dataset. To check the current status, check the Status field + * in the response from a call to DescribeDataset.

        + */ +export class DeleteDatasetCommand extends $Command< + DeleteDatasetCommandInput, + DeleteDatasetCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteDatasetCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DeleteDatasetCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteDatasetRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteDatasetResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteDatasetCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteDatasetCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteDatasetCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DeleteModelCommand.ts b/clients/client-lookoutvision/commands/DeleteModelCommand.ts new file mode 100644 
index 000000000000..1e4e5cb2c66c --- /dev/null +++ b/clients/client-lookoutvision/commands/DeleteModelCommand.ts @@ -0,0 +1,89 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DeleteModelRequest, DeleteModelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteModelCommand, + serializeAws_restJson1DeleteModelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteModelCommandInput = DeleteModelRequest; +export type DeleteModelCommandOutput = DeleteModelResponse & __MetadataBearer; + +/** + *

        Deletes an Amazon Lookout for Vision model. You can't delete a running model. To stop a running model, + * use the StopModel operation.

        + */ +export class DeleteModelCommand extends $Command< + DeleteModelCommandInput, + DeleteModelCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteModelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DeleteModelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteModelRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteModelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteModelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteModelCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteModelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DeleteProjectCommand.ts b/clients/client-lookoutvision/commands/DeleteProjectCommand.ts new file mode 100644 index 
000000000000..ffafae1a497c --- /dev/null +++ b/clients/client-lookoutvision/commands/DeleteProjectCommand.ts @@ -0,0 +1,92 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DeleteProjectRequest, DeleteProjectResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteProjectCommand, + serializeAws_restJson1DeleteProjectCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteProjectCommandInput = DeleteProjectRequest; +export type DeleteProjectCommandOutput = DeleteProjectResponse & __MetadataBearer; + +/** + *

        Deletes an Amazon Lookout for Vision project.

        + *

        To delete a project, you must first delete each version of the model associated with + * the project. To delete a model use the DeleteModel operation.

        + *

        The training and test datasets are deleted automatically for you. + * The images referenced by the training and test datasets aren't deleted.

        + */ +export class DeleteProjectCommand extends $Command< + DeleteProjectCommandInput, + DeleteProjectCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteProjectCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DeleteProjectCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteProjectRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteProjectResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteProjectCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteProjectCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteProjectCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DescribeDatasetCommand.ts b/clients/client-lookoutvision/commands/DescribeDatasetCommand.ts new file mode 
100644 index 000000000000..dd044d81d748 --- /dev/null +++ b/clients/client-lookoutvision/commands/DescribeDatasetCommand.ts @@ -0,0 +1,88 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DescribeDatasetRequest, DescribeDatasetResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeDatasetCommand, + serializeAws_restJson1DescribeDatasetCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeDatasetCommandInput = DescribeDatasetRequest; +export type DescribeDatasetCommandOutput = DescribeDatasetResponse & __MetadataBearer; + +/** + *

        Describe an Amazon Lookout for Vision dataset.

        + */ +export class DescribeDatasetCommand extends $Command< + DescribeDatasetCommandInput, + DescribeDatasetCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeDatasetCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DescribeDatasetCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeDatasetRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeDatasetResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeDatasetCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeDatasetCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeDatasetCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DescribeModelCommand.ts b/clients/client-lookoutvision/commands/DescribeModelCommand.ts 
new file mode 100644 index 000000000000..c834f9cfc2bf --- /dev/null +++ b/clients/client-lookoutvision/commands/DescribeModelCommand.ts @@ -0,0 +1,88 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DescribeModelRequest, DescribeModelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeModelCommand, + serializeAws_restJson1DescribeModelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeModelCommandInput = DescribeModelRequest; +export type DescribeModelCommandOutput = DescribeModelResponse & __MetadataBearer; + +/** + *

        Describes a version of an Amazon Lookout for Vision model.

        + */ +export class DescribeModelCommand extends $Command< + DescribeModelCommandInput, + DescribeModelCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeModelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DescribeModelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeModelRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeModelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeModelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeModelCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeModelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DescribeProjectCommand.ts b/clients/client-lookoutvision/commands/DescribeProjectCommand.ts new file mode 
100644 index 000000000000..964444954acc --- /dev/null +++ b/clients/client-lookoutvision/commands/DescribeProjectCommand.ts @@ -0,0 +1,88 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DescribeProjectRequest, DescribeProjectResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DescribeProjectCommand, + serializeAws_restJson1DescribeProjectCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeProjectCommandInput = DescribeProjectRequest; +export type DescribeProjectCommandOutput = DescribeProjectResponse & __MetadataBearer; + +/** + *

        Describes an Amazon Lookout for Vision project.

        + */ +export class DescribeProjectCommand extends $Command< + DescribeProjectCommandInput, + DescribeProjectCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeProjectCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DescribeProjectCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeProjectRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeProjectResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeProjectCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DescribeProjectCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DescribeProjectCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/DetectAnomaliesCommand.ts 
b/clients/client-lookoutvision/commands/DetectAnomaliesCommand.ts new file mode 100644 index 000000000000..28dbe6d6b42c --- /dev/null +++ b/clients/client-lookoutvision/commands/DetectAnomaliesCommand.ts @@ -0,0 +1,97 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { DetectAnomaliesRequest, DetectAnomaliesResponse } from "../models/models_0"; +import { + deserializeAws_restJson1DetectAnomaliesCommand, + serializeAws_restJson1DetectAnomaliesCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DetectAnomaliesCommandInput = Omit & { + Body: DetectAnomaliesRequest["Body"] | string | Uint8Array | Buffer; +}; +export type DetectAnomaliesCommandOutput = DetectAnomaliesResponse & __MetadataBearer; + +/** + *

        Detects anomalies in an image that you supply.

        + *

        The response from DetectAnomalies includes a boolean prediction + * that the image contains one or more anomalies and a confidence value for the prediction.

        + * + *

        Before calling DetectAnomalies, you must first start your model with the StartModel operation. + * You are charged for the amount of time, in minutes, that a model runs and for the number of anomaly detection units that your + * model uses. If you are not using a model, use the StopModel operation to stop your model.

        + *
        + */ +export class DetectAnomaliesCommand extends $Command< + DetectAnomaliesCommandInput, + DetectAnomaliesCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DetectAnomaliesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "DetectAnomaliesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DetectAnomaliesRequest.filterSensitiveLog, + outputFilterSensitiveLog: DetectAnomaliesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DetectAnomaliesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DetectAnomaliesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DetectAnomaliesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/ListDatasetEntriesCommand.ts 
b/clients/client-lookoutvision/commands/ListDatasetEntriesCommand.ts new file mode 100644 index 000000000000..8bb8649afce7 --- /dev/null +++ b/clients/client-lookoutvision/commands/ListDatasetEntriesCommand.ts @@ -0,0 +1,89 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { ListDatasetEntriesRequest, ListDatasetEntriesResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListDatasetEntriesCommand, + serializeAws_restJson1ListDatasetEntriesCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListDatasetEntriesCommandInput = ListDatasetEntriesRequest; +export type ListDatasetEntriesCommandOutput = ListDatasetEntriesResponse & __MetadataBearer; + +/** + *

        Lists the JSON Lines within a dataset. An Amazon Lookout for Vision JSON Line contains the anomaly + * information for a single image, including the image location and the assigned label.

        + */ +export class ListDatasetEntriesCommand extends $Command< + ListDatasetEntriesCommandInput, + ListDatasetEntriesCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListDatasetEntriesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "ListDatasetEntriesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListDatasetEntriesRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListDatasetEntriesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListDatasetEntriesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListDatasetEntriesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListDatasetEntriesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/ListModelsCommand.ts 
b/clients/client-lookoutvision/commands/ListModelsCommand.ts new file mode 100644 index 000000000000..cfba3bee4113 --- /dev/null +++ b/clients/client-lookoutvision/commands/ListModelsCommand.ts @@ -0,0 +1,88 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { ListModelsRequest, ListModelsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListModelsCommand, + serializeAws_restJson1ListModelsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListModelsCommandInput = ListModelsRequest; +export type ListModelsCommandOutput = ListModelsResponse & __MetadataBearer; + +/** + *

        Lists the versions of a model in an Amazon Lookout for Vision project.

        + */ +export class ListModelsCommand extends $Command< + ListModelsCommandInput, + ListModelsCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListModelsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "ListModelsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListModelsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListModelsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListModelsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListModelsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListModelsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/ListProjectsCommand.ts b/clients/client-lookoutvision/commands/ListProjectsCommand.ts new file mode 100644 index 
000000000000..aac0bd8bdbd3 --- /dev/null +++ b/clients/client-lookoutvision/commands/ListProjectsCommand.ts @@ -0,0 +1,88 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { ListProjectsRequest, ListProjectsResponse } from "../models/models_0"; +import { + deserializeAws_restJson1ListProjectsCommand, + serializeAws_restJson1ListProjectsCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListProjectsCommandInput = ListProjectsRequest; +export type ListProjectsCommandOutput = ListProjectsResponse & __MetadataBearer; + +/** + *

        Lists the Amazon Lookout for Vision projects in your AWS account.

        + */ +export class ListProjectsCommand extends $Command< + ListProjectsCommandInput, + ListProjectsCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListProjectsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "ListProjectsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListProjectsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListProjectsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListProjectsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1ListProjectsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1ListProjectsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/StartModelCommand.ts b/clients/client-lookoutvision/commands/StartModelCommand.ts new file mode 100644 index 
000000000000..cbd8a58a4a23 --- /dev/null +++ b/clients/client-lookoutvision/commands/StartModelCommand.ts @@ -0,0 +1,95 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { StartModelRequest, StartModelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1StartModelCommand, + serializeAws_restJson1StartModelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type StartModelCommandInput = StartModelRequest; +export type StartModelCommandOutput = StartModelResponse & __MetadataBearer; + +/** + *

        Starts the running of the version of an Amazon Lookout for Vision model. Starting a model takes a while + * to complete. To check the current state of the model, use DescribeModel.

        + *

        Once the model is running, you can detect custom labels in new images by calling + * DetectAnomalies.

        + * + *

        You are charged for the amount of time that the model is running. To stop a running + * model, call StopModel.

        + *
        + */ +export class StartModelCommand extends $Command< + StartModelCommandInput, + StartModelCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: StartModelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "StartModelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: StartModelRequest.filterSensitiveLog, + outputFilterSensitiveLog: StartModelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: StartModelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1StartModelCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1StartModelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/StopModelCommand.ts b/clients/client-lookoutvision/commands/StopModelCommand.ts new file mode 100644 index 000000000000..b8fe2255c6e1 --- 
/dev/null +++ b/clients/client-lookoutvision/commands/StopModelCommand.ts @@ -0,0 +1,89 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { StopModelRequest, StopModelResponse } from "../models/models_0"; +import { + deserializeAws_restJson1StopModelCommand, + serializeAws_restJson1StopModelCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type StopModelCommandInput = StopModelRequest; +export type StopModelCommandOutput = StopModelResponse & __MetadataBearer; + +/** + *

        Stops a running model. The operation might take a while to complete. To + * check the current status, call DescribeModel.

        + */ +export class StopModelCommand extends $Command< + StopModelCommandInput, + StopModelCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: StopModelCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "StopModelCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: StopModelRequest.filterSensitiveLog, + outputFilterSensitiveLog: StopModelResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: StopModelCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1StopModelCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1StopModelCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/commands/UpdateDatasetEntriesCommand.ts b/clients/client-lookoutvision/commands/UpdateDatasetEntriesCommand.ts new file mode 100644 index 
000000000000..49f8cdc56889 --- /dev/null +++ b/clients/client-lookoutvision/commands/UpdateDatasetEntriesCommand.ts @@ -0,0 +1,93 @@ +import { LookoutVisionClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../LookoutVisionClient"; +import { UpdateDatasetEntriesRequest, UpdateDatasetEntriesResponse } from "../models/models_0"; +import { + deserializeAws_restJson1UpdateDatasetEntriesCommand, + serializeAws_restJson1UpdateDatasetEntriesCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateDatasetEntriesCommandInput = UpdateDatasetEntriesRequest; +export type UpdateDatasetEntriesCommandOutput = UpdateDatasetEntriesResponse & __MetadataBearer; + +/** + *

        Adds one or more JSON Line entries to a dataset. A JSON Line includes information about an image + * used for training or testing an Amazon Lookout for Vision model. The following is an example JSON Line.

        + * + * + *

        Updating a dataset might take a while to complete. To check the current status, call DescribeDataset and + * check the Status field in the response.

        + */ +export class UpdateDatasetEntriesCommand extends $Command< + UpdateDatasetEntriesCommandInput, + UpdateDatasetEntriesCommandOutput, + LookoutVisionClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateDatasetEntriesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: LookoutVisionClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "LookoutVisionClient"; + const commandName = "UpdateDatasetEntriesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateDatasetEntriesRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateDatasetEntriesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateDatasetEntriesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1UpdateDatasetEntriesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1UpdateDatasetEntriesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-lookoutvision/endpoints.ts 
b/clients/client-lookoutvision/endpoints.ts new file mode 100644 index 000000000000..725a833ac4dc --- /dev/null +++ b/clients/client-lookoutvision/endpoints.ts @@ -0,0 +1,81 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "lookoutvision.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "lookoutvision.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "lookoutvision.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "lookoutvision.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "lookoutvision.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. + // Next, try to match partition endpoints. 
+ default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. + if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-lookoutvision/index.ts b/clients/client-lookoutvision/index.ts new file mode 100644 index 000000000000..9f674be29071 --- /dev/null +++ b/clients/client-lookoutvision/index.ts @@ -0,0 +1,23 @@ +export * from "./LookoutVisionClient"; +export * from "./LookoutVision"; +export * from "./commands/CreateDatasetCommand"; +export * from "./commands/CreateModelCommand"; +export * from "./commands/CreateProjectCommand"; +export * from "./commands/DeleteDatasetCommand"; +export * from "./commands/DeleteModelCommand"; +export * from "./commands/DeleteProjectCommand"; +export * from "./commands/DescribeDatasetCommand"; +export * from "./commands/DescribeModelCommand"; +export * from "./commands/DescribeProjectCommand"; +export * from "./commands/DetectAnomaliesCommand"; +export * from "./commands/ListDatasetEntriesCommand"; +export * from "./pagination/ListDatasetEntriesPaginator"; +export * from "./commands/ListModelsCommand"; +export * from "./pagination/ListModelsPaginator"; +export * from 
"./commands/ListProjectsCommand"; +export * from "./pagination/ListProjectsPaginator"; +export * from "./commands/StartModelCommand"; +export * from "./commands/StopModelCommand"; +export * from "./commands/UpdateDatasetEntriesCommand"; +export * from "./pagination/Interfaces"; +export * from "./models/index"; diff --git a/clients/client-lookoutvision/models/index.ts b/clients/client-lookoutvision/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null +++ b/clients/client-lookoutvision/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-lookoutvision/models/models_0.ts b/clients/client-lookoutvision/models/models_0.ts new file mode 100644 index 000000000000..881e2a32b261 --- /dev/null +++ b/clients/client-lookoutvision/models/models_0.ts @@ -0,0 +1,1397 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; +import { Readable } from "stream"; + +/** + *

        You are not authorized to perform the action.

        + */ +export interface AccessDeniedException extends __SmithyException, $MetadataBearer { + name: "AccessDeniedException"; + $fault: "client"; + Message: string | undefined; +} + +export namespace AccessDeniedException { + export const filterSensitiveLog = (obj: AccessDeniedException): any => ({ + ...obj, + }); +} + +export enum ResourceType { + DATASET = "DATASET", + MODEL = "MODEL", + PROJECT = "PROJECT", + TRIAL = "TRIAL", +} + +/** + *

        The update or deletion of a resource caused an inconsistent state.

        + */ +export interface ConflictException extends __SmithyException, $MetadataBearer { + name: "ConflictException"; + $fault: "client"; + Message: string | undefined; + /** + *

        The ID of the resource.

        + */ + ResourceId: string | undefined; + + /** + *

        The type of the resource.

        + */ + ResourceType: ResourceType | string | undefined; +} + +export namespace ConflictException { + export const filterSensitiveLog = (obj: ConflictException): any => ({ + ...obj, + }); +} + +/** + *

        Amazon S3 Location information for an input manifest file.

        + */ +export interface InputS3Object { + /** + *

        The Amazon S3 bucket that contains the manifest.

        + */ + Bucket: string | undefined; + + /** + *

        The name and location of the manifest file within the bucket.

        + */ + Key: string | undefined; + + /** + *

        The version ID of the bucket.

        + */ + VersionId?: string; +} + +export namespace InputS3Object { + export const filterSensitiveLog = (obj: InputS3Object): any => ({ + ...obj, + }); +} + +/** + *

        Location information about a manifest file. You can use a manifest file to + * create a dataset.

        + */ +export interface DatasetGroundTruthManifest { + /** + *

        The S3 bucket location for the manifest file.

        + */ + S3Object?: InputS3Object; +} + +export namespace DatasetGroundTruthManifest { + export const filterSensitiveLog = (obj: DatasetGroundTruthManifest): any => ({ + ...obj, + }); +} + +/** + *

        Information about the location of a manifest file that Amazon Lookout for Vision uses to create a dataset.

        + */ +export interface DatasetSource { + /** + *

        Location information for the manifest file.

        + */ + GroundTruthManifest?: DatasetGroundTruthManifest; +} + +export namespace DatasetSource { + export const filterSensitiveLog = (obj: DatasetSource): any => ({ + ...obj, + }); +} + +export interface CreateDatasetRequest { + /** + *

        The name of the project in which you want to create a dataset.

        + */ + ProjectName: string | undefined; + + /** + *

        The type of the dataset. Specify train for a training dataset. + * Specify test for a test dataset.

        + */ + DatasetType: string | undefined; + + /** + *

        The location of the manifest file that Amazon Lookout for Vision uses to create the dataset.

        + *

        If you don't specify DatasetSource, an empty dataset is created and the operation + * synchronously returns. Later, you can add JSON Lines by calling UpdateDatasetEntries. + *

        + *

        If you specify a value for DataSource, the manifest at the S3 location + * is validated and used to create the dataset. The call to CreateDataset is asynchronous + * and might take a while to complete. To find out the current status, Check the value of Status + * returned in a call to DescribeDataset.

        + */ + DatasetSource?: DatasetSource; + + /** + *

        ClientToken is an idempotency token that ensures a call to CreateDataset + * completes only once. You choose the value to pass. For example, An issue, + * such as an network outage, might prevent you from getting a response from CreateDataset. + * In this case, safely retry your call + * to CreateDataset by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to CreateDataset. An idempotency + * token is active for 8 hours. + *

        + */ + ClientToken?: string; +} + +export namespace CreateDatasetRequest { + export const filterSensitiveLog = (obj: CreateDatasetRequest): any => ({ + ...obj, + }); +} + +export enum DatasetStatus { + CREATE_COMPLETE = "CREATE_COMPLETE", + CREATE_FAILED = "CREATE_FAILED", + CREATE_IN_PROGRESS = "CREATE_IN_PROGRESS", + DELETE_COMPLETE = "DELETE_COMPLETE", + DELETE_FAILED = "DELETE_FAILED", + DELETE_IN_PROGRESS = "DELETE_IN_PROGRESS", + UPDATE_COMPLETE = "UPDATE_COMPLETE", + UPDATE_FAILED_ROLLBACK_COMPLETE = "UPDATE_FAILED_ROLLBACK_COMPLETE", + UPDATE_FAILED_ROLLBACK_IN_PROGRESS = "UPDATE_FAILED_ROLLBACK_IN_PROGRESS", + UPDATE_IN_PROGRESS = "UPDATE_IN_PROGRESS", +} + +/** + *

        Summary information for an Amazon Lookout for Vision dataset.

        + */ +export interface DatasetMetadata { + /** + *

        The type of the dataset.

        + */ + DatasetType?: string; + + /** + *

        The Unix timestamp for the date and time that the dataset was created.

        + */ + CreationTimestamp?: Date; + + /** + *

        The status for the dataset.

        + */ + Status?: DatasetStatus | string; + + /** + *

        The status message for the dataset.

        + */ + StatusMessage?: string; +} + +export namespace DatasetMetadata { + export const filterSensitiveLog = (obj: DatasetMetadata): any => ({ + ...obj, + }); +} + +export interface CreateDatasetResponse { + /** + *

        Information about the dataset.

        + */ + DatasetMetadata?: DatasetMetadata; +} + +export namespace CreateDatasetResponse { + export const filterSensitiveLog = (obj: CreateDatasetResponse): any => ({ + ...obj, + }); +} + +/** + *

        Amazon Lookout for Vision experienced a service issue. Try your call again.

        + */ +export interface InternalServerException extends __SmithyException, $MetadataBearer { + name: "InternalServerException"; + $fault: "server"; + Message: string | undefined; + /** + *

        The period of time, in seconds, before the operation can be retried.

        + */ + RetryAfterSeconds?: number; +} + +export namespace InternalServerException { + export const filterSensitiveLog = (obj: InternalServerException): any => ({ + ...obj, + }); +} + +/** + *

        The resource could not be found.

        + */ +export interface ResourceNotFoundException extends __SmithyException, $MetadataBearer { + name: "ResourceNotFoundException"; + $fault: "client"; + Message: string | undefined; + /** + *

        The ID of the resource.

        + */ + ResourceId: string | undefined; + + /** + *

        The type of the resource.

        + */ + ResourceType: ResourceType | string | undefined; +} + +export namespace ResourceNotFoundException { + export const filterSensitiveLog = (obj: ResourceNotFoundException): any => ({ + ...obj, + }); +} + +/** + *

        A service quota exceeded the allowed limit. For more information, see + * Limits in Amazon Lookout for Vision in the Amazon Lookout for Vision Developer Guide.

        + */ +export interface ServiceQuotaExceededException extends __SmithyException, $MetadataBearer { + name: "ServiceQuotaExceededException"; + $fault: "client"; + Message: string | undefined; + /** + *

        The ID of the resource.

        + */ + ResourceId?: string; + + /** + *

        The type of the resource.

        + */ + ResourceType?: ResourceType | string; + + /** + *

        The quota code.

        + */ + QuotaCode: string | undefined; + + /** + *

        The service code.

        + */ + ServiceCode: string | undefined; +} + +export namespace ServiceQuotaExceededException { + export const filterSensitiveLog = (obj: ServiceQuotaExceededException): any => ({ + ...obj, + }); +} + +/** + *

        Amazon Lookout for Vision is temporarily unable to process the request. Try your call again.

        + */ +export interface ThrottlingException extends __SmithyException, $MetadataBearer { + name: "ThrottlingException"; + $fault: "client"; + Message: string | undefined; + /** + *

        The quota code.

        + */ + QuotaCode?: string; + + /** + *

        The service code.

        + */ + ServiceCode?: string; + + /** + *

        The period of time, in seconds, before the operation can be retried.

        + */ + RetryAfterSeconds?: number; +} + +export namespace ThrottlingException { + export const filterSensitiveLog = (obj: ThrottlingException): any => ({ + ...obj, + }); +} + +/** + *

        An input validation error occurred. For example, invalid characters in a project name, + * or if a pagination token is invalid.

        + */ +export interface ValidationException extends __SmithyException, $MetadataBearer { + name: "ValidationException"; + $fault: "client"; + Message: string | undefined; +} + +export namespace ValidationException { + export const filterSensitiveLog = (obj: ValidationException): any => ({ + ...obj, + }); +} + +/** + *

        The S3 location where Amazon Lookout for Vision saves training output.

        + */ +export interface OutputS3Object { + /** + *

        The bucket that contains the training output.

        + */ + Bucket: string | undefined; + + /** + *

        The location of the training output in the bucket.

        + */ + Key: string | undefined; +} + +export namespace OutputS3Object { + export const filterSensitiveLog = (obj: OutputS3Object): any => ({ + ...obj, + }); +} + +/** + *

        Information about the location of a manifest file.

        + */ +export interface S3Location { + /** + *

        The S3 bucket that contains the manifest file.

        + */ + Bucket: string | undefined; + + /** + *

        The path and name of the manifest file within the S3 bucket.

        + */ + Prefix?: string; +} + +export namespace S3Location { + export const filterSensitiveLog = (obj: S3Location): any => ({ + ...obj, + }); +} + +/** + *

        The S3 location where Amazon Lookout for Vision saves model training files.

        + */ +export interface OutputConfig { + /** + *

        The S3 location for the output.

        + */ + S3Location: S3Location | undefined; +} + +export namespace OutputConfig { + export const filterSensitiveLog = (obj: OutputConfig): any => ({ + ...obj, + }); +} + +/** + *

        Information about the evaluation performance of a trained model.

        + */ +export interface ModelPerformance { + /** + *

        The overall F1 score metric for the trained model.

        + */ + F1Score?: number; + + /** + *

        The overall recall metric value for the trained model.

        + */ + Recall?: number; + + /** + *

        The overall precision metric value for the trained model.

        + */ + Precision?: number; +} + +export namespace ModelPerformance { + export const filterSensitiveLog = (obj: ModelPerformance): any => ({ + ...obj, + }); +} + +export enum ModelStatus { + DELETING = "DELETING", + HOSTED = "HOSTED", + HOSTING_FAILED = "HOSTING_FAILED", + STARTING_HOSTING = "STARTING_HOSTING", + STOPPING_HOSTING = "STOPPING_HOSTING", + SYSTEM_UPDATING = "SYSTEM_UPDATING", + TRAINED = "TRAINED", + TRAINING = "TRAINING", + TRAINING_FAILED = "TRAINING_FAILED", +} + +/** + *

        Describes an Amazon Lookout for Vision model.

        + */ +export interface ModelDescription { + /** + *

        The version of the model.

        + */ + ModelVersion?: string; + + /** + *

        The Amazon Resource Name (ARN) of the model.

        + */ + ModelArn?: string; + + /** + *

        The unix timestamp for the date and time that the model was created.

        + */ + CreationTimestamp?: Date; + + /** + *

        The description for the model.

        + */ + Description?: string; + + /** + *

        The status of the model.

        + */ + Status?: ModelStatus | string; + + /** + *

        The status message for the model.

        + */ + StatusMessage?: string; + + /** + *

        Performance metrics for the model. Created during training.

        + */ + Performance?: ModelPerformance; + + /** + *

        The S3 location where Amazon Lookout for Vision saves model training files.

        + */ + OutputConfig?: OutputConfig; + + /** + *

        The S3 location where Amazon Lookout for Vision saves the manifest file + * that was used to test the trained model and generate the performance scores.

        + */ + EvaluationManifest?: OutputS3Object; + + /** + *

        The S3 location where Amazon Lookout for Vision saves the performance metrics.

        + */ + EvaluationResult?: OutputS3Object; + + /** + *

        The unix timestamp for the date and time that the evaluation ended.

        + */ + EvaluationEndTimestamp?: Date; + + /** + *

        The identifier for the AWS Key Management Service (AWS KMS) key that was used to encrypt the model + * during training.

        + */ + KmsKeyId?: string; +} + +export namespace ModelDescription { + export const filterSensitiveLog = (obj: ModelDescription): any => ({ + ...obj, + }); +} + +export interface CreateModelRequest { + /** + *

        The name of the project in which you want to create a model version.

        + */ + ProjectName: string | undefined; + + /** + *

        A description for the version of the model.

        + */ + Description?: ModelDescription; + + /** + *

        ClientToken is an idempotency token that ensures a call to CreateModel + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from CreateModel. + * In this case, safely retry your call + * to CreateModel by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to CreateModel. An idempotency + * token is active for 8 hours.

        + */ + ClientToken?: string; + + /** + *

        The location where Amazon Lookout for Vision saves the training results.

        + */ + OutputConfig: OutputConfig | undefined; + + /** + *

        The identifier of the AWS Key Management Service (AWS KMS) customer master key (CMK) + * to use for encrypting the model. If this parameter is not specified, the + * model is encrypted by a key that AWS owns and manages.

        + */ + KmsKeyId?: string; +} + +export namespace CreateModelRequest { + export const filterSensitiveLog = (obj: CreateModelRequest): any => ({ + ...obj, + }); +} + +/** + *

        Describes an Amazon Lookout for Vision model.

        + */ +export interface ModelMetadata { + /** + *

        The unix timestamp for the date and time that the model was created.

        + */ + CreationTimestamp?: Date; + + /** + *

        The version of the model.

        + */ + ModelVersion?: string; + + /** + *

        The Amazon Resource Name (ARN) of the model.

        + */ + ModelArn?: string; + + /** + *

        The description for the model.

        + */ + Description?: string; + + /** + *

        The status of the model.

        + */ + Status?: ModelStatus | string; + + /** + *

        The status message for the model.

        + */ + StatusMessage?: string; + + /** + *

        Performance metrics for the model. Created during training.

        + */ + Performance?: ModelPerformance; +} + +export namespace ModelMetadata { + export const filterSensitiveLog = (obj: ModelMetadata): any => ({ + ...obj, + }); +} + +export interface CreateModelResponse { + /** + *

        The response from a call to CreateModel.

        + */ + ModelMetadata?: ModelMetadata; +} + +export namespace CreateModelResponse { + export const filterSensitiveLog = (obj: CreateModelResponse): any => ({ + ...obj, + }); +} + +export interface CreateProjectRequest { + /** + *

        A name for the project.

        + */ + ProjectName: string | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to CreateProject + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from CreateProject. + * In this case, safely retry your call + * to CreateProject by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to CreateProject. An idempotency + * token is active for 8 hours.

        + */ + ClientToken?: string; +} + +export namespace CreateProjectRequest { + export const filterSensitiveLog = (obj: CreateProjectRequest): any => ({ + ...obj, + }); +} + +/** + *

        Metadata about an Amazon Lookout for Vision project.

        + */ +export interface ProjectMetadata { + /** + *

        The Amazon Resource Name (ARN) of the project.

        + */ + ProjectArn?: string; + + /** + *

        The name of the project.

        + */ + ProjectName?: string; + + /** + *

        The unix timestamp for the date and time that the project was created.

        + */ + CreationTimestamp?: Date; +} + +export namespace ProjectMetadata { + export const filterSensitiveLog = (obj: ProjectMetadata): any => ({ + ...obj, + }); +} + +export interface CreateProjectResponse { + /** + *

        Information about the project.

        + */ + ProjectMetadata?: ProjectMetadata; +} + +export namespace CreateProjectResponse { + export const filterSensitiveLog = (obj: CreateProjectResponse): any => ({ + ...obj, + }); +} + +/** + *

        Statistics about the images in a dataset.

        + */ +export interface DatasetImageStats { + /** + *

        The total number of images in the dataset.

        + */ + Total?: number; + + /** + *

        The total number of labeled images.

        + */ + Labeled?: number; + + /** + *

        The total number of images labeled as normal.

        + */ + Normal?: number; + + /** + *

        The total number of images labeled as an anomaly.

        + */ + Anomaly?: number; +} + +export namespace DatasetImageStats { + export const filterSensitiveLog = (obj: DatasetImageStats): any => ({ + ...obj, + }); +} + +/** + *

        The description for a dataset. For more information, see DescribeDataset.

        + */ +export interface DatasetDescription { + /** + *

        The name of the project that contains the dataset.

        + */ + ProjectName?: string; + + /** + *

        The type of the dataset. The value train represents a training dataset or single dataset project. + * The value test represents a test dataset.

        + */ + DatasetType?: string; + + /** + *

        The Unix timestamp for the time and date that the dataset was created.

        + */ + CreationTimestamp?: Date; + + /** + *

        The Unix timestamp for the date and time that the dataset was last updated.

        + */ + LastUpdatedTimestamp?: Date; + + /** + *

        The status of the dataset.

        + */ + Status?: DatasetStatus | string; + + /** + *

        The status message for the dataset.

        + */ + StatusMessage?: string; + + /** + *

        + */ + ImageStats?: DatasetImageStats; +} + +export namespace DatasetDescription { + export const filterSensitiveLog = (obj: DatasetDescription): any => ({ + ...obj, + }); +} + +export interface DeleteDatasetRequest { + /** + *

        The name of the project that contains the dataset that you want to delete.

        + */ + ProjectName: string | undefined; + + /** + *

        The type of the dataset to delete. Specify train to delete the training dataset. + * Specify test to delete the test dataset. To delete the dataset in a single dataset project, + * specify train.

        + */ + DatasetType: string | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to DeleteDataset + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from DeleteDataset. + * In this case, safely retry your call + * to DeleteDataset by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to DeleteDataset. An idempotency + * token is active for 8 hours.

        + */ + ClientToken?: string; +} + +export namespace DeleteDatasetRequest { + export const filterSensitiveLog = (obj: DeleteDatasetRequest): any => ({ + ...obj, + }); +} + +export interface DeleteDatasetResponse {} + +export namespace DeleteDatasetResponse { + export const filterSensitiveLog = (obj: DeleteDatasetResponse): any => ({ + ...obj, + }); +} + +export interface DeleteModelRequest { + /** + *

        The name of the project that contains the model that you want to delete.

        + */ + ProjectName: string | undefined; + + /** + *

        The version of the model that you want to delete.

        + */ + ModelVersion: string | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to DeleteModel + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from DeleteModel. + * In this case, safely retry your call + * to DeleteModel by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to DeleteModel. An idempotency + * token is active for 8 hours.

        + */ + ClientToken?: string; +} + +export namespace DeleteModelRequest { + export const filterSensitiveLog = (obj: DeleteModelRequest): any => ({ + ...obj, + }); +} + +export interface DeleteModelResponse { + /** + *

        The Amazon Resource Name (ARN) of the model that was deleted.

        + */ + ModelArn?: string; +} + +export namespace DeleteModelResponse { + export const filterSensitiveLog = (obj: DeleteModelResponse): any => ({ + ...obj, + }); +} + +export interface DeleteProjectRequest { + /** + *

        The name of the project to delete.

        + */ + ProjectName: string | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to DeleteProject + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from DeleteProject. + * In this case, safely retry your call + * to DeleteProject by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to DeleteProject. An idempotency + * token is active for 8 hours.

        + */ + ClientToken?: string; +} + +export namespace DeleteProjectRequest { + export const filterSensitiveLog = (obj: DeleteProjectRequest): any => ({ + ...obj, + }); +} + +export interface DeleteProjectResponse { + /** + *

        The Amazon Resource Name (ARN) of the project that was deleted.

        + */ + ProjectArn?: string; +} + +export namespace DeleteProjectResponse { + export const filterSensitiveLog = (obj: DeleteProjectResponse): any => ({ + ...obj, + }); +} + +export interface DescribeDatasetRequest { + /** + *

        The name of the project that contains the dataset that you want to describe.

        + */ + ProjectName: string | undefined; + + /** + *

        The type of the dataset to describe. Specify train to describe the + * training dataset. Specify test to describe the test dataset. + * If you have a single dataset project, specify train + *

        + */ + DatasetType: string | undefined; +} + +export namespace DescribeDatasetRequest { + export const filterSensitiveLog = (obj: DescribeDatasetRequest): any => ({ + ...obj, + }); +} + +export interface DescribeDatasetResponse { + /** + *

        The description of the requested dataset.

        + */ + DatasetDescription?: DatasetDescription; +} + +export namespace DescribeDatasetResponse { + export const filterSensitiveLog = (obj: DescribeDatasetResponse): any => ({ + ...obj, + }); +} + +export interface DescribeModelRequest { + /** + *

        The project that contains the version of a model that you want to describe.

        + */ + ProjectName: string | undefined; + + /** + *

        The version of the model that you want to describe.

        + */ + ModelVersion: string | undefined; +} + +export namespace DescribeModelRequest { + export const filterSensitiveLog = (obj: DescribeModelRequest): any => ({ + ...obj, + }); +} + +export interface DescribeModelResponse { + /** + *

        Contains the description of the model.

        + */ + ModelDescription?: ModelDescription; +} + +export namespace DescribeModelResponse { + export const filterSensitiveLog = (obj: DescribeModelResponse): any => ({ + ...obj, + }); +} + +export interface DescribeProjectRequest { + /** + *

        The name of the project that you want to describe.

        + */ + ProjectName: string | undefined; +} + +export namespace DescribeProjectRequest { + export const filterSensitiveLog = (obj: DescribeProjectRequest): any => ({ + ...obj, + }); +} + +/** + *

        Describe an Amazon Lookout for Vision project. For more information, see DescribeProject.

        + */ +export interface ProjectDescription { + /** + *

        The Amazon Resource Name (ARN) of the project.

        + */ + ProjectArn?: string; + + /** + *

        The name of the project.

        + */ + ProjectName?: string; + + /** + *

        The unix timestamp for the date and time that the project was created.

        + */ + CreationTimestamp?: Date; + + /** + *

        A list of datasets in the project.

        + */ + Datasets?: DatasetMetadata[]; +} + +export namespace ProjectDescription { + export const filterSensitiveLog = (obj: ProjectDescription): any => ({ + ...obj, + }); +} + +export interface DescribeProjectResponse { + /** + *

        The description of the project.

        + */ + ProjectDescription?: ProjectDescription; +} + +export namespace DescribeProjectResponse { + export const filterSensitiveLog = (obj: DescribeProjectResponse): any => ({ + ...obj, + }); +} + +export interface DetectAnomaliesRequest { + /** + *

        The name of the project that contains the model version that you want to use.

        + */ + ProjectName: string | undefined; + + /** + *

        The version of the model that you want to use.

        + */ + ModelVersion: string | undefined; + + /** + *

        The unencrypted image bytes that you want to analyze.

        + */ + Body: Readable | ReadableStream | Blob | undefined; + + /** + *

        The type of the image passed in Body. + * Valid values are image/png (PNG format images) and image/jpeg (JPG format images).

        + */ + ContentType: string | undefined; +} + +export namespace DetectAnomaliesRequest { + export const filterSensitiveLog = (obj: DetectAnomaliesRequest): any => ({ + ...obj, + }); +} + +/** + *

        The source for an image.

        + */ +export interface ImageSource { + /** + *

        The type of the image.

        + */ + Type?: string; +} + +export namespace ImageSource { + export const filterSensitiveLog = (obj: ImageSource): any => ({ + ...obj, + }); +} + +/** + *

        The prediction results from a call to DetectAnomalies.

        + */ +export interface DetectAnomalyResult { + /** + *

        The source of the image that was analyzed. direct means that the + * images was supplied from the local computer. No other values are supported.

        + */ + Source?: ImageSource; + + /** + *

        True if the image contains an anomaly, otherwise false.

        + */ + IsAnomalous?: boolean; + + /** + *

        The confidence that Amazon Lookout for Vision has in the accuracy of the prediction.

        + */ + Confidence?: number; +} + +export namespace DetectAnomalyResult { + export const filterSensitiveLog = (obj: DetectAnomalyResult): any => ({ + ...obj, + }); +} + +export interface DetectAnomaliesResponse { + /** + *

        The results of the DetectAnomalies operation.

        + */ + DetectAnomalyResult?: DetectAnomalyResult; +} + +export namespace DetectAnomaliesResponse { + export const filterSensitiveLog = (obj: DetectAnomaliesResponse): any => ({ + ...obj, + }); +} + +export interface ListDatasetEntriesRequest { + /** + *

        The name of the project that contains the dataset that you want to list.

        + */ + ProjectName: string | undefined; + + /** + *

        The type of the dataset that you want to list. Specify train to list + * the training dataset. Specify test to list the test dataset. If you have a single dataset + * project, specify train.

        + */ + DatasetType: string | undefined; + + /** + *

        Specify true to include labeled entries, otherwise specify false. If you + * don't specify a value, Lookout for Vision returns all entries.

        + */ + Labeled?: boolean; + + /** + *

        Specify normal to include only normal images. Specify anomaly to only include + * anomalous entries. If you don't specify a value, Amazon Lookout for Vision returns normal and anomalous images.

        + */ + AnomalyClass?: string; + + /** + *

        Only includes entries before the specified date in the response. For example, 2020-06-23T00:00:00.

        + */ + BeforeCreationDate?: Date; + + /** + *

        Only includes entries after the specified date in the response. For example, 2020-06-23T00:00:00.

        + */ + AfterCreationDate?: Date; + + /** + *

        If the previous response was incomplete (because there is more data to retrieve), + * Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to + * retrieve the next set of dataset entries.

        + */ + NextToken?: string; + + /** + *

        The maximum number of results to return per paginated call. The largest value you can specify is 100. + * If you specify a value greater than 100, a ValidationException + * error occurs. The default value is 100.

        + */ + MaxResults?: number; + + /** + *

        Perform a "contains" search on the values of the source-ref key within the dataset. + * For example a value of "IMG_17" returns all JSON Lines where the source-ref key value matches *IMG_17*.

        + */ + SourceRefContains?: string; +} + +export namespace ListDatasetEntriesRequest { + export const filterSensitiveLog = (obj: ListDatasetEntriesRequest): any => ({ + ...obj, + }); +} + +export interface ListDatasetEntriesResponse { + /** + *

        A list of the entries (JSON Lines) within the dataset.

        + */ + DatasetEntries?: string[]; + + /** + *

        If the response is truncated, Amazon Lookout for Vision returns this token + * that you can use in the subsequent request to retrieve the next set of dataset entries.

        + */ + NextToken?: string; +} + +export namespace ListDatasetEntriesResponse { + export const filterSensitiveLog = (obj: ListDatasetEntriesResponse): any => ({ + ...obj, + }); +} + +export interface ListModelsRequest { + /** + *

        The name of the project that contains the model versions that you want to list.

        + */ + ProjectName: string | undefined; + + /** + *

        If the previous response was incomplete (because there is more data to retrieve), + * Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to + * retrieve the next set of models.

        + */ + NextToken?: string; + + /** + *

        The maximum number of results to return per paginated call. The largest value you can specify is 100. + * If you specify a value greater than 100, a ValidationException + * error occurs. The default value is 100.

        + */ + MaxResults?: number; +} + +export namespace ListModelsRequest { + export const filterSensitiveLog = (obj: ListModelsRequest): any => ({ + ...obj, + }); +} + +export interface ListModelsResponse { + /** + *

        A list of model versions in the specified project.

        + */ + Models?: ModelMetadata[]; + + /** + *

        If the response is truncated, Amazon Lookout for Vision returns this token + * that you can use in the subsequent request to retrieve the next set of models.

        + */ + NextToken?: string; +} + +export namespace ListModelsResponse { + export const filterSensitiveLog = (obj: ListModelsResponse): any => ({ + ...obj, + }); +} + +export interface ListProjectsRequest { + /** + *

        If the previous response was incomplete (because there is more data to retrieve), + * Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to + * retrieve the next set of projects.

        + */ + NextToken?: string; + + /** + *

        The maximum number of results to return per paginated call. The largest value you can specify is 100. + * If you specify a value greater than 100, a ValidationException + * error occurs. The default value is 100.

        + */ + MaxResults?: number; +} + +export namespace ListProjectsRequest { + export const filterSensitiveLog = (obj: ListProjectsRequest): any => ({ + ...obj, + }); +} + +export interface ListProjectsResponse { + /** + *

        A list of projects in your AWS account.

        + */ + Projects?: ProjectMetadata[]; + + /** + *

        If the response is truncated, Amazon Lookout for Vision returns this token + * that you can use in the subsequent request to retrieve the next set of projects.

        + */ + NextToken?: string; +} + +export namespace ListProjectsResponse { + export const filterSensitiveLog = (obj: ListProjectsResponse): any => ({ + ...obj, + }); +} + +export interface StartModelRequest { + /** + *

        The name of the project that contains the model that you want to start.

        + */ + ProjectName: string | undefined; + + /** + *

        The version of the model that you want to start.

        + */ + ModelVersion: string | undefined; + + /** + *

        The minimum number of inference units to use. A single + * inference unit represents 1 hour of processing and can support up to 5 Transactions Per Second (TPS). + * Use a higher number to increase the TPS throughput of your model. You are charged for the number + * of inference units that you use. + *

        + */ + MinInferenceUnits: number | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to StartModel + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from StartModel. + * In this case, safely retry your call + * to StartModel by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to StartModel. An idempotency + * token is active for 8 hours. + *

        + */ + ClientToken?: string; +} + +export namespace StartModelRequest { + export const filterSensitiveLog = (obj: StartModelRequest): any => ({ + ...obj, + }); +} + +export enum ModelHostingStatus { + FAILED = "FAILED", + RUNNING = "RUNNING", + STARTING = "STARTING", + STOPPED = "STOPPED", +} + +export interface StartModelResponse { + /** + *

        The current running status of the model.

        + */ + Status?: ModelHostingStatus | string; +} + +export namespace StartModelResponse { + export const filterSensitiveLog = (obj: StartModelResponse): any => ({ + ...obj, + }); +} + +export interface StopModelRequest { + /** + *

        The name of the project that contains the model that you want to stop.

        + */ + ProjectName: string | undefined; + + /** + *

        The version of the model that you want to stop.

        + */ + ModelVersion: string | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to StopModel + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from StopModel. + * In this case, safely retry your call + * to StopModel by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to StopModel. An idempotency + * token is active for 8 hours. + * + * + *

        + */ + ClientToken?: string; +} + +export namespace StopModelRequest { + export const filterSensitiveLog = (obj: StopModelRequest): any => ({ + ...obj, + }); +} + +export interface StopModelResponse { + /** + *

        The status of the model.

        + */ + Status?: ModelHostingStatus | string; +} + +export namespace StopModelResponse { + export const filterSensitiveLog = (obj: StopModelResponse): any => ({ + ...obj, + }); +} + +export interface UpdateDatasetEntriesRequest { + /** + *

        The name of the project that contains the dataset that you want to update.

        + */ + ProjectName: string | undefined; + + /** + *

        The type of the dataset that you want to update. Specify train to update + * the training dataset. Specify test to update the test dataset. If you + * have a single dataset project, specify train.

        + */ + DatasetType: string | undefined; + + /** + *

        The entries to add to the dataset.

        + */ + Changes: Uint8Array | undefined; + + /** + *

        ClientToken is an idempotency token that ensures a call to UpdateDatasetEntries + * completes only once. You choose the value to pass. For example, an issue, + * such as a network outage, might prevent you from getting a response from UpdateDatasetEntries. + * In this case, safely retry your call + * to UpdateDatasetEntries by using the same ClientToken parameter value. An error occurs + * if the other input parameters are not the same as in the first request. Using a different + * value for ClientToken is considered a new call to UpdateDatasetEntries. An idempotency + * token is active for 8 hours. + *

        + */ + ClientToken?: string; +} + +export namespace UpdateDatasetEntriesRequest { + export const filterSensitiveLog = (obj: UpdateDatasetEntriesRequest): any => ({ + ...obj, + }); +} + +export interface UpdateDatasetEntriesResponse { + /** + *

        The status of the dataset update.

        + */ + Status?: DatasetStatus | string; +} + +export namespace UpdateDatasetEntriesResponse { + export const filterSensitiveLog = (obj: UpdateDatasetEntriesResponse): any => ({ + ...obj, + }); +} diff --git a/clients/client-lookoutvision/package.json b/clients/client-lookoutvision/package.json new file mode 100644 index 000000000000..af823f93d766 --- /dev/null +++ b/clients/client-lookoutvision/package.json @@ -0,0 +1,85 @@ +{ + "name": "@aws-sdk/client-lookoutvision", + "description": "AWS SDK for JavaScript Lookoutvision Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + "@aws-sdk/middleware-content-length": "1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + "@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": "1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + 
"@aws-sdk/middleware-signing": "1.0.0-rc.7", + "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0", + "uuid": "^3.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "@types/uuid": "^3.0.0", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-lookoutvision", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-lookoutvision" + } +} diff --git a/clients/client-lookoutvision/pagination/Interfaces.ts b/clients/client-lookoutvision/pagination/Interfaces.ts new file mode 100644 index 000000000000..8860cab55270 --- /dev/null +++ b/clients/client-lookoutvision/pagination/Interfaces.ts @@ -0,0 +1,7 @@ +import { LookoutVision } from "../LookoutVision"; +import { LookoutVisionClient } from "../LookoutVisionClient"; +import { PaginationConfiguration } from "@aws-sdk/types"; + +export interface 
LookoutVisionPaginationConfiguration extends PaginationConfiguration { + client: LookoutVision | LookoutVisionClient; +} diff --git a/clients/client-lookoutvision/pagination/ListDatasetEntriesPaginator.ts b/clients/client-lookoutvision/pagination/ListDatasetEntriesPaginator.ts new file mode 100644 index 000000000000..8fef2021922a --- /dev/null +++ b/clients/client-lookoutvision/pagination/ListDatasetEntriesPaginator.ts @@ -0,0 +1,57 @@ +import { LookoutVision } from "../LookoutVision"; +import { LookoutVisionClient } from "../LookoutVisionClient"; +import { + ListDatasetEntriesCommand, + ListDatasetEntriesCommandInput, + ListDatasetEntriesCommandOutput, +} from "../commands/ListDatasetEntriesCommand"; +import { LookoutVisionPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: LookoutVisionClient, + input: ListDatasetEntriesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListDatasetEntriesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: LookoutVision, + input: ListDatasetEntriesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listDatasetEntries(input, ...args); +}; +export async function* paginateListDatasetEntries( + config: LookoutVisionPaginationConfiguration, + input: ListDatasetEntriesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListDatasetEntriesCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof LookoutVision) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof LookoutVisionClient) { + page = await makePagedClientRequest(config.client, input, 
...additionalArguments); + } else { + throw new Error("Invalid client, expected LookoutVision | LookoutVisionClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-lookoutvision/pagination/ListModelsPaginator.ts b/clients/client-lookoutvision/pagination/ListModelsPaginator.ts new file mode 100644 index 000000000000..9115f754b0b1 --- /dev/null +++ b/clients/client-lookoutvision/pagination/ListModelsPaginator.ts @@ -0,0 +1,53 @@ +import { LookoutVision } from "../LookoutVision"; +import { LookoutVisionClient } from "../LookoutVisionClient"; +import { ListModelsCommand, ListModelsCommandInput, ListModelsCommandOutput } from "../commands/ListModelsCommand"; +import { LookoutVisionPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: LookoutVisionClient, + input: ListModelsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListModelsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: LookoutVision, + input: ListModelsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listModels(input, ...args); +}; +export async function* paginateListModels( + config: LookoutVisionPaginationConfiguration, + input: ListModelsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListModelsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof LookoutVision) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof LookoutVisionClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } 
else { + throw new Error("Invalid client, expected LookoutVision | LookoutVisionClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-lookoutvision/pagination/ListProjectsPaginator.ts b/clients/client-lookoutvision/pagination/ListProjectsPaginator.ts new file mode 100644 index 000000000000..8b4831b6369e --- /dev/null +++ b/clients/client-lookoutvision/pagination/ListProjectsPaginator.ts @@ -0,0 +1,57 @@ +import { LookoutVision } from "../LookoutVision"; +import { LookoutVisionClient } from "../LookoutVisionClient"; +import { + ListProjectsCommand, + ListProjectsCommandInput, + ListProjectsCommandOutput, +} from "../commands/ListProjectsCommand"; +import { LookoutVisionPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: LookoutVisionClient, + input: ListProjectsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListProjectsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: LookoutVision, + input: ListProjectsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listProjects(input, ...args); +}; +export async function* paginateListProjects( + config: LookoutVisionPaginationConfiguration, + input: ListProjectsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListProjectsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof LookoutVision) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof LookoutVisionClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); 
+ } else { + throw new Error("Invalid client, expected LookoutVision | LookoutVisionClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-lookoutvision/protocols/Aws_restJson1.ts b/clients/client-lookoutvision/protocols/Aws_restJson1.ts new file mode 100644 index 000000000000..af3955e2855b --- /dev/null +++ b/clients/client-lookoutvision/protocols/Aws_restJson1.ts @@ -0,0 +1,2723 @@ +import { CreateDatasetCommandInput, CreateDatasetCommandOutput } from "../commands/CreateDatasetCommand"; +import { CreateModelCommandInput, CreateModelCommandOutput } from "../commands/CreateModelCommand"; +import { CreateProjectCommandInput, CreateProjectCommandOutput } from "../commands/CreateProjectCommand"; +import { DeleteDatasetCommandInput, DeleteDatasetCommandOutput } from "../commands/DeleteDatasetCommand"; +import { DeleteModelCommandInput, DeleteModelCommandOutput } from "../commands/DeleteModelCommand"; +import { DeleteProjectCommandInput, DeleteProjectCommandOutput } from "../commands/DeleteProjectCommand"; +import { DescribeDatasetCommandInput, DescribeDatasetCommandOutput } from "../commands/DescribeDatasetCommand"; +import { DescribeModelCommandInput, DescribeModelCommandOutput } from "../commands/DescribeModelCommand"; +import { DescribeProjectCommandInput, DescribeProjectCommandOutput } from "../commands/DescribeProjectCommand"; +import { DetectAnomaliesCommandInput, DetectAnomaliesCommandOutput } from "../commands/DetectAnomaliesCommand"; +import { ListDatasetEntriesCommandInput, ListDatasetEntriesCommandOutput } from "../commands/ListDatasetEntriesCommand"; +import { ListModelsCommandInput, ListModelsCommandOutput } from "../commands/ListModelsCommand"; +import { ListProjectsCommandInput, ListProjectsCommandOutput } from "../commands/ListProjectsCommand"; +import { StartModelCommandInput, StartModelCommandOutput } from "../commands/StartModelCommand"; +import { 
StopModelCommandInput, StopModelCommandOutput } from "../commands/StopModelCommand"; +import { + UpdateDatasetEntriesCommandInput, + UpdateDatasetEntriesCommandOutput, +} from "../commands/UpdateDatasetEntriesCommand"; +import { + AccessDeniedException, + ConflictException, + DatasetDescription, + DatasetGroundTruthManifest, + DatasetImageStats, + DatasetMetadata, + DatasetSource, + DetectAnomalyResult, + ImageSource, + InputS3Object, + InternalServerException, + ModelDescription, + ModelMetadata, + ModelPerformance, + OutputConfig, + OutputS3Object, + ProjectDescription, + ProjectMetadata, + ResourceNotFoundException, + S3Location, + ServiceQuotaExceededException, + ThrottlingException, + ValidationException, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { + SmithyException as __SmithyException, + extendedEncodeURIComponent as __extendedEncodeURIComponent, +} from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; +import { v4 as generateIdempotencyToken } from "uuid"; + +export const serializeAws_restJson1CreateDatasetCommand = async ( + input: CreateDatasetCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/datasets"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + let body: any; + body = JSON.stringify({ + ...(input.DatasetSource !== undefined && { + DatasetSource: serializeAws_restJson1DatasetSource(input.DatasetSource, context), + }), + ...(input.DatasetType !== undefined && { DatasetType: input.DatasetType }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1CreateModelCommand = async ( + input: CreateModelCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/models"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + let body: any; + body = JSON.stringify({ + ...(input.Description !== undefined && { + Description: serializeAws_restJson1ModelDescription(input.Description, context), + }), + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), + ...(input.OutputConfig !== undefined && { + OutputConfig: serializeAws_restJson1OutputConfig(input.OutputConfig, context), + }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1CreateProjectCommand = async ( + input: CreateProjectCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects"; + let body: any; + body = JSON.stringify({ + ...(input.ProjectName !== undefined && { ProjectName: input.ProjectName }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteDatasetCommand = async ( + input: DeleteDatasetCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! }), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.DatasetType !== undefined) { + const labelValue: string = input.DatasetType; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DatasetType."); + } + resolvedPath = resolvedPath.replace("{DatasetType}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DatasetType."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteModelCommand = async ( + input: DeleteModelCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + 
...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! }), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.ModelVersion !== undefined) { + const labelValue: string = input.ModelVersion; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ModelVersion."); + } + resolvedPath = resolvedPath.replace("{ModelVersion}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ModelVersion."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DeleteProjectCommand = async ( + input: DeleteProjectCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeDatasetCommand = async ( + input: DescribeDatasetCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.DatasetType !== undefined) { + const labelValue: string = input.DatasetType; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DatasetType."); + } + resolvedPath = resolvedPath.replace("{DatasetType}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DatasetType."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export 
const serializeAws_restJson1DescribeModelCommand = async ( + input: DescribeModelCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.ModelVersion !== undefined) { + const labelValue: string = input.ModelVersion; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ModelVersion."); + } + resolvedPath = resolvedPath.replace("{ModelVersion}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ModelVersion."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DescribeProjectCommand = async ( + input: DescribeProjectCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + let body: any; + const { hostname, 
protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1DetectAnomaliesCommand = async ( + input: DetectAnomaliesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/octet-stream", + ...(isSerializableHeaderValue(input.ContentType) && { "content-type": input.ContentType! }), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}/detect"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.ModelVersion !== undefined) { + const labelValue: string = input.ModelVersion; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ModelVersion."); + } + resolvedPath = resolvedPath.replace("{ModelVersion}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ModelVersion."); + } + let body: any; + if (input.Body !== undefined) { + body = input.Body; + } + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1ListDatasetEntriesCommand = async ( + input: ListDatasetEntriesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}/entries"; + if 
(input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.DatasetType !== undefined) { + const labelValue: string = input.DatasetType; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DatasetType."); + } + resolvedPath = resolvedPath.replace("{DatasetType}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DatasetType."); + } + const query: any = { + ...(input.Labeled !== undefined && { labeled: input.Labeled.toString() }), + ...(input.AnomalyClass !== undefined && { anomalyClass: input.AnomalyClass }), + ...(input.BeforeCreationDate !== undefined && { + createdBefore: (input.BeforeCreationDate.toISOString().split(".")[0] + "Z").toString(), + }), + ...(input.AfterCreationDate !== undefined && { + createdAfter: (input.AfterCreationDate.toISOString().split(".")[0] + "Z").toString(), + }), + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + ...(input.SourceRefContains !== undefined && { sourceRefContains: input.SourceRefContains }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1ListModelsCommand = async ( + input: ListModelsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = 
"/2020-11-20/projects/{ProjectName}/models"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + const query: any = { + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1ListProjectsCommand = async ( + input: ListProjectsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/2020-11-20/projects"; + const query: any = { + ...(input.NextToken !== undefined && { nextToken: input.NextToken }), + ...(input.MaxResults !== undefined && { maxResults: input.MaxResults.toString() }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1StartModelCommand = async ( + input: StartModelCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}/start"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.ModelVersion !== undefined) { + const labelValue: string = input.ModelVersion; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ModelVersion."); + } + resolvedPath = resolvedPath.replace("{ModelVersion}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ModelVersion."); + } + let body: any; + body = JSON.stringify({ + ...(input.MinInferenceUnits !== undefined && { MinInferenceUnits: input.MinInferenceUnits }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1StopModelCommand = async ( + input: StopModelCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}/stop"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.ModelVersion !== undefined) { + const labelValue: string = input.ModelVersion; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ModelVersion."); + } + resolvedPath = resolvedPath.replace("{ModelVersion}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ModelVersion."); + } + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "POST", + headers, + path: resolvedPath, + body, + }); +}; + +export const serializeAws_restJson1UpdateDatasetEntriesCommand = async ( + input: UpdateDatasetEntriesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + ...(isSerializableHeaderValue(input.ClientToken) && { "X-Amzn-Client-Token": input.ClientToken! 
}), + }; + let resolvedPath = "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}/entries"; + if (input.ProjectName !== undefined) { + const labelValue: string = input.ProjectName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: ProjectName."); + } + resolvedPath = resolvedPath.replace("{ProjectName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: ProjectName."); + } + if (input.DatasetType !== undefined) { + const labelValue: string = input.DatasetType; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: DatasetType."); + } + resolvedPath = resolvedPath.replace("{DatasetType}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: DatasetType."); + } + let body: any; + body = JSON.stringify({ + ...(input.Changes !== undefined && { Changes: context.base64Encoder(input.Changes) }), + }); + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "PATCH", + headers, + path: resolvedPath, + body, + }); +}; + +export const deserializeAws_restJson1CreateDatasetCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateDatasetCommandError(output, context); + } + const contents: CreateDatasetCommandOutput = { + $metadata: deserializeMetadata(output), + DatasetMetadata: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.DatasetMetadata !== undefined && data.DatasetMetadata !== null) { + contents.DatasetMetadata = deserializeAws_restJson1DatasetMetadata(data.DatasetMetadata, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateDatasetCommandError = async ( + output: 
__HttpResponse, + context: __SerdeContext +): Promise<CreateDatasetCommandOutput> => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.lookoutvision#ServiceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateModelCommandError(output, context); + } + const contents: CreateModelCommandOutput = { + $metadata: deserializeMetadata(output), + ModelMetadata: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ModelMetadata !== undefined && data.ModelMetadata !== null) { + contents.ModelMetadata = deserializeAws_restJson1ModelMetadata(data.ModelMetadata, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case 
"com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.lookoutvision#ServiceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || 
errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1CreateProjectCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1CreateProjectCommandError(output, context); + } + const contents: CreateProjectCommandOutput = { + $metadata: deserializeMetadata(output), + ProjectMetadata: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ProjectMetadata !== undefined && data.ProjectMetadata !== null) { + contents.ProjectMetadata = deserializeAws_restJson1ProjectMetadata(data.ProjectMetadata, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1CreateProjectCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, 
context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.lookoutvision#ServiceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + 
+export const deserializeAws_restJson1DeleteDatasetCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise<DeleteDatasetCommandOutput> => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteDatasetCommandError(output, context); + } + const contents: DeleteDatasetCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteDatasetCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise<DeleteDatasetCommandOutput> => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; 
+ case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteModelCommandError(output, context); + } + const contents: DeleteModelCommandOutput = { + $metadata: deserializeMetadata(output), + ModelArn: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ModelArn !== undefined && data.ModelArn !== null) { + contents.ModelArn = data.ModelArn; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; 
+ errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: 
deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DeleteProjectCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DeleteProjectCommandError(output, context); + } + const contents: DeleteProjectCommandOutput = { + $metadata: deserializeMetadata(output), + ProjectArn: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ProjectArn !== undefined && data.ProjectArn !== null) { + contents.ProjectArn = data.ProjectArn; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DeleteProjectCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await 
deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DescribeDatasetCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DescribeDatasetCommandError(output, context); + } + const contents: DescribeDatasetCommandOutput = { + $metadata: deserializeMetadata(output), + DatasetDescription: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.DatasetDescription 
!== undefined && data.DatasetDescription !== null) { + contents.DatasetDescription = deserializeAws_restJson1DatasetDescription(data.DatasetDescription, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DescribeDatasetCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: 
deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1DescribeModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1DescribeModelCommandError(output, context); + } + const contents: DescribeModelCommandOutput = { + $metadata: deserializeMetadata(output), + ModelDescription: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.ModelDescription !== undefined && data.ModelDescription !== null) { + contents.ModelDescription = deserializeAws_restJson1ModelDescription(data.ModelDescription, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1DescribeModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + 
case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message 
= message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

/**
 * Deserializes a DescribeProject HTTP response; the ProjectDescription shape
 * is delegated to its dedicated deserializer.
 */
export const deserializeAws_restJson1DescribeProjectCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise<DescribeProjectCommandOutput> => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1DescribeProjectCommandError(output, context);
  }
  const contents: DescribeProjectCommandOutput = {
    $metadata: deserializeMetadata(output),
    ProjectDescription: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.ProjectDescription !== undefined && data.ProjectDescription !== null) {
    contents.ProjectDescription = deserializeAws_restJson1ProjectDescription(data.ProjectDescription, context);
  }
  return Promise.resolve(contents);
};

/**
 * Maps a DescribeProject error response to its modeled exception by error
 * code; unknown codes become a generic client-fault error.
 */
const deserializeAws_restJson1DescribeProjectCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise<DescribeProjectCommandOutput> => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.lookoutvision#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ConflictException":
    case "com.amazonaws.lookoutvision#ConflictException":
      response = {
        ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.lookoutvision#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.lookoutvision#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.lookoutvision#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case "com.amazonaws.lookoutvision#ValidationException":
      response = {
        ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    default:
      const parsedBody = parsedOutput.body;
      errorCode = parsedBody.code || parsedBody.Code || errorCode;
      response = {
        ...parsedBody,
        name: `${errorCode}`,
        message: parsedBody.message || parsedBody.Message || errorCode,
        $fault: "client",
        $metadata: deserializeMetadata(output),
      } as any;
  }
  const message = response.message || response.Message || errorCode;
  response.message = message;
  delete response.Message;
  return Promise.reject(Object.assign(new Error(message), response));
};

/**
 * Deserializes a DetectAnomalies HTTP response; the DetectAnomalyResult
 * shape is delegated to its dedicated deserializer.
 */
export const deserializeAws_restJson1DetectAnomaliesCommand = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise<DetectAnomaliesCommandOutput> => {
  if (output.statusCode !== 200 && output.statusCode >= 300) {
    return deserializeAws_restJson1DetectAnomaliesCommandError(output, context);
  }
  const contents: DetectAnomaliesCommandOutput = {
    $metadata: deserializeMetadata(output),
    DetectAnomalyResult: undefined,
  };
  const data: any = await parseBody(output.body, context);
  if (data.DetectAnomalyResult !== undefined && data.DetectAnomalyResult !== null) {
    contents.DetectAnomalyResult = deserializeAws_restJson1DetectAnomalyResult(data.DetectAnomalyResult, context);
  }
  return Promise.resolve(contents);
};

/**
 * Maps a DetectAnomalies error response to its modeled exception by error
 * code; unknown codes become a generic client-fault error.
 */
const deserializeAws_restJson1DetectAnomaliesCommandError = async (
  output: __HttpResponse,
  context: __SerdeContext
): Promise<DetectAnomaliesCommandOutput> => {
  const parsedOutput: any = {
    ...output,
    body: await parseBody(output.body, context),
  };
  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
  let errorCode: string = "UnknownError";
  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
  switch (errorCode) {
    case "AccessDeniedException":
    case "com.amazonaws.lookoutvision#AccessDeniedException":
      response = {
        ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ConflictException":
    case "com.amazonaws.lookoutvision#ConflictException":
      response = {
        ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "InternalServerException":
    case "com.amazonaws.lookoutvision#InternalServerException":
      response = {
        ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ResourceNotFoundException":
    case "com.amazonaws.lookoutvision#ResourceNotFoundException":
      response = {
        ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ThrottlingException":
    case "com.amazonaws.lookoutvision#ThrottlingException":
      response = {
        ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)),
        name: errorCode,
        $metadata: deserializeMetadata(output),
      };
      break;
    case "ValidationException":
    case
"com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListDatasetEntriesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListDatasetEntriesCommandError(output, context); + } + const contents: ListDatasetEntriesCommandOutput = { + $metadata: deserializeMetadata(output), + DatasetEntries: undefined, + NextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.DatasetEntries !== undefined && data.DatasetEntries !== null) { + contents.DatasetEntries = deserializeAws_restJson1DatasetEntryList(data.DatasetEntries, context); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListDatasetEntriesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, 
parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const 
message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListModelsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListModelsCommandError(output, context); + } + const contents: ListModelsCommandOutput = { + $metadata: deserializeMetadata(output), + Models: undefined, + NextToken: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.Models !== undefined && data.Models !== null) { + contents.Models = deserializeAws_restJson1ModelMetadataList(data.Models, context); + } + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListModelsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case 
"com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1ListProjectsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1ListProjectsCommandError(output, context); + } + const contents: ListProjectsCommandOutput = { + $metadata: deserializeMetadata(output), + NextToken: undefined, + Projects: undefined, + }; + 
const data: any = await parseBody(output.body, context); + if (data.NextToken !== undefined && data.NextToken !== null) { + contents.NextToken = data.NextToken; + } + if (data.Projects !== undefined && data.Projects !== null) { + contents.Projects = deserializeAws_restJson1ProjectMetadataList(data.Projects, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1ListProjectsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response 
= { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1StartModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1StartModelCommandError(output, context); + } + const contents: StartModelCommandOutput = { + $metadata: deserializeMetadata(output), + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.Status !== undefined && data.Status !== null) { + contents.Status = data.Status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1StartModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case 
"AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceQuotaExceededException": + case "com.amazonaws.lookoutvision#ServiceQuotaExceededException": + response = { + ...(await deserializeAws_restJson1ServiceQuotaExceededExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = 
parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1StopModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1StopModelCommandError(output, context); + } + const contents: StopModelCommandOutput = { + $metadata: deserializeMetadata(output), + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.Status !== undefined && data.Status !== null) { + contents.Status = data.Status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1StopModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + 
break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1UpdateDatasetEntriesCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 202 && output.statusCode >= 300) { + return deserializeAws_restJson1UpdateDatasetEntriesCommandError(output, context); + } + const contents: UpdateDatasetEntriesCommandOutput = { + $metadata: 
deserializeMetadata(output), + Status: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.Status !== undefined && data.Status !== null) { + contents.Status = data.Status; + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1UpdateDatasetEntriesCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessDeniedException": + case "com.amazonaws.lookoutvision#AccessDeniedException": + response = { + ...(await deserializeAws_restJson1AccessDeniedExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ConflictException": + case "com.amazonaws.lookoutvision#ConflictException": + response = { + ...(await deserializeAws_restJson1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalServerException": + case "com.amazonaws.lookoutvision#InternalServerException": + response = { + ...(await deserializeAws_restJson1InternalServerExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFoundException": + case "com.amazonaws.lookoutvision#ResourceNotFoundException": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ThrottlingException": + case "com.amazonaws.lookoutvision#ThrottlingException": + response = { + ...(await deserializeAws_restJson1ThrottlingExceptionResponse(parsedOutput, context)), + name: 
errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationException": + case "com.amazonaws.lookoutvision#ValidationException": + response = { + ...(await deserializeAws_restJson1ValidationExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +const deserializeAws_restJson1AccessDeniedExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: AccessDeniedException = { + name: "AccessDeniedException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ConflictExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ConflictException = { + name: "ConflictException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + ResourceId: undefined, + ResourceType: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.ResourceId !== undefined && data.ResourceId !== null) { + contents.ResourceId = data.ResourceId; + } + if (data.ResourceType !== undefined && data.ResourceType !== null) { + 
contents.ResourceType = data.ResourceType; + } + return contents; +}; + +const deserializeAws_restJson1InternalServerExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InternalServerException = { + name: "InternalServerException", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + RetryAfterSeconds: undefined, + }; + if (parsedOutput.headers["retry-after"] !== undefined) { + contents.RetryAfterSeconds = parseInt(parsedOutput.headers["retry-after"], 10); + } + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ResourceNotFoundExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ResourceNotFoundException = { + name: "ResourceNotFoundException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + ResourceId: undefined, + ResourceType: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.ResourceId !== undefined && data.ResourceId !== null) { + contents.ResourceId = data.ResourceId; + } + if (data.ResourceType !== undefined && data.ResourceType !== null) { + contents.ResourceType = data.ResourceType; + } + return contents; +}; + +const deserializeAws_restJson1ServiceQuotaExceededExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ServiceQuotaExceededException = { + name: "ServiceQuotaExceededException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + QuotaCode: undefined, + ResourceId: undefined, + ResourceType: undefined, + ServiceCode: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined 
&& data.Message !== null) { + contents.Message = data.Message; + } + if (data.QuotaCode !== undefined && data.QuotaCode !== null) { + contents.QuotaCode = data.QuotaCode; + } + if (data.ResourceId !== undefined && data.ResourceId !== null) { + contents.ResourceId = data.ResourceId; + } + if (data.ResourceType !== undefined && data.ResourceType !== null) { + contents.ResourceType = data.ResourceType; + } + if (data.ServiceCode !== undefined && data.ServiceCode !== null) { + contents.ServiceCode = data.ServiceCode; + } + return contents; +}; + +const deserializeAws_restJson1ThrottlingExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ThrottlingException = { + name: "ThrottlingException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + QuotaCode: undefined, + RetryAfterSeconds: undefined, + ServiceCode: undefined, + }; + if (parsedOutput.headers["retry-after"] !== undefined) { + contents.RetryAfterSeconds = parseInt(parsedOutput.headers["retry-after"], 10); + } + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + if (data.QuotaCode !== undefined && data.QuotaCode !== null) { + contents.QuotaCode = data.QuotaCode; + } + if (data.ServiceCode !== undefined && data.ServiceCode !== null) { + contents.ServiceCode = data.ServiceCode; + } + return contents; +}; + +const deserializeAws_restJson1ValidationExceptionResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ValidationException = { + name: "ValidationException", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const serializeAws_restJson1DatasetGroundTruthManifest = ( + input: 
DatasetGroundTruthManifest, + context: __SerdeContext +): any => { + return { + ...(input.S3Object !== undefined && { S3Object: serializeAws_restJson1InputS3Object(input.S3Object, context) }), + }; +}; + +const serializeAws_restJson1DatasetSource = (input: DatasetSource, context: __SerdeContext): any => { + return { + ...(input.GroundTruthManifest !== undefined && { + GroundTruthManifest: serializeAws_restJson1DatasetGroundTruthManifest(input.GroundTruthManifest, context), + }), + }; +}; + +const serializeAws_restJson1InputS3Object = (input: InputS3Object, context: __SerdeContext): any => { + return { + ...(input.Bucket !== undefined && { Bucket: input.Bucket }), + ...(input.Key !== undefined && { Key: input.Key }), + ...(input.VersionId !== undefined && { VersionId: input.VersionId }), + }; +}; + +const serializeAws_restJson1ModelDescription = (input: ModelDescription, context: __SerdeContext): any => { + return { + ...(input.CreationTimestamp !== undefined && { + CreationTimestamp: Math.round(input.CreationTimestamp.getTime() / 1000), + }), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.EvaluationEndTimestamp !== undefined && { + EvaluationEndTimestamp: Math.round(input.EvaluationEndTimestamp.getTime() / 1000), + }), + ...(input.EvaluationManifest !== undefined && { + EvaluationManifest: serializeAws_restJson1OutputS3Object(input.EvaluationManifest, context), + }), + ...(input.EvaluationResult !== undefined && { + EvaluationResult: serializeAws_restJson1OutputS3Object(input.EvaluationResult, context), + }), + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), + ...(input.ModelArn !== undefined && { ModelArn: input.ModelArn }), + ...(input.ModelVersion !== undefined && { ModelVersion: input.ModelVersion }), + ...(input.OutputConfig !== undefined && { + OutputConfig: serializeAws_restJson1OutputConfig(input.OutputConfig, context), + }), + ...(input.Performance !== undefined && { + Performance: 
serializeAws_restJson1ModelPerformance(input.Performance, context), + }), + ...(input.Status !== undefined && { Status: input.Status }), + ...(input.StatusMessage !== undefined && { StatusMessage: input.StatusMessage }), + }; +}; + +const serializeAws_restJson1ModelPerformance = (input: ModelPerformance, context: __SerdeContext): any => { + return { + ...(input.F1Score !== undefined && { F1Score: input.F1Score }), + ...(input.Precision !== undefined && { Precision: input.Precision }), + ...(input.Recall !== undefined && { Recall: input.Recall }), + }; +}; + +const serializeAws_restJson1OutputConfig = (input: OutputConfig, context: __SerdeContext): any => { + return { + ...(input.S3Location !== undefined && { S3Location: serializeAws_restJson1S3Location(input.S3Location, context) }), + }; +}; + +const serializeAws_restJson1OutputS3Object = (input: OutputS3Object, context: __SerdeContext): any => { + return { + ...(input.Bucket !== undefined && { Bucket: input.Bucket }), + ...(input.Key !== undefined && { Key: input.Key }), + }; +}; + +const serializeAws_restJson1S3Location = (input: S3Location, context: __SerdeContext): any => { + return { + ...(input.Bucket !== undefined && { Bucket: input.Bucket }), + ...(input.Prefix !== undefined && { Prefix: input.Prefix }), + }; +}; + +const deserializeAws_restJson1DatasetDescription = (output: any, context: __SerdeContext): DatasetDescription => { + return { + CreationTimestamp: + output.CreationTimestamp !== undefined && output.CreationTimestamp !== null + ? new Date(Math.round(output.CreationTimestamp * 1000)) + : undefined, + DatasetType: output.DatasetType !== undefined && output.DatasetType !== null ? output.DatasetType : undefined, + ImageStats: + output.ImageStats !== undefined && output.ImageStats !== null + ? deserializeAws_restJson1DatasetImageStats(output.ImageStats, context) + : undefined, + LastUpdatedTimestamp: + output.LastUpdatedTimestamp !== undefined && output.LastUpdatedTimestamp !== null + ? 
new Date(Math.round(output.LastUpdatedTimestamp * 1000)) + : undefined, + ProjectName: output.ProjectName !== undefined && output.ProjectName !== null ? output.ProjectName : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + StatusMessage: + output.StatusMessage !== undefined && output.StatusMessage !== null ? output.StatusMessage : undefined, + } as any; +}; + +const deserializeAws_restJson1DatasetEntryList = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_restJson1DatasetImageStats = (output: any, context: __SerdeContext): DatasetImageStats => { + return { + Anomaly: output.Anomaly !== undefined && output.Anomaly !== null ? output.Anomaly : undefined, + Labeled: output.Labeled !== undefined && output.Labeled !== null ? output.Labeled : undefined, + Normal: output.Normal !== undefined && output.Normal !== null ? output.Normal : undefined, + Total: output.Total !== undefined && output.Total !== null ? output.Total : undefined, + } as any; +}; + +const deserializeAws_restJson1DatasetMetadata = (output: any, context: __SerdeContext): DatasetMetadata => { + return { + CreationTimestamp: + output.CreationTimestamp !== undefined && output.CreationTimestamp !== null + ? new Date(Math.round(output.CreationTimestamp * 1000)) + : undefined, + DatasetType: output.DatasetType !== undefined && output.DatasetType !== null ? output.DatasetType : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + StatusMessage: + output.StatusMessage !== undefined && output.StatusMessage !== null ? 
output.StatusMessage : undefined, + } as any; +}; + +const deserializeAws_restJson1DatasetMetadataList = (output: any, context: __SerdeContext): DatasetMetadata[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1DatasetMetadata(entry, context)); +}; + +const deserializeAws_restJson1DetectAnomalyResult = (output: any, context: __SerdeContext): DetectAnomalyResult => { + return { + Confidence: output.Confidence !== undefined && output.Confidence !== null ? output.Confidence : undefined, + IsAnomalous: output.IsAnomalous !== undefined && output.IsAnomalous !== null ? output.IsAnomalous : undefined, + Source: + output.Source !== undefined && output.Source !== null + ? deserializeAws_restJson1ImageSource(output.Source, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1ImageSource = (output: any, context: __SerdeContext): ImageSource => { + return { + Type: output.Type !== undefined && output.Type !== null ? output.Type : undefined, + } as any; +}; + +const deserializeAws_restJson1ModelDescription = (output: any, context: __SerdeContext): ModelDescription => { + return { + CreationTimestamp: + output.CreationTimestamp !== undefined && output.CreationTimestamp !== null + ? new Date(Math.round(output.CreationTimestamp * 1000)) + : undefined, + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + EvaluationEndTimestamp: + output.EvaluationEndTimestamp !== undefined && output.EvaluationEndTimestamp !== null + ? new Date(Math.round(output.EvaluationEndTimestamp * 1000)) + : undefined, + EvaluationManifest: + output.EvaluationManifest !== undefined && output.EvaluationManifest !== null + ? deserializeAws_restJson1OutputS3Object(output.EvaluationManifest, context) + : undefined, + EvaluationResult: + output.EvaluationResult !== undefined && output.EvaluationResult !== null + ? 
deserializeAws_restJson1OutputS3Object(output.EvaluationResult, context) + : undefined, + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, + ModelArn: output.ModelArn !== undefined && output.ModelArn !== null ? output.ModelArn : undefined, + ModelVersion: output.ModelVersion !== undefined && output.ModelVersion !== null ? output.ModelVersion : undefined, + OutputConfig: + output.OutputConfig !== undefined && output.OutputConfig !== null + ? deserializeAws_restJson1OutputConfig(output.OutputConfig, context) + : undefined, + Performance: + output.Performance !== undefined && output.Performance !== null + ? deserializeAws_restJson1ModelPerformance(output.Performance, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + StatusMessage: + output.StatusMessage !== undefined && output.StatusMessage !== null ? output.StatusMessage : undefined, + } as any; +}; + +const deserializeAws_restJson1ModelMetadata = (output: any, context: __SerdeContext): ModelMetadata => { + return { + CreationTimestamp: + output.CreationTimestamp !== undefined && output.CreationTimestamp !== null + ? new Date(Math.round(output.CreationTimestamp * 1000)) + : undefined, + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + ModelArn: output.ModelArn !== undefined && output.ModelArn !== null ? output.ModelArn : undefined, + ModelVersion: output.ModelVersion !== undefined && output.ModelVersion !== null ? output.ModelVersion : undefined, + Performance: + output.Performance !== undefined && output.Performance !== null + ? deserializeAws_restJson1ModelPerformance(output.Performance, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + StatusMessage: + output.StatusMessage !== undefined && output.StatusMessage !== null ? 
output.StatusMessage : undefined, + } as any; +}; + +const deserializeAws_restJson1ModelMetadataList = (output: any, context: __SerdeContext): ModelMetadata[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ModelMetadata(entry, context)); +}; + +const deserializeAws_restJson1ModelPerformance = (output: any, context: __SerdeContext): ModelPerformance => { + return { + F1Score: output.F1Score !== undefined && output.F1Score !== null ? output.F1Score : undefined, + Precision: output.Precision !== undefined && output.Precision !== null ? output.Precision : undefined, + Recall: output.Recall !== undefined && output.Recall !== null ? output.Recall : undefined, + } as any; +}; + +const deserializeAws_restJson1OutputConfig = (output: any, context: __SerdeContext): OutputConfig => { + return { + S3Location: + output.S3Location !== undefined && output.S3Location !== null + ? deserializeAws_restJson1S3Location(output.S3Location, context) + : undefined, + } as any; +}; + +const deserializeAws_restJson1OutputS3Object = (output: any, context: __SerdeContext): OutputS3Object => { + return { + Bucket: output.Bucket !== undefined && output.Bucket !== null ? output.Bucket : undefined, + Key: output.Key !== undefined && output.Key !== null ? output.Key : undefined, + } as any; +}; + +const deserializeAws_restJson1ProjectDescription = (output: any, context: __SerdeContext): ProjectDescription => { + return { + CreationTimestamp: + output.CreationTimestamp !== undefined && output.CreationTimestamp !== null + ? new Date(Math.round(output.CreationTimestamp * 1000)) + : undefined, + Datasets: + output.Datasets !== undefined && output.Datasets !== null + ? deserializeAws_restJson1DatasetMetadataList(output.Datasets, context) + : undefined, + ProjectArn: output.ProjectArn !== undefined && output.ProjectArn !== null ? output.ProjectArn : undefined, + ProjectName: output.ProjectName !== undefined && output.ProjectName !== null ? 
output.ProjectName : undefined, + } as any; +}; + +const deserializeAws_restJson1ProjectMetadata = (output: any, context: __SerdeContext): ProjectMetadata => { + return { + CreationTimestamp: + output.CreationTimestamp !== undefined && output.CreationTimestamp !== null + ? new Date(Math.round(output.CreationTimestamp * 1000)) + : undefined, + ProjectArn: output.ProjectArn !== undefined && output.ProjectArn !== null ? output.ProjectArn : undefined, + ProjectName: output.ProjectName !== undefined && output.ProjectName !== null ? output.ProjectName : undefined, + } as any; +}; + +const deserializeAws_restJson1ProjectMetadataList = (output: any, context: __SerdeContext): ProjectMetadata[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1ProjectMetadata(entry, context)); +}; + +const deserializeAws_restJson1S3Location = (output: any, context: __SerdeContext): S3Location => { + return { + Bucket: output.Bucket !== undefined && output.Bucket !== null ? output.Bucket : undefined, + Prefix: output.Prefix !== undefined && output.Prefix !== null ? output.Prefix : undefined, + } as any; +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. +const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}; + +// Encode Uint8Array data into string with utf-8. 
+const collectBodyString = (streamBody: any, context: __SerdeContext): Promise => + collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); + +const isSerializableHeaderValue = (value: any): boolean => + value !== undefined && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); + +const parseBody = (streamBody: any, context: __SerdeContext): any => + collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; + }); + +/** + * Load an error code for the aws.rest-json-1.1 protocol. + */ +const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => { + const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + + const sanitizeErrorCode = (rawValue: string): string => { + let cleanValue = rawValue; + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + + return ""; +}; diff --git a/clients/client-lookoutvision/runtimeConfig.browser.ts b/clients/client-lookoutvision/runtimeConfig.browser.ts new file mode 100644 index 000000000000..174f22a943bb --- /dev/null +++ b/clients/client-lookoutvision/runtimeConfig.browser.ts @@ -0,0 +1,34 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler"; +import { 
invalidAsyncFunction } from "@aws-sdk/invalid-dependency"; +import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry"; +import { parseUrl } from "@aws-sdk/url-parser-browser"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-browser"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser"; +import { ClientDefaults } from "./LookoutVisionClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required<ClientDefaults> = { + ...ClientSharedValues, + runtime: "browser", + base64Decoder: fromBase64, + base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider: invalidAsyncFunction("Credential is missing") as any, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: DEFAULT_MAX_ATTEMPTS, + region: invalidAsyncFunction("Region is missing") as any, + requestHandler: new FetchHttpHandler(), + sha256: Sha256, + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-lookoutvision/runtimeConfig.native.ts b/clients/client-lookoutvision/runtimeConfig.native.ts new file mode 100644 index 000000000000..9c425471e054 --- /dev/null +++ b/clients/client-lookoutvision/runtimeConfig.native.ts @@ -0,0 +1,17 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-js"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { ClientDefaults } from "./LookoutVisionClient"; +import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser"; + +/** + * @internal + */ +export const ClientDefaultValues: Required<ClientDefaults> = { + ...BrowserDefaults, + runtime: "react-native", + defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`, + sha256: Sha256, + 
urlParser: parseUrl, +}; diff --git a/clients/client-lookoutvision/runtimeConfig.shared.ts b/clients/client-lookoutvision/runtimeConfig.shared.ts new file mode 100644 index 000000000000..30843d427c0d --- /dev/null +++ b/clients/client-lookoutvision/runtimeConfig.shared.ts @@ -0,0 +1,13 @@ +import { defaultRegionInfoProvider } from "./endpoints"; +import { Logger as __Logger } from "@aws-sdk/types"; + +/** + * @internal + */ +export const ClientSharedValues = { + apiVersion: "2020-11-20", + disableHostPrefix: false, + logger: {} as __Logger, + regionInfoProvider: defaultRegionInfoProvider, + signingName: "lookoutvision", +}; diff --git a/clients/client-lookoutvision/runtimeConfig.ts b/clients/client-lookoutvision/runtimeConfig.ts new file mode 100644 index 000000000000..9ebb374218c6 --- /dev/null +++ b/clients/client-lookoutvision/runtimeConfig.ts @@ -0,0 +1,36 @@ +import packageInfo from "./package.json"; + +import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver"; +import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node"; +import { Hash } from "@aws-sdk/hash-node"; +import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry"; +import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider"; +import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler"; +import { parseUrl } from "@aws-sdk/url-parser-node"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-node"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-node"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node"; +import { ClientDefaults } from "./LookoutVisionClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: Required = { + ...ClientSharedValues, + runtime: "node", + base64Decoder: fromBase64, + 
base64Encoder: toBase64, + bodyLengthChecker: calculateBodyLength, + credentialDefaultProvider, + defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version), + maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS), + region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS), + requestHandler: new NodeHttpHandler(), + sha256: Hash.bind(null, "sha256"), + streamCollector, + urlParser: parseUrl, + utf8Decoder: fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-lookoutvision/tsconfig.es.json b/clients/client-lookoutvision/tsconfig.es.json new file mode 100644 index 000000000000..30df5d2e6986 --- /dev/null +++ b/clients/client-lookoutvision/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-lookoutvision/tsconfig.json b/clients/client-lookoutvision/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-lookoutvision/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": "./README.md", + "mode": "file", + "out": "./docs", 
+ "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-s3/S3.ts b/clients/client-s3/S3.ts index 7c281b3c64b8..770af387eeda 100644 --- a/clients/client-s3/S3.ts +++ b/clients/client-s3/S3.ts @@ -748,19 +748,12 @@ export class S3 extends S3Client { * x-amz-copy-source, must be signed.

        * *

        - * Encryption + * Server-side encryption *

        - *

        The source object that you are copying can be encrypted or unencrypted. The source - * object can be encrypted with server-side encryption using AWS managed encryption keys - * (SSE-S3 or SSE-KMS) or by using a customer-provided encryption key. With server-side - * encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and - * decrypts the data when you access it.

        - *

        You can optionally use the appropriate encryption-related headers to request server-side - * encryption for the target object. You have the option to provide your own encryption key or - * use SSE-S3 or SSE-KMS, regardless of the form of server-side encryption that was used to - * encrypt the source object. You can even request encryption if the source object was not - * encrypted. For more information about server-side encryption, see Using - * Server-Side Encryption.

        + *

        When you perform a CopyObject operation, you can optionally use the appropriate encryption-related headers to encrypt the object using server-side encryption with AWS managed encryption keys (SSE-S3 or SSE-KMS) or a customer-provided encryption key. With server-side encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts the data when you access it. For more information about server-side encryption, see Using + * Server-Side Encryption.

        + *

        If a target object uses SSE-KMS, you can enable an S3 Bucket Key for the object. For more + * information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        *

        * Access Control List (ACL)-Specific Request * Headers @@ -5266,13 +5259,14 @@ export class S3 extends S3Client { } /** - *

        This implementation of the PUT operation uses the encryption - * subresource to set the default encryption state of an existing bucket.

        - *

        This implementation of the PUT operation sets default encryption for a - * bucket using server-side encryption with Amazon S3-managed keys SSE-S3 or AWS KMS customer - * master keys (CMKs) (SSE-KMS). For information about the Amazon S3 default encryption feature, - * see Amazon S3 Default Bucket - * Encryption.

        + *

        This operation uses the encryption subresource to configure default + * encryption and Amazon S3 Bucket Key for an existing bucket.

        + *

        Default encryption for a bucket can use server-side encryption with Amazon S3-managed keys + * (SSE-S3) or AWS KMS customer master keys (SSE-KMS). If you specify default encryption + * using SSE-KMS, you can also configure Amazon S3 Bucket Key. For information about default + * encryption, see Amazon S3 default bucket encryption + * in the Amazon Simple Storage Service Developer Guide. For more information about S3 Bucket Keys, + * see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        * *

        This operation requires AWS Signature Version 4. For more information, see Authenticating Requests (AWS Signature * Version 4).

        @@ -6052,15 +6046,15 @@ export class S3 extends S3Client { * iam:PassRole permission.

        * *

        Specify the replication configuration in the request body. In the replication - * configuration, you provide the name of the destination bucket where you want Amazon S3 to - * replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your behalf, - * and other relevant information.

        + * configuration, you provide the name of the destination bucket or buckets where you want + * Amazon S3 to replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your + * behalf, and other relevant information.

        * * *

        A replication configuration must include at least one rule, and can contain a maximum of * 1,000. Each rule identifies a subset of objects to replicate by filtering the objects in * the source bucket. To choose additional subsets of objects to replicate, add a rule for - * each subset. All rules must specify the same destination bucket.

        + * each subset.

        * *

        To specify a subset of the objects in the source bucket to apply a replication rule to, * add the Filter element as a child of the Rule element. You can filter objects based on an @@ -6564,11 +6558,11 @@ export class S3 extends S3Client { *

        * Server-side Encryption *

        - *

        You can optionally request server-side encryption. With server-side encryption, Amazon S3 - * encrypts your data as it writes it to disks in its data centers and decrypts the data when - * you access it. You have the option to provide your own encryption key or use AWS managed - * encryption keys. For more information, see Using Server-Side - * Encryption.

        + *

        You can optionally request server-side encryption. With server-side encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts the data + * when you access it. You have the option to provide your own encryption key or use AWS + * managed encryption keys (SSE-S3 or SSE-KMS). For more information, see Using Server-Side + * Encryption.

        + *

        If you request server-side encryption using AWS Key Management Service (SSE-KMS), you can enable an S3 Bucket Key at the object-level. For more information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        *

        * Access Control List (ACL)-Specific Request * Headers diff --git a/clients/client-s3/commands/CopyObjectCommand.ts b/clients/client-s3/commands/CopyObjectCommand.ts index 78e2e9c84e97..8083154d9755 100644 --- a/clients/client-s3/commands/CopyObjectCommand.ts +++ b/clients/client-s3/commands/CopyObjectCommand.ts @@ -136,19 +136,12 @@ export type CopyObjectCommandOutput = CopyObjectOutput & __MetadataBearer; * x-amz-copy-source, must be signed.

        * *

        - * Encryption + * Server-side encryption *

        - *

        The source object that you are copying can be encrypted or unencrypted. The source - * object can be encrypted with server-side encryption using AWS managed encryption keys - * (SSE-S3 or SSE-KMS) or by using a customer-provided encryption key. With server-side - * encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and - * decrypts the data when you access it.

        - *

        You can optionally use the appropriate encryption-related headers to request server-side - * encryption for the target object. You have the option to provide your own encryption key or - * use SSE-S3 or SSE-KMS, regardless of the form of server-side encryption that was used to - * encrypt the source object. You can even request encryption if the source object was not - * encrypted. For more information about server-side encryption, see Using - * Server-Side Encryption.

        + *

        When you perform a CopyObject operation, you can optionally use the appropriate encryption-related headers to encrypt the object using server-side encryption with AWS managed encryption keys (SSE-S3 or SSE-KMS) or a customer-provided encryption key. With server-side encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts the data when you access it. For more information about server-side encryption, see Using + * Server-Side Encryption.

        + *

        If a target object uses SSE-KMS, you can enable an S3 Bucket Key for the object. For more + * information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        *

        * Access Control List (ACL)-Specific Request * Headers diff --git a/clients/client-s3/commands/PutBucketEncryptionCommand.ts b/clients/client-s3/commands/PutBucketEncryptionCommand.ts index e82b890158ab..8ba914907706 100644 --- a/clients/client-s3/commands/PutBucketEncryptionCommand.ts +++ b/clients/client-s3/commands/PutBucketEncryptionCommand.ts @@ -22,13 +22,14 @@ export type PutBucketEncryptionCommandInput = PutBucketEncryptionRequest; export type PutBucketEncryptionCommandOutput = __MetadataBearer; /** - *

        This implementation of the PUT operation uses the encryption - * subresource to set the default encryption state of an existing bucket.

        - *

        This implementation of the PUT operation sets default encryption for a - * bucket using server-side encryption with Amazon S3-managed keys SSE-S3 or AWS KMS customer - * master keys (CMKs) (SSE-KMS). For information about the Amazon S3 default encryption feature, - * see Amazon S3 Default Bucket - * Encryption.

        + *

        This operation uses the encryption subresource to configure default + * encryption and Amazon S3 Bucket Key for an existing bucket.

        + *

        Default encryption for a bucket can use server-side encryption with Amazon S3-managed keys + * (SSE-S3) or AWS KMS customer master keys (SSE-KMS). If you specify default encryption + * using SSE-KMS, you can also configure Amazon S3 Bucket Key. For information about default + * encryption, see Amazon S3 default bucket encryption + * in the Amazon Simple Storage Service Developer Guide. For more information about S3 Bucket Keys, + * see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        * *

        This operation requires AWS Signature Version 4. For more information, see Authenticating Requests (AWS Signature * Version 4).

        diff --git a/clients/client-s3/commands/PutBucketReplicationCommand.ts b/clients/client-s3/commands/PutBucketReplicationCommand.ts index fbf8ae746c89..2282ae32db7c 100644 --- a/clients/client-s3/commands/PutBucketReplicationCommand.ts +++ b/clients/client-s3/commands/PutBucketReplicationCommand.ts @@ -30,15 +30,15 @@ export type PutBucketReplicationCommandOutput = __MetadataBearer; * iam:PassRole permission.

        * *

        Specify the replication configuration in the request body. In the replication - * configuration, you provide the name of the destination bucket where you want Amazon S3 to - * replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your behalf, - * and other relevant information.

        + * configuration, you provide the name of the destination bucket or buckets where you want + * Amazon S3 to replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your + * behalf, and other relevant information.

        * * *

        A replication configuration must include at least one rule, and can contain a maximum of * 1,000. Each rule identifies a subset of objects to replicate by filtering the objects in * the source bucket. To choose additional subsets of objects to replicate, add a rule for - * each subset. All rules must specify the same destination bucket.

        + * each subset.

        * *

        To specify a subset of the objects in the source bucket to apply a replication rule to, * add the Filter element as a child of the Rule element. You can filter objects based on an diff --git a/clients/client-s3/commands/PutObjectCommand.ts b/clients/client-s3/commands/PutObjectCommand.ts index e8378fb26881..c92b272df119 100644 --- a/clients/client-s3/commands/PutObjectCommand.ts +++ b/clients/client-s3/commands/PutObjectCommand.ts @@ -50,11 +50,11 @@ export type PutObjectCommandOutput = PutObjectOutput & __MetadataBearer; *

        * Server-side Encryption *

        - *

        You can optionally request server-side encryption. With server-side encryption, Amazon S3 - * encrypts your data as it writes it to disks in its data centers and decrypts the data when - * you access it. You have the option to provide your own encryption key or use AWS managed - * encryption keys. For more information, see Using Server-Side - * Encryption.

        + *

        You can optionally request server-side encryption. With server-side encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts the data + * when you access it. You have the option to provide your own encryption key or use AWS + * managed encryption keys (SSE-S3 or SSE-KMS). For more information, see Using Server-Side + * Encryption.

        + *

        If you request server-side encryption using AWS Key Management Service (SSE-KMS), you can enable an S3 Bucket Key at the object-level. For more information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        *

        * Access Control List (ACL)-Specific Request * Headers diff --git a/clients/client-s3/models/models_0.ts b/clients/client-s3/models/models_0.ts index a0507f4021b8..3649ab82bfc5 100644 --- a/clients/client-s3/models/models_0.ts +++ b/clients/client-s3/models/models_0.ts @@ -320,6 +320,11 @@ export interface CompleteMultipartUploadOutput { */ SSEKMSKeyId?: string; + /** + *

        Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

        + */ + BucketKeyEnabled?: boolean; + /** *

        If present, indicates that the requester was successfully charged for the * request.

        @@ -489,6 +494,11 @@ export interface CopyObjectOutput { */ SSEKMSEncryptionContext?: string; + /** + *

        Indicates whether the copied object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

        + */ + BucketKeyEnabled?: boolean; + /** *

        If present, indicates that the requester was successfully charged for the * request.

        @@ -735,6 +745,12 @@ export interface CopyObjectRequest { */ SSEKMSEncryptionContext?: string; + /** + *

        Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using AWS KMS (SSE-KMS). Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS.

        + *

        Specifying this header with a COPY operation doesn’t affect bucket-level settings for S3 Bucket Key.

        + */ + BucketKeyEnabled?: boolean; + /** *

        Specifies the algorithm to use when decrypting the source object (for example, * AES256).

        @@ -1037,6 +1053,11 @@ export interface CreateMultipartUploadOutput { */ SSEKMSEncryptionContext?: string; + /** + *

        Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

        + */ + BucketKeyEnabled?: boolean; + /** *

        If present, indicates that the requester was successfully charged for the * request.

        @@ -1194,6 +1215,12 @@ export interface CreateMultipartUploadRequest { */ SSEKMSEncryptionContext?: string; + /** + *

        Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using AWS KMS (SSE-KMS). Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS.

        + *

        Specifying this header with an object operation doesn’t affect bucket-level settings for S3 Bucket Key.

        + */ + BucketKeyEnabled?: boolean; + /** *

        Confirms that the requester knows that they will be charged for the request. Bucket * owners need not specify this parameter in their requests. For information about downloading @@ -4158,6 +4185,12 @@ export interface ServerSideEncryptionRule { * be applied.

        */ ApplyServerSideEncryptionByDefault?: ServerSideEncryptionByDefault; + + /** + *

        Specifies whether Amazon S3 should use an S3 Bucket Key with server-side encryption using KMS (SSE-KMS) for new objects in the bucket. Existing objects are not affected. Setting the BucketKeyEnabled element to true causes Amazon S3 to use an S3 Bucket Key. By default, S3 Bucket Key is not enabled.

        + *

        For more information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

        + */ + BucketKeyEnabled?: boolean; } export namespace ServerSideEncryptionRule { @@ -5650,9 +5683,9 @@ export type DeleteMarkerReplicationStatus = "Disabled" | "Enabled"; * DeleteMarkerReplication element. If your Filter includes a * Tag element, the DeleteMarkerReplication * Status must be set to Disabled, because Amazon S3 does not support replicating - * delete markers for tag-based rules. For an example configuration, see Basic - * Rule Configuration.

        - *

        For more information about delete marker replication, see Basic Rule Configuration.

        + * delete markers for tag-based rules. For an example configuration, see Basic Rule Configuration.

        + *

        For more information about delete marker replication, see Basic Rule + * Configuration.

        * *

        If you are using an earlier version of the replication configuration, Amazon S3 handles * replication of delete markers differently. For more information, see Backward Compatibility.

        @@ -5968,6 +6001,32 @@ export namespace ReplicationRuleFilter { }; } +export type ReplicaModificationsStatus = "Disabled" | "Enabled"; + +/** + *

        A filter that you can specify for selection for modifications on replicas. Amazon S3 doesn't + * replicate replica modifications by default. In the latest version of replication + * configuration (when Filter is specified), you can specify this element and set + * the status to Enabled to replicate modifications on replicas.

        + * + *

        If you don't specify the Filter element, Amazon S3 assumes that the + * replication configuration is the earlier version, V1. In the earlier version, this + * element is not allowed.

        + *
        + */ +export interface ReplicaModifications { + /** + *

        Specifies whether Amazon S3 replicates modifications on replicas.

        + */ + Status: ReplicaModificationsStatus | string | undefined; +} + +export namespace ReplicaModifications { + export const filterSensitiveLog = (obj: ReplicaModifications): any => ({ + ...obj, + }); +} + export type SseKmsEncryptedObjectsStatus = "Disabled" | "Enabled"; /** @@ -6002,6 +6061,19 @@ export interface SourceSelectionCriteria { * this element is required.

        */ SseKmsEncryptedObjects?: SseKmsEncryptedObjects; + + /** + *

        A filter that you can specify for selections for modifications on replicas. Amazon S3 doesn't + * replicate replica modifications by default. In the latest version of replication + * configuration (when Filter is specified), you can specify this element and set + * the status to Enabled to replicate modifications on replicas.

        + * + *

        If you don't specify the Filter element, Amazon S3 assumes that the + * replication configuration is the earlier version, V1. In the earlier version, this + * element is not allowed

        + *
        + */ + ReplicaModifications?: ReplicaModifications; } export namespace SourceSelectionCriteria { @@ -6022,19 +6094,11 @@ export interface ReplicationRule { ID?: string; /** - *

        The priority associated with the rule. If you specify multiple rules in a replication - * configuration, Amazon S3 prioritizes the rules to prevent conflicts when filtering. If two or - * more rules identify the same object based on a specified filter, the rule with higher - * priority takes precedence. For example:

        - *
          - *
        • - *

          Same object quality prefix-based filter criteria if prefixes you specified in - * multiple rules overlap

          - *
        • - *
        • - *

          Same object qualify tag-based filter criteria specified in multiple rules

          - *
        • - *
        + *

        The priority indicates which rule has precedence whenever two or more replication rules + * conflict. Amazon S3 will attempt to replicate objects according to all replication rules. + * However, if there are two or more rules with the same destination bucket, then objects will + * be replicated according to the rule with the highest priority. The higher the number, the + * higher the priority.

        *

        For more information, see Replication in the * Amazon Simple Storage Service Developer Guide.

        */ @@ -6085,9 +6149,9 @@ export interface ReplicationRule { * DeleteMarkerReplication element. If your Filter includes a * Tag element, the DeleteMarkerReplication * Status must be set to Disabled, because Amazon S3 does not support replicating - * delete markers for tag-based rules. For an example configuration, see Basic - * Rule Configuration.

        - *

        For more information about delete marker replication, see Basic Rule Configuration.

        + * delete markers for tag-based rules. For an example configuration, see Basic Rule Configuration.

        + *

        For more information about delete marker replication, see Basic Rule + * Configuration.

        * *

        If you are using an earlier version of the replication configuration, Amazon S3 handles * replication of delete markers differently. For more information, see Backward Compatibility.

        @@ -6622,6 +6686,11 @@ export interface GetObjectOutput { */ SSEKMSKeyId?: string; + /** + *

        Indicates whether the object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

        + */ + BucketKeyEnabled?: boolean; + /** *

        Provides storage class information of the object. Amazon S3 returns this header for all * objects except for S3 Standard storage class objects.

        @@ -7506,6 +7575,11 @@ export interface HeadObjectOutput { */ SSEKMSKeyId?: string; + /** + *

        Indicates whether the object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

        + */ + BucketKeyEnabled?: boolean; + /** *

        Provides storage class information of the object. Amazon S3 returns this header for all * objects except for S3 Standard storage class objects.

        @@ -7523,10 +7597,10 @@ export interface HeadObjectOutput { /** *

        Amazon S3 can return this header if your request involves a bucket that is either a source or - * destination in a replication rule.

        + * a destination in a replication rule.

        * *

        In replication, you have a source bucket on which you configure replication and - * destination bucket where Amazon S3 stores object replicas. When you request an object + * destination bucket or buckets where Amazon S3 stores object replicas. When you request an object * (GetObject) or object metadata (HeadObject) from these * buckets, Amazon S3 will return the x-amz-replication-status header in the response * as follows:

        @@ -7544,9 +7618,18 @@ export interface HeadObjectOutput { * FAILED indicating object replication status.

        * *
      • - *

        If requesting an object from the destination bucket — Amazon S3 will return the + *

        If requesting an object from a destination bucket — Amazon S3 will return the * x-amz-replication-status header with value REPLICA if the object in - * your request is a replica that Amazon S3 created.

        + * your request is a replica that Amazon S3 created and there is no replica modification + * replication in progress.

        + *
      • + *
      • + *

        When replicating objects to multiple destination buckets the + * x-amz-replication-status header acts differently. The header of the + * source object will only return a value of COMPLETED when replication is successful to + * all destinations. The header will remain at value PENDING until replication has + * completed for all destinations. If one or more destinations fails replication the + * header will return FAILED.

        *
      • *
      * @@ -7945,7 +8028,7 @@ export interface Bucket { Name?: string; /** - *

      Date the bucket was created.

      + *

      Date the bucket was created. This date can change when making changes to your bucket, such as editing its bucket policy.

      */ CreationDate?: Date; } @@ -9825,6 +9908,11 @@ export interface PutObjectOutput { */ SSEKMSEncryptionContext?: string; + /** + *

      Indicates whether the uploaded object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

      + */ + BucketKeyEnabled?: boolean; + /** *

      If present, indicates that the requester was successfully charged for the * request.

      @@ -10028,6 +10116,12 @@ export interface PutObjectRequest { */ SSEKMSEncryptionContext?: string; + /** + *

      Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using AWS KMS (SSE-KMS). Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS.

      + *

      Specifying this header with a PUT operation doesn’t affect bucket-level settings for S3 Bucket Key.

      + */ + BucketKeyEnabled?: boolean; + /** *

      Confirms that the requester knows that they will be charged for the request. Bucket * owners need not specify this parameter in their requests. For information about downloading @@ -10539,76 +10633,3 @@ export namespace Encryption { ...(obj.KMSKeyId && { KMSKeyId: SENSITIVE_STRING }), }); } - -/** - *

      A metadata key-value pair to store with an object.

      - */ -export interface MetadataEntry { - /** - *

      Name of the Object.

      - */ - Name?: string; - - /** - *

      Value of the Object.

      - */ - Value?: string; -} - -export namespace MetadataEntry { - export const filterSensitiveLog = (obj: MetadataEntry): any => ({ - ...obj, - }); -} - -/** - *

      Describes an Amazon S3 location that will receive the results of the restore request.

      - */ -export interface S3Location { - /** - *

      The name of the bucket where the restore results will be placed.

      - */ - BucketName: string | undefined; - - /** - *

      The prefix that is prepended to the restore results for this request.

      - */ - Prefix: string | undefined; - - /** - *

      Contains the type of server-side encryption used.

      - */ - Encryption?: Encryption; - - /** - *

      The canned ACL to apply to the restore results.

      - */ - CannedACL?: ObjectCannedACL | string; - - /** - *

      A list of grants that control access to the staged results.

      - */ - AccessControlList?: Grant[]; - - /** - *

      The tag-set that is applied to the restore results.

      - */ - Tagging?: Tagging; - - /** - *

      A list of metadata to store with the restore results in S3.

      - */ - UserMetadata?: MetadataEntry[]; - - /** - *

      The class of storage used to store the restore results.

      - */ - StorageClass?: StorageClass | string; -} - -export namespace S3Location { - export const filterSensitiveLog = (obj: S3Location): any => ({ - ...obj, - ...(obj.Encryption && { Encryption: Encryption.filterSensitiveLog(obj.Encryption) }), - }); -} diff --git a/clients/client-s3/models/models_1.ts b/clients/client-s3/models/models_1.ts index 594400ec6d1f..de9ccbb16311 100644 --- a/clients/client-s3/models/models_1.ts +++ b/clients/client-s3/models/models_1.ts @@ -1,7 +1,91 @@ -import { GlacierJobParameters, RequestCharged, RequestPayer, S3Location, ServerSideEncryption, Tier } from "./models_0"; +import { + Encryption, + GlacierJobParameters, + Grant, + ObjectCannedACL, + RequestCharged, + RequestPayer, + ServerSideEncryption, + StorageClass, + Tagging, + Tier, +} from "./models_0"; import { SENSITIVE_STRING } from "@aws-sdk/smithy-client"; import { Readable } from "stream"; +/** + *

      A metadata key-value pair to store with an object.

      + */ +export interface MetadataEntry { + /** + *

      Name of the Object.

      + */ + Name?: string; + + /** + *

      Value of the Object.

      + */ + Value?: string; +} + +export namespace MetadataEntry { + export const filterSensitiveLog = (obj: MetadataEntry): any => ({ + ...obj, + }); +} + +/** + *

      Describes an Amazon S3 location that will receive the results of the restore request.

      + */ +export interface S3Location { + /** + *

      The name of the bucket where the restore results will be placed.

      + */ + BucketName: string | undefined; + + /** + *

      The prefix that is prepended to the restore results for this request.

      + */ + Prefix: string | undefined; + + /** + *

      Contains the type of server-side encryption used.

      + */ + Encryption?: Encryption; + + /** + *

      The canned ACL to apply to the restore results.

      + */ + CannedACL?: ObjectCannedACL | string; + + /** + *

      A list of grants that control access to the staged results.

      + */ + AccessControlList?: Grant[]; + + /** + *

      The tag-set that is applied to the restore results.

      + */ + Tagging?: Tagging; + + /** + *

      A list of metadata to store with the restore results in S3.

      + */ + UserMetadata?: MetadataEntry[]; + + /** + *

      The class of storage used to store the restore results.

      + */ + StorageClass?: StorageClass | string; +} + +export namespace S3Location { + export const filterSensitiveLog = (obj: S3Location): any => ({ + ...obj, + ...(obj.Encryption && { Encryption: Encryption.filterSensitiveLog(obj.Encryption) }), + }); +} + /** *

      Describes the location where the restore job's output is stored.

      */ @@ -827,6 +911,11 @@ export interface UploadPartOutput { */ SSEKMSKeyId?: string; + /** + *

      Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

      + */ + BucketKeyEnabled?: boolean; + /** *

      If present, indicates that the requester was successfully charged for the * request.

      @@ -984,6 +1073,11 @@ export interface UploadPartCopyOutput { */ SSEKMSKeyId?: string; + /** + *

      Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

      + */ + BucketKeyEnabled?: boolean; + /** *

      If present, indicates that the requester was successfully charged for the * request.

      diff --git a/clients/client-s3/protocols/Aws_restXml.ts b/clients/client-s3/protocols/Aws_restXml.ts index 4f2cd653628c..c8937bdd5915 100644 --- a/clients/client-s3/protocols/Aws_restXml.ts +++ b/clients/client-s3/protocols/Aws_restXml.ts @@ -296,7 +296,6 @@ import { LifecycleRuleAndOperator, LifecycleRuleFilter, LoggingEnabled, - MetadataEntry, Metrics, MetricsAndOperator, MetricsConfiguration, @@ -326,6 +325,7 @@ import { QueueConfiguration, Redirect, RedirectAllRequestsTo, + ReplicaModifications, ReplicationConfiguration, ReplicationRule, ReplicationRuleAndOperator, @@ -335,7 +335,6 @@ import { RequestPaymentConfiguration, RoutingRule, S3KeyFilter, - S3Location, SSEKMS, SSES3, ServerSideEncryptionByDefault, @@ -365,6 +364,7 @@ import { InputSerialization, JSONInput, JSONOutput, + MetadataEntry, OutputLocation, OutputSerialization, ParquetInput, @@ -372,6 +372,7 @@ import { RecordsEvent, RequestProgress, RestoreRequest, + S3Location, ScanRange, SelectObjectContentEventStream, SelectParameters, @@ -563,6 +564,9 @@ export const serializeAws_restXmlCopyObjectCommand = async ( ...(isSerializableHeaderValue(input.SSEKMSEncryptionContext) && { "x-amz-server-side-encryption-context": input.SSEKMSEncryptionContext!, }), + ...(isSerializableHeaderValue(input.BucketKeyEnabled) && { + "x-amz-server-side-encryption-bucket-key-enabled": input.BucketKeyEnabled!.toString(), + }), ...(isSerializableHeaderValue(input.CopySourceSSECustomerAlgorithm) && { "x-amz-copy-source-server-side-encryption-customer-algorithm": input.CopySourceSSECustomerAlgorithm!, }), @@ -722,6 +726,9 @@ export const serializeAws_restXmlCreateMultipartUploadCommand = async ( ...(isSerializableHeaderValue(input.SSEKMSEncryptionContext) && { "x-amz-server-side-encryption-context": input.SSEKMSEncryptionContext!, }), + ...(isSerializableHeaderValue(input.BucketKeyEnabled) && { + "x-amz-server-side-encryption-bucket-key-enabled": input.BucketKeyEnabled!.toString(), + }), 
...(isSerializableHeaderValue(input.RequestPayer) && { "x-amz-request-payer": input.RequestPayer! }), ...(isSerializableHeaderValue(input.Tagging) && { "x-amz-tagging": input.Tagging! }), ...(isSerializableHeaderValue(input.ObjectLockMode) && { "x-amz-object-lock-mode": input.ObjectLockMode! }), @@ -3992,6 +3999,9 @@ export const serializeAws_restXmlPutObjectCommand = async ( ...(isSerializableHeaderValue(input.SSEKMSEncryptionContext) && { "x-amz-server-side-encryption-context": input.SSEKMSEncryptionContext!, }), + ...(isSerializableHeaderValue(input.BucketKeyEnabled) && { + "x-amz-server-side-encryption-bucket-key-enabled": input.BucketKeyEnabled!.toString(), + }), ...(isSerializableHeaderValue(input.RequestPayer) && { "x-amz-request-payer": input.RequestPayer! }), ...(isSerializableHeaderValue(input.Tagging) && { "x-amz-tagging": input.Tagging! }), ...(isSerializableHeaderValue(input.ObjectLockMode) && { "x-amz-object-lock-mode": input.ObjectLockMode! }), @@ -4790,6 +4800,7 @@ export const deserializeAws_restXmlCompleteMultipartUploadCommand = async ( const contents: CompleteMultipartUploadCommandOutput = { $metadata: deserializeMetadata(output), Bucket: undefined, + BucketKeyEnabled: undefined, ETag: undefined, Expiration: undefined, Key: undefined, @@ -4811,6 +4822,9 @@ export const deserializeAws_restXmlCompleteMultipartUploadCommand = async ( if (output.headers["x-amz-server-side-encryption-aws-kms-key-id"] !== undefined) { contents.SSEKMSKeyId = output.headers["x-amz-server-side-encryption-aws-kms-key-id"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-request-charged"] !== undefined) { contents.RequestCharged = output.headers["x-amz-request-charged"]; } @@ -4868,6 +4882,7 @@ export const deserializeAws_restXmlCopyObjectCommand = async ( } const contents: 
CopyObjectCommandOutput = { $metadata: deserializeMetadata(output), + BucketKeyEnabled: undefined, CopyObjectResult: undefined, CopySourceVersionId: undefined, Expiration: undefined, @@ -4903,6 +4918,9 @@ export const deserializeAws_restXmlCopyObjectCommand = async ( if (output.headers["x-amz-server-side-encryption-context"] !== undefined) { contents.SSEKMSEncryptionContext = output.headers["x-amz-server-side-encryption-context"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-request-charged"] !== undefined) { contents.RequestCharged = output.headers["x-amz-request-charged"]; } @@ -5023,6 +5041,7 @@ export const deserializeAws_restXmlCreateMultipartUploadCommand = async ( AbortDate: undefined, AbortRuleId: undefined, Bucket: undefined, + BucketKeyEnabled: undefined, Key: undefined, RequestCharged: undefined, SSECustomerAlgorithm: undefined, @@ -5053,6 +5072,9 @@ export const deserializeAws_restXmlCreateMultipartUploadCommand = async ( if (output.headers["x-amz-server-side-encryption-context"] !== undefined) { contents.SSEKMSEncryptionContext = output.headers["x-amz-server-side-encryption-context"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-request-charged"] !== undefined) { contents.RequestCharged = output.headers["x-amz-request-charged"]; } @@ -6865,6 +6887,7 @@ export const deserializeAws_restXmlGetObjectCommand = async ( $metadata: deserializeMetadata(output), AcceptRanges: undefined, Body: undefined, + BucketKeyEnabled: undefined, CacheControl: undefined, ContentDisposition: undefined, ContentEncoding: undefined, @@ -6958,6 +6981,9 @@ export const deserializeAws_restXmlGetObjectCommand = async ( if 
(output.headers["x-amz-server-side-encryption-aws-kms-key-id"] !== undefined) { contents.SSEKMSKeyId = output.headers["x-amz-server-side-encryption-aws-kms-key-id"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-storage-class"] !== undefined) { contents.StorageClass = output.headers["x-amz-storage-class"]; } @@ -7451,6 +7477,7 @@ export const deserializeAws_restXmlHeadObjectCommand = async ( $metadata: deserializeMetadata(output), AcceptRanges: undefined, ArchiveStatus: undefined, + BucketKeyEnabled: undefined, CacheControl: undefined, ContentDisposition: undefined, ContentEncoding: undefined, @@ -7542,6 +7569,9 @@ export const deserializeAws_restXmlHeadObjectCommand = async ( if (output.headers["x-amz-server-side-encryption-aws-kms-key-id"] !== undefined) { contents.SSEKMSKeyId = output.headers["x-amz-server-side-encryption-aws-kms-key-id"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-storage-class"] !== undefined) { contents.StorageClass = output.headers["x-amz-storage-class"]; } @@ -9226,6 +9256,7 @@ export const deserializeAws_restXmlPutObjectCommand = async ( } const contents: PutObjectCommandOutput = { $metadata: deserializeMetadata(output), + BucketKeyEnabled: undefined, ETag: undefined, Expiration: undefined, RequestCharged: undefined, @@ -9260,6 +9291,9 @@ export const deserializeAws_restXmlPutObjectCommand = async ( if (output.headers["x-amz-server-side-encryption-context"] !== undefined) { contents.SSEKMSEncryptionContext = output.headers["x-amz-server-side-encryption-context"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + 
contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-request-charged"] !== undefined) { contents.RequestCharged = output.headers["x-amz-request-charged"]; } @@ -9709,6 +9743,7 @@ export const deserializeAws_restXmlUploadPartCommand = async ( } const contents: UploadPartCommandOutput = { $metadata: deserializeMetadata(output), + BucketKeyEnabled: undefined, ETag: undefined, RequestCharged: undefined, SSECustomerAlgorithm: undefined, @@ -9731,6 +9766,9 @@ export const deserializeAws_restXmlUploadPartCommand = async ( if (output.headers["x-amz-server-side-encryption-aws-kms-key-id"] !== undefined) { contents.SSEKMSKeyId = output.headers["x-amz-server-side-encryption-aws-kms-key-id"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-request-charged"] !== undefined) { contents.RequestCharged = output.headers["x-amz-request-charged"]; } @@ -9776,6 +9814,7 @@ export const deserializeAws_restXmlUploadPartCopyCommand = async ( } const contents: UploadPartCopyCommandOutput = { $metadata: deserializeMetadata(output), + BucketKeyEnabled: undefined, CopyPartResult: undefined, CopySourceVersionId: undefined, RequestCharged: undefined, @@ -9799,6 +9838,9 @@ export const deserializeAws_restXmlUploadPartCopyCommand = async ( if (output.headers["x-amz-server-side-encryption-aws-kms-key-id"] !== undefined) { contents.SSEKMSKeyId = output.headers["x-amz-server-side-encryption-aws-kms-key-id"]; } + if (output.headers["x-amz-server-side-encryption-bucket-key-enabled"] !== undefined) { + contents.BucketKeyEnabled = output.headers["x-amz-server-side-encryption-bucket-key-enabled"] === "true"; + } if (output.headers["x-amz-request-charged"] !== undefined) { contents.RequestCharged = output.headers["x-amz-request-charged"]; } @@ 
-11468,6 +11510,17 @@ const serializeAws_restXmlRedirectAllRequestsTo = (input: RedirectAllRequestsTo, return bodyNode; }; +const serializeAws_restXmlReplicaModifications = (input: ReplicaModifications, context: __SerdeContext): any => { + const bodyNode = new __XmlNode("ReplicaModifications"); + if (input.Status !== undefined) { + const node = new __XmlNode("ReplicaModificationsStatus") + .addChildNode(new __XmlText(input.Status)) + .withName("Status"); + bodyNode.addChildNode(node); + } + return bodyNode; +}; + const serializeAws_restXmlReplicationConfiguration = ( input: ReplicationConfiguration, context: __SerdeContext @@ -11829,6 +11882,12 @@ const serializeAws_restXmlServerSideEncryptionRule = ( ).withName("ApplyServerSideEncryptionByDefault"); bodyNode.addChildNode(node); } + if (input.BucketKeyEnabled !== undefined) { + const node = new __XmlNode("BucketKeyEnabled") + .addChildNode(new __XmlText(String(input.BucketKeyEnabled))) + .withName("BucketKeyEnabled"); + bodyNode.addChildNode(node); + } return bodyNode; }; @@ -11850,6 +11909,12 @@ const serializeAws_restXmlSourceSelectionCriteria = (input: SourceSelectionCrite ); bodyNode.addChildNode(node); } + if (input.ReplicaModifications !== undefined) { + const node = serializeAws_restXmlReplicaModifications(input.ReplicaModifications, context).withName( + "ReplicaModifications" + ); + bodyNode.addChildNode(node); + } return bodyNode; }; @@ -13480,6 +13545,16 @@ const deserializeAws_restXmlRedirectAllRequestsTo = (output: any, context: __Ser return contents; }; +const deserializeAws_restXmlReplicaModifications = (output: any, context: __SerdeContext): ReplicaModifications => { + let contents: any = { + Status: undefined, + }; + if (output["Status"] !== undefined) { + contents.Status = output["Status"]; + } + return contents; +}; + const deserializeAws_restXmlReplicationConfiguration = ( output: any, context: __SerdeContext @@ -13688,6 +13763,7 @@ const deserializeAws_restXmlServerSideEncryptionRule = ( ): 
ServerSideEncryptionRule => { let contents: any = { ApplyServerSideEncryptionByDefault: undefined, + BucketKeyEnabled: undefined, }; if (output["ApplyServerSideEncryptionByDefault"] !== undefined) { contents.ApplyServerSideEncryptionByDefault = deserializeAws_restXmlServerSideEncryptionByDefault( @@ -13695,6 +13771,9 @@ const deserializeAws_restXmlServerSideEncryptionRule = ( context ); } + if (output["BucketKeyEnabled"] !== undefined) { + contents.BucketKeyEnabled = output["BucketKeyEnabled"] == "true"; + } return contents; }; @@ -13711,6 +13790,7 @@ const deserializeAws_restXmlSourceSelectionCriteria = ( ): SourceSelectionCriteria => { let contents: any = { SseKmsEncryptedObjects: undefined, + ReplicaModifications: undefined, }; if (output["SseKmsEncryptedObjects"] !== undefined) { contents.SseKmsEncryptedObjects = deserializeAws_restXmlSseKmsEncryptedObjects( @@ -13718,6 +13798,9 @@ const deserializeAws_restXmlSourceSelectionCriteria = ( context ); } + if (output["ReplicaModifications"] !== undefined) { + contents.ReplicaModifications = deserializeAws_restXmlReplicaModifications(output["ReplicaModifications"], context); + } return contents; }; diff --git a/clients/client-sagemaker-featurestore-runtime/.gitignore b/clients/client-sagemaker-featurestore-runtime/.gitignore new file mode 100644 index 000000000000..b41c05b597c4 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/.gitignore @@ -0,0 +1,14 @@ +/node_modules/ +/build/ +/coverage/ +/docs/ +/types/ +/dist/ +*.tsbuildinfo +*.tgz +*.log +package-lock.json + +*.d.ts +*.js +*.js.map diff --git a/clients/client-sagemaker-featurestore-runtime/.npmignore b/clients/client-sagemaker-featurestore-runtime/.npmignore new file mode 100644 index 000000000000..b7ff81137c4a --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/.npmignore @@ -0,0 +1,4 @@ +/coverage/ +/docs/ +tsconfig.test.json +*.tsbuildinfo diff --git a/clients/client-sagemaker-featurestore-runtime/LICENSE 
b/clients/client-sagemaker-featurestore-runtime/LICENSE new file mode 100644 index 000000000000..dd65ae06be7a --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. 
Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative 
Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/clients/client-sagemaker-featurestore-runtime/README.md b/clients/client-sagemaker-featurestore-runtime/README.md new file mode 100644 index 000000000000..84ad5d2ef35b --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/README.md @@ -0,0 +1,6 @@ +# @aws-sdk/client-sagemaker-featurestore-runtime + +[![NPM version](https://img.shields.io/npm/v/@aws-sdk/client-sagemaker-featurestore-runtime/rc.svg)](https://www.npmjs.com/package/@aws-sdk/client-sagemaker-featurestore-runtime) +[![NPM downloads](https://img.shields.io/npm/dm/@aws-sdk/client-sagemaker-featurestore-runtime.svg)](https://www.npmjs.com/package/@aws-sdk/client-sagemaker-featurestore-runtime) + +For SDK usage, please step to [SDK readme](https://github.com/aws/aws-sdk-js-v3). 
diff --git a/clients/client-sagemaker-featurestore-runtime/SageMakerFeatureStoreRuntime.ts b/clients/client-sagemaker-featurestore-runtime/SageMakerFeatureStoreRuntime.ts new file mode 100644 index 000000000000..6892cf6f340a --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/SageMakerFeatureStoreRuntime.ts @@ -0,0 +1,129 @@ +import { SageMakerFeatureStoreRuntimeClient } from "./SageMakerFeatureStoreRuntimeClient"; +import { + DeleteRecordCommand, + DeleteRecordCommandInput, + DeleteRecordCommandOutput, +} from "./commands/DeleteRecordCommand"; +import { GetRecordCommand, GetRecordCommandInput, GetRecordCommandOutput } from "./commands/GetRecordCommand"; +import { PutRecordCommand, PutRecordCommandInput, PutRecordCommandOutput } from "./commands/PutRecordCommand"; +import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; + +/** + *

      Contains all data plane API operations and data types for the Amazon SageMaker Feature + * Store. Use this API to put, delete, and retrieve (get) features from a feature + * store.

      + *

      Use the following operations to configure your OnlineStore and + * OfflineStore features, and to create and manage feature groups:

      + * + */ +export class SageMakerFeatureStoreRuntime extends SageMakerFeatureStoreRuntimeClient { + /** + *

      Deletes a Record from a FeatureGroup. A new record will show + * up in the OfflineStore when the DeleteRecord API is called. This + * record will have a value of True in the is_deleted column.

      + */ + public deleteRecord( + args: DeleteRecordCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteRecord(args: DeleteRecordCommandInput, cb: (err: any, data?: DeleteRecordCommandOutput) => void): void; + public deleteRecord( + args: DeleteRecordCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteRecordCommandOutput) => void + ): void; + public deleteRecord( + args: DeleteRecordCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteRecordCommandOutput) => void), + cb?: (err: any, data?: DeleteRecordCommandOutput) => void + ): Promise | void { + const command = new DeleteRecordCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

      <p>Use for <code>OnlineStore</code> serving from a <code>FeatureStore</code>. Only the + * latest records stored in the <code>OnlineStore</code> can be retrieved. If no Record with + * <code>RecordIdentifierValue</code> is found, then an empty result is returned.</p>

      + */ + public getRecord(args: GetRecordCommandInput, options?: __HttpHandlerOptions): Promise; + public getRecord(args: GetRecordCommandInput, cb: (err: any, data?: GetRecordCommandOutput) => void): void; + public getRecord( + args: GetRecordCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetRecordCommandOutput) => void + ): void; + public getRecord( + args: GetRecordCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetRecordCommandOutput) => void), + cb?: (err: any, data?: GetRecordCommandOutput) => void + ): Promise | void { + const command = new GetRecordCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

      <p>Used for data ingestion into the <code>FeatureStore</code>. The <code>PutRecord</code> + * API writes to both the <code>OnlineStore</code> and <code>OfflineStore</code>. If the + * record is the latest record for the <code>recordIdentifier</code>, the record is written to + * both the <code>OnlineStore</code> and <code>OfflineStore</code>. If the record is a + * historic record, it is written only to the <code>OfflineStore</code>.</p>

      + */ + public putRecord(args: PutRecordCommandInput, options?: __HttpHandlerOptions): Promise; + public putRecord(args: PutRecordCommandInput, cb: (err: any, data?: PutRecordCommandOutput) => void): void; + public putRecord( + args: PutRecordCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutRecordCommandOutput) => void + ): void; + public putRecord( + args: PutRecordCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: PutRecordCommandOutput) => void), + cb?: (err: any, data?: PutRecordCommandOutput) => void + ): Promise | void { + const command = new PutRecordCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } +} diff --git a/clients/client-sagemaker-featurestore-runtime/SageMakerFeatureStoreRuntimeClient.ts b/clients/client-sagemaker-featurestore-runtime/SageMakerFeatureStoreRuntimeClient.ts new file mode 100644 index 000000000000..332353d63a7a --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/SageMakerFeatureStoreRuntimeClient.ts @@ -0,0 +1,230 @@ +import { DeleteRecordCommandInput, DeleteRecordCommandOutput } from "./commands/DeleteRecordCommand"; +import { GetRecordCommandInput, GetRecordCommandOutput } from "./commands/GetRecordCommand"; +import { PutRecordCommandInput, PutRecordCommandOutput } from "./commands/PutRecordCommand"; +import { ClientDefaultValues as __ClientDefaultValues } from "./runtimeConfig"; +import { + EndpointsInputConfig, + EndpointsResolvedConfig, + RegionInputConfig, + RegionResolvedConfig, + resolveEndpointsConfig, + resolveRegionConfig, +} from "@aws-sdk/config-resolver"; +import { getContentLengthPlugin } from "@aws-sdk/middleware-content-length"; +import { + 
HostHeaderInputConfig, + HostHeaderResolvedConfig, + getHostHeaderPlugin, + resolveHostHeaderConfig, +} from "@aws-sdk/middleware-host-header"; +import { getLoggerPlugin } from "@aws-sdk/middleware-logger"; +import { RetryInputConfig, RetryResolvedConfig, getRetryPlugin, resolveRetryConfig } from "@aws-sdk/middleware-retry"; +import { + AwsAuthInputConfig, + AwsAuthResolvedConfig, + getAwsAuthPlugin, + resolveAwsAuthConfig, +} from "@aws-sdk/middleware-signing"; +import { + UserAgentInputConfig, + UserAgentResolvedConfig, + getUserAgentPlugin, + resolveUserAgentConfig, +} from "@aws-sdk/middleware-user-agent"; +import { HttpHandler as __HttpHandler } from "@aws-sdk/protocol-http"; +import { + Client as __Client, + SmithyConfiguration as __SmithyConfiguration, + SmithyResolvedConfiguration as __SmithyResolvedConfiguration, +} from "@aws-sdk/smithy-client"; +import { + RegionInfoProvider, + Credentials as __Credentials, + Decoder as __Decoder, + Encoder as __Encoder, + HashConstructor as __HashConstructor, + HttpHandlerOptions as __HttpHandlerOptions, + Logger as __Logger, + Provider as __Provider, + StreamCollector as __StreamCollector, + UrlParser as __UrlParser, +} from "@aws-sdk/types"; + +export type ServiceInputTypes = DeleteRecordCommandInput | GetRecordCommandInput | PutRecordCommandInput; + +export type ServiceOutputTypes = DeleteRecordCommandOutput | GetRecordCommandOutput | PutRecordCommandOutput; + +export interface ClientDefaults extends Partial<__SmithyResolvedConfiguration<__HttpHandlerOptions>> { + /** + * The HTTP handler to use. Fetch in browser and Https in Nodejs. + */ + requestHandler?: __HttpHandler; + + /** + * A constructor for a class implementing the @aws-sdk/types.Hash interface + * that computes the SHA-256 HMAC or checksum of a string or binary buffer. + */ + sha256?: __HashConstructor; + + /** + * The function that will be used to convert strings into HTTP endpoints. 
+ */ + urlParser?: __UrlParser; + + /** + * A function that can calculate the length of a request body. + */ + bodyLengthChecker?: (body: any) => number | undefined; + + /** + * A function that converts a stream into an array of bytes. + */ + streamCollector?: __StreamCollector; + + /** + * The function that will be used to convert a base64-encoded string to a byte array + */ + base64Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a base64-encoded string + */ + base64Encoder?: __Encoder; + + /** + * The function that will be used to convert a UTF8-encoded string to a byte array + */ + utf8Decoder?: __Decoder; + + /** + * The function that will be used to convert binary data to a UTF-8 encoded string + */ + utf8Encoder?: __Encoder; + + /** + * The string that will be used to populate default value in 'User-Agent' header + */ + defaultUserAgent?: string; + + /** + * The runtime environment + */ + runtime?: string; + + /** + * Disable dyanamically changing the endpoint of the client based on the hostPrefix + * trait of an operation. + */ + disableHostPrefix?: boolean; + + /** + * The service name with which to sign requests. + */ + signingName?: string; + + /** + * Default credentials provider; Not available in browser runtime + */ + credentialDefaultProvider?: (input: any) => __Provider<__Credentials>; + + /** + * The AWS region to which this client will send requests + */ + region?: string | __Provider; + + /** + * Value for how many times a request will be made at most in case of retry. + */ + maxAttempts?: number | __Provider; + + /** + * Optional logger for logging debug/info/warn/error. + */ + logger?: __Logger; + + /** + * Fetch related hostname, signing name or signing region with given region. 
+ */ + regionInfoProvider?: RegionInfoProvider; +} + +export type SageMakerFeatureStoreRuntimeClientConfig = Partial<__SmithyConfiguration<__HttpHandlerOptions>> & + ClientDefaults & + RegionInputConfig & + EndpointsInputConfig & + AwsAuthInputConfig & + RetryInputConfig & + UserAgentInputConfig & + HostHeaderInputConfig; + +export type SageMakerFeatureStoreRuntimeClientResolvedConfig = __SmithyResolvedConfiguration<__HttpHandlerOptions> & + Required & + RegionResolvedConfig & + EndpointsResolvedConfig & + AwsAuthResolvedConfig & + RetryResolvedConfig & + UserAgentResolvedConfig & + HostHeaderResolvedConfig; + +/** + *

      <p>Contains all data plane API operations and data types for the Amazon SageMaker Feature + * Store. Use this API to put, delete, and retrieve (get) features from a feature + * store.</p>

      + *

      <p>Use the following operations to configure your <code>OnlineStore</code> and + * <code>OfflineStore</code> features, and to create and manage feature groups:</p>

      + * + */ +export class SageMakerFeatureStoreRuntimeClient extends __Client< + __HttpHandlerOptions, + ServiceInputTypes, + ServiceOutputTypes, + SageMakerFeatureStoreRuntimeClientResolvedConfig +> { + readonly config: SageMakerFeatureStoreRuntimeClientResolvedConfig; + + constructor(configuration: SageMakerFeatureStoreRuntimeClientConfig) { + let _config_0 = { + ...__ClientDefaultValues, + ...configuration, + }; + let _config_1 = resolveRegionConfig(_config_0); + let _config_2 = resolveEndpointsConfig(_config_1); + let _config_3 = resolveAwsAuthConfig(_config_2); + let _config_4 = resolveRetryConfig(_config_3); + let _config_5 = resolveUserAgentConfig(_config_4); + let _config_6 = resolveHostHeaderConfig(_config_5); + super(_config_6); + this.config = _config_6; + this.middlewareStack.use(getAwsAuthPlugin(this.config)); + this.middlewareStack.use(getRetryPlugin(this.config)); + this.middlewareStack.use(getUserAgentPlugin(this.config)); + this.middlewareStack.use(getContentLengthPlugin(this.config)); + this.middlewareStack.use(getHostHeaderPlugin(this.config)); + this.middlewareStack.use(getLoggerPlugin(this.config)); + } + + destroy(): void { + super.destroy(); + } +} diff --git a/clients/client-sagemaker-featurestore-runtime/commands/DeleteRecordCommand.ts b/clients/client-sagemaker-featurestore-runtime/commands/DeleteRecordCommand.ts new file mode 100644 index 000000000000..06781e1b211d --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/commands/DeleteRecordCommand.ts @@ -0,0 +1,94 @@ +import { + SageMakerFeatureStoreRuntimeClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../SageMakerFeatureStoreRuntimeClient"; +import { DeleteRecordRequest } from "../models/models_0"; +import { + deserializeAws_restJson1DeleteRecordCommand, + serializeAws_restJson1DeleteRecordCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse 
as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteRecordCommandInput = DeleteRecordRequest; +export type DeleteRecordCommandOutput = __MetadataBearer; + +/** + *

      <p>Deletes a <code>Record</code> from a <code>FeatureGroup</code>. A new record will show + * up in the <code>OfflineStore</code> when the <code>DeleteRecord</code> API is called. This + * record will have a value of <code>True</code> in the <code>is_deleted</code> column.</p>

      + */ +export class DeleteRecordCommand extends $Command< + DeleteRecordCommandInput, + DeleteRecordCommandOutput, + SageMakerFeatureStoreRuntimeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteRecordCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerFeatureStoreRuntimeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerFeatureStoreRuntimeClient"; + const commandName = "DeleteRecordCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteRecordRequest.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteRecordCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1DeleteRecordCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1DeleteRecordCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker-featurestore-runtime/commands/GetRecordCommand.ts 
b/clients/client-sagemaker-featurestore-runtime/commands/GetRecordCommand.ts new file mode 100644 index 000000000000..2e8ac17a4b3d --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/commands/GetRecordCommand.ts @@ -0,0 +1,94 @@ +import { + SageMakerFeatureStoreRuntimeClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../SageMakerFeatureStoreRuntimeClient"; +import { GetRecordRequest, GetRecordResponse } from "../models/models_0"; +import { + deserializeAws_restJson1GetRecordCommand, + serializeAws_restJson1GetRecordCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetRecordCommandInput = GetRecordRequest; +export type GetRecordCommandOutput = GetRecordResponse & __MetadataBearer; + +/** + *

      <p>Use for <code>OnlineStore</code> serving from a <code>FeatureStore</code>. Only the + * latest records stored in the <code>OnlineStore</code> can be retrieved. If no Record with + * <code>RecordIdentifierValue</code> is found, then an empty result is returned.</p>

      + */ +export class GetRecordCommand extends $Command< + GetRecordCommandInput, + GetRecordCommandOutput, + SageMakerFeatureStoreRuntimeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetRecordCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerFeatureStoreRuntimeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerFeatureStoreRuntimeClient"; + const commandName = "GetRecordCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetRecordRequest.filterSensitiveLog, + outputFilterSensitiveLog: GetRecordResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetRecordCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1GetRecordCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1GetRecordCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker-featurestore-runtime/commands/PutRecordCommand.ts 
b/clients/client-sagemaker-featurestore-runtime/commands/PutRecordCommand.ts new file mode 100644 index 000000000000..dd3a62023c5c --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/commands/PutRecordCommand.ts @@ -0,0 +1,96 @@ +import { + SageMakerFeatureStoreRuntimeClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes, +} from "../SageMakerFeatureStoreRuntimeClient"; +import { PutRecordRequest } from "../models/models_0"; +import { + deserializeAws_restJson1PutRecordCommand, + serializeAws_restJson1PutRecordCommand, +} from "../protocols/Aws_restJson1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type PutRecordCommandInput = PutRecordRequest; +export type PutRecordCommandOutput = __MetadataBearer; + +/** + *

      <p>Used for data ingestion into the <code>FeatureStore</code>. The <code>PutRecord</code> + * API writes to both the <code>OnlineStore</code> and <code>OfflineStore</code>. If the + * record is the latest record for the <code>recordIdentifier</code>, the record is written to + * both the <code>OnlineStore</code> and <code>OfflineStore</code>. If the record is a + * historic record, it is written only to the <code>OfflineStore</code>.</p>

      + */ +export class PutRecordCommand extends $Command< + PutRecordCommandInput, + PutRecordCommandOutput, + SageMakerFeatureStoreRuntimeClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: PutRecordCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerFeatureStoreRuntimeClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerFeatureStoreRuntimeClient"; + const commandName = "PutRecordCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: PutRecordRequest.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: PutRecordCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_restJson1PutRecordCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_restJson1PutRecordCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker-featurestore-runtime/endpoints.ts b/clients/client-sagemaker-featurestore-runtime/endpoints.ts new file mode 100644 index 
000000000000..b8d3e657976b --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/endpoints.ts @@ -0,0 +1,81 @@ +import { RegionInfo, RegionInfoProvider } from "@aws-sdk/types"; + +// Partition default templates +const AWS_TEMPLATE = "sagemaker.{region}.amazonaws.com"; +const AWS_CN_TEMPLATE = "sagemaker.{region}.amazonaws.com.cn"; +const AWS_ISO_TEMPLATE = "sagemaker.{region}.c2s.ic.gov"; +const AWS_ISO_B_TEMPLATE = "sagemaker.{region}.sc2s.sgov.gov"; +const AWS_US_GOV_TEMPLATE = "sagemaker.{region}.amazonaws.com"; + +// Partition regions +const AWS_REGIONS = new Set([ + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +]); +const AWS_CN_REGIONS = new Set(["cn-north-1", "cn-northwest-1"]); +const AWS_ISO_REGIONS = new Set(["us-iso-east-1"]); +const AWS_ISO_B_REGIONS = new Set(["us-isob-east-1"]); +const AWS_US_GOV_REGIONS = new Set(["us-gov-east-1", "us-gov-west-1"]); + +export const defaultRegionInfoProvider: RegionInfoProvider = (region: string, options?: any) => { + let regionInfo: RegionInfo | undefined = undefined; + switch (region) { + // First, try to match exact region names. + // Next, try to match partition endpoints. 
+ default: + if (AWS_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + if (AWS_CN_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_CN_TEMPLATE.replace("{region}", region), + partition: "aws-cn", + }; + } + if (AWS_ISO_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_TEMPLATE.replace("{region}", region), + partition: "aws-iso", + }; + } + if (AWS_ISO_B_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_ISO_B_TEMPLATE.replace("{region}", region), + partition: "aws-iso-b", + }; + } + if (AWS_US_GOV_REGIONS.has(region)) { + regionInfo = { + hostname: AWS_US_GOV_TEMPLATE.replace("{region}", region), + partition: "aws-us-gov", + }; + } + // Finally, assume it's an AWS partition endpoint. + if (regionInfo === undefined) { + regionInfo = { + hostname: AWS_TEMPLATE.replace("{region}", region), + partition: "aws", + }; + } + } + return Promise.resolve(regionInfo); +}; diff --git a/clients/client-sagemaker-featurestore-runtime/index.ts b/clients/client-sagemaker-featurestore-runtime/index.ts new file mode 100644 index 000000000000..8a013f2544b1 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/index.ts @@ -0,0 +1,6 @@ +export * from "./SageMakerFeatureStoreRuntimeClient"; +export * from "./SageMakerFeatureStoreRuntime"; +export * from "./commands/DeleteRecordCommand"; +export * from "./commands/GetRecordCommand"; +export * from "./commands/PutRecordCommand"; +export * from "./models/index"; diff --git a/clients/client-sagemaker-featurestore-runtime/models/index.ts b/clients/client-sagemaker-featurestore-runtime/models/index.ts new file mode 100644 index 000000000000..09c5d6e09b8c --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/models/index.ts @@ -0,0 +1 @@ +export * from "./models_0"; diff --git a/clients/client-sagemaker-featurestore-runtime/models/models_0.ts b/clients/client-sagemaker-featurestore-runtime/models/models_0.ts new file mode 
100644 index 000000000000..04b55a192e24 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/models/models_0.ts @@ -0,0 +1,193 @@ +import { SENSITIVE_STRING, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; +import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; + +/** + *

      <p>You do not have permission to perform an action.</p>

      + */ +export interface AccessForbidden extends __SmithyException, $MetadataBearer { + name: "AccessForbidden"; + $fault: "client"; + Message?: string; +} + +export namespace AccessForbidden { + export const filterSensitiveLog = (obj: AccessForbidden): any => ({ + ...obj, + }); +} + +export interface DeleteRecordRequest { + /** + *

      <p>The name of the feature group to delete the record from.</p>

      + */ + FeatureGroupName: string | undefined; + + /** + *

      <p>The value for the <code>RecordIdentifier</code> that uniquely identifies the record, in + * string format.</p>

      + */ + RecordIdentifierValueAsString: string | undefined; + + /** + *

      <p>Timestamp indicating when the deletion event occurred. <code>EventTime</code> can be + * used to query data at a certain point in time.</p>

      + */ + EventTime: string | undefined; +} + +export namespace DeleteRecordRequest { + export const filterSensitiveLog = (obj: DeleteRecordRequest): any => ({ + ...obj, + }); +} + +/** + *

      <p>An internal failure occurred. Try your request again. If the problem + * persists, contact AWS customer support.</p>

      + */ +export interface InternalFailure extends __SmithyException, $MetadataBearer { + name: "InternalFailure"; + $fault: "server"; + Message?: string; +} + +export namespace InternalFailure { + export const filterSensitiveLog = (obj: InternalFailure): any => ({ + ...obj, + }); +} + +/** + *

      <p>The service is currently unavailable.</p>

      + */ +export interface ServiceUnavailable extends __SmithyException, $MetadataBearer { + name: "ServiceUnavailable"; + $fault: "server"; + Message?: string; +} + +export namespace ServiceUnavailable { + export const filterSensitiveLog = (obj: ServiceUnavailable): any => ({ + ...obj, + }); +} + +/** + *

      <p>There was an error validating your request.</p>

      + */ +export interface ValidationError extends __SmithyException, $MetadataBearer { + name: "ValidationError"; + $fault: "client"; + Message?: string; +} + +export namespace ValidationError { + export const filterSensitiveLog = (obj: ValidationError): any => ({ + ...obj, + }); +} + +export interface GetRecordRequest { + /** + *

      <p>The name of the feature group in which you want to put the records.</p>

      + */ + FeatureGroupName: string | undefined; + + /** + *

      <p>The value that corresponds to <code>RecordIdentifier</code> type and uniquely identifies + * the record in the <code>FeatureGroup</code>.</p>

      + */ + RecordIdentifierValueAsString: string | undefined; + + /** + *

      <p>List of names of Features to be retrieved. If not specified, the latest value for all + * the Features are returned.</p>

      + */ + FeatureNames?: string[]; +} + +export namespace GetRecordRequest { + export const filterSensitiveLog = (obj: GetRecordRequest): any => ({ + ...obj, + }); +} + +/** + *

      <p>The value associated with a feature.</p>

      + */ +export interface FeatureValue { + /** + *

      <p>The name of a feature that a feature value corresponds to.</p>

      + */ + FeatureName: string | undefined; + + /** + *

      <p>The value associated with a feature, in string format. Note that features types can be + * String, Integral, or Fractional. This value represents all three types as a string.</p>

      + */ + ValueAsString: string | undefined; +} + +export namespace FeatureValue { + export const filterSensitiveLog = (obj: FeatureValue): any => ({ + ...obj, + }); +} + +export interface GetRecordResponse { + /** + *

      <p>The record you requested. A list of <code>FeatureValues</code>.</p>

      + */ + Record?: FeatureValue[]; +} + +export namespace GetRecordResponse { + export const filterSensitiveLog = (obj: GetRecordResponse): any => ({ + ...obj, + }); +} + +/** + *

      <p>A resource that is required to perform an action was not found.</p>

      + */ +export interface ResourceNotFound extends __SmithyException, $MetadataBearer { + name: "ResourceNotFound"; + $fault: "client"; + Message?: string; +} + +export namespace ResourceNotFound { + export const filterSensitiveLog = (obj: ResourceNotFound): any => ({ + ...obj, + }); +} + +export interface PutRecordRequest { + /** + *

      <p>The name of the feature group that you want to insert the record into.</p>

      + */ + FeatureGroupName: string | undefined; + + /** + *

      <p>List of <code>FeatureValues</code> to be inserted. This will be a full over-write. If you only want + * to update few of the feature values, do the following:</p>

      + *
        + *
      • + *

        <p>Use <code>GetRecord</code> to retrieve the latest record.</p>

        + *
      • + *
      • + *

        <p>Update the record returned from <code>GetRecord</code>.</p>

        + *
      • + *
      • + *

        <p>Use <code>PutRecord</code> to update feature values.</p>

        + *
      • + *
      + */ + Record: FeatureValue[] | undefined; +} + +export namespace PutRecordRequest { + export const filterSensitiveLog = (obj: PutRecordRequest): any => ({ + ...obj, + }); +} diff --git a/clients/client-sagemaker-featurestore-runtime/package.json b/clients/client-sagemaker-featurestore-runtime/package.json new file mode 100644 index 000000000000..4b2edf5fad24 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/package.json @@ -0,0 +1,83 @@ +{ + "name": "@aws-sdk/client-sagemaker-featurestore-runtime", + "description": "AWS SDK for JavaScript Sagemaker Featurestore Runtime Client for Node.js, Browser and React Native", + "version": "1.0.0-rc.1", + "scripts": { + "clean": "yarn remove-definitions && yarn remove-dist && yarn remove-documentation", + "build-documentation": "yarn remove-documentation && typedoc ./", + "prepublishOnly": "yarn build", + "pretest": "yarn build:cjs", + "remove-definitions": "rimraf ./types", + "remove-dist": "rimraf ./dist", + "remove-documentation": "rimraf ./docs", + "test": "yarn build && jest --coverage --passWithNoTests", + "build:cjs": "tsc -p tsconfig.json", + "build:es": "tsc -p tsconfig.es.json", + "build": "yarn build:cjs && yarn build:es" + }, + "main": "./dist/cjs/index.js", + "types": "./types/index.d.ts", + "module": "./dist/es/index.js", + "browser": { + "./runtimeConfig": "./runtimeConfig.browser" + }, + "react-native": { + "./runtimeConfig": "./runtimeConfig.native" + }, + "sideEffects": false, + "dependencies": { + "@aws-crypto/sha256-browser": "^1.0.0", + "@aws-crypto/sha256-js": "^1.0.0", + "@aws-sdk/config-resolver": "1.0.0-rc.7", + "@aws-sdk/credential-provider-node": "1.0.0-rc.7", + "@aws-sdk/fetch-http-handler": "1.0.0-rc.7", + "@aws-sdk/hash-node": "1.0.0-rc.7", + "@aws-sdk/invalid-dependency": "1.0.0-rc.3", + "@aws-sdk/middleware-content-length": "1.0.0-rc.7", + "@aws-sdk/middleware-host-header": "1.0.0-rc.7", + "@aws-sdk/middleware-logger": "1.0.0-rc.7", + "@aws-sdk/middleware-retry": 
"1.0.0-rc.7", + "@aws-sdk/middleware-serde": "1.0.0-rc.7", + "@aws-sdk/middleware-signing": "1.0.0-rc.7", + "@aws-sdk/middleware-stack": "1.0.0-rc.7", + "@aws-sdk/middleware-user-agent": "1.0.0-rc.7", + "@aws-sdk/node-config-provider": "1.0.0-rc.7", + "@aws-sdk/node-http-handler": "1.0.0-rc.7", + "@aws-sdk/protocol-http": "1.0.0-rc.7", + "@aws-sdk/smithy-client": "1.0.0-rc.7", + "@aws-sdk/url-parser-browser": "1.0.0-rc.7", + "@aws-sdk/url-parser-node": "1.0.0-rc.7", + "@aws-sdk/util-base64-browser": "1.0.0-rc.3", + "@aws-sdk/util-base64-node": "1.0.0-rc.3", + "@aws-sdk/util-body-length-browser": "1.0.0-rc.3", + "@aws-sdk/util-body-length-node": "1.0.0-rc.3", + "@aws-sdk/util-user-agent-browser": "1.0.0-rc.7", + "@aws-sdk/util-user-agent-node": "1.0.0-rc.7", + "@aws-sdk/util-utf8-browser": "1.0.0-rc.3", + "@aws-sdk/util-utf8-node": "1.0.0-rc.3", + "tslib": "^2.0.0" + }, + "devDependencies": { + "@aws-sdk/client-documentation-generator": "1.0.0-rc.7", + "@aws-sdk/types": "1.0.0-rc.7", + "@types/node": "^12.7.5", + "jest": "^26.1.0", + "rimraf": "^3.0.0", + "typedoc": "^0.19.2", + "typescript": "~4.1.2" + }, + "engines": { + "node": ">=10.0.0" + }, + "author": { + "name": "AWS SDK for JavaScript Team", + "url": "https://aws.amazon.com/javascript/" + }, + "license": "Apache-2.0", + "homepage": "https://github.com/aws/aws-sdk-js-v3/tree/master/clients/client-sagemaker-featurestore-runtime", + "repository": { + "type": "git", + "url": "https://github.com/aws/aws-sdk-js-v3.git", + "directory": "clients/client-sagemaker-featurestore-runtime" + } +} diff --git a/clients/client-sagemaker-featurestore-runtime/protocols/Aws_restJson1.ts b/clients/client-sagemaker-featurestore-runtime/protocols/Aws_restJson1.ts new file mode 100644 index 000000000000..af5c8e928f25 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/protocols/Aws_restJson1.ts @@ -0,0 +1,539 @@ +import { DeleteRecordCommandInput, DeleteRecordCommandOutput } from "../commands/DeleteRecordCommand"; 
+import { GetRecordCommandInput, GetRecordCommandOutput } from "../commands/GetRecordCommand"; +import { PutRecordCommandInput, PutRecordCommandOutput } from "../commands/PutRecordCommand"; +import { + AccessForbidden, + FeatureValue, + InternalFailure, + ResourceNotFound, + ServiceUnavailable, + ValidationError, +} from "../models/models_0"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { + SmithyException as __SmithyException, + extendedEncodeURIComponent as __extendedEncodeURIComponent, +} from "@aws-sdk/smithy-client"; +import { + Endpoint as __Endpoint, + MetadataBearer as __MetadataBearer, + ResponseMetadata as __ResponseMetadata, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export const serializeAws_restJson1DeleteRecordCommand = async ( + input: DeleteRecordCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/FeatureGroup/{FeatureGroupName}"; + if (input.FeatureGroupName !== undefined) { + const labelValue: string = input.FeatureGroupName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: FeatureGroupName."); + } + resolvedPath = resolvedPath.replace("{FeatureGroupName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: FeatureGroupName."); + } + const query: any = { + ...(input.RecordIdentifierValueAsString !== undefined && { + RecordIdentifierValueAsString: input.RecordIdentifierValueAsString, + }), + ...(input.EventTime !== undefined && { EventTime: input.EventTime }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "DELETE", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1GetRecordCommand = async ( + 
input: GetRecordCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "", + }; + let resolvedPath = "/FeatureGroup/{FeatureGroupName}"; + if (input.FeatureGroupName !== undefined) { + const labelValue: string = input.FeatureGroupName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: FeatureGroupName."); + } + resolvedPath = resolvedPath.replace("{FeatureGroupName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: FeatureGroupName."); + } + const query: any = { + ...(input.RecordIdentifierValueAsString !== undefined && { + RecordIdentifierValueAsString: input.RecordIdentifierValueAsString, + }), + ...(input.FeatureNames !== undefined && { FeatureName: (input.FeatureNames || []).map((_entry) => _entry) }), + }; + let body: any; + const { hostname, protocol = "https", port } = await context.endpoint(); + return new __HttpRequest({ + protocol, + hostname, + port, + method: "GET", + headers, + path: resolvedPath, + query, + body, + }); +}; + +export const serializeAws_restJson1PutRecordCommand = async ( + input: PutRecordCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: any = { + "Content-Type": "application/json", + }; + let resolvedPath = "/FeatureGroup/{FeatureGroupName}"; + if (input.FeatureGroupName !== undefined) { + const labelValue: string = input.FeatureGroupName; + if (labelValue.length <= 0) { + throw new Error("Empty value provided for input HTTP label: FeatureGroupName."); + } + resolvedPath = resolvedPath.replace("{FeatureGroupName}", __extendedEncodeURIComponent(labelValue)); + } else { + throw new Error("No value provided for input HTTP label: FeatureGroupName."); + } + let body: any; + body = JSON.stringify({ + ...(input.Record !== undefined && { Record: serializeAws_restJson1Record(input.Record, context) }), + }); + const { hostname, protocol = 
"https", port } = await context.endpoint();
+  return new __HttpRequest({
+    protocol,
+    hostname,
+    port,
+    method: "PUT",
+    headers,
+    path: resolvedPath,
+    body,
+  });
+};
+
+export const deserializeAws_restJson1DeleteRecordCommand = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DeleteRecordCommandOutput> => {
+  if (output.statusCode !== 200 && output.statusCode >= 300) {
+    return deserializeAws_restJson1DeleteRecordCommandError(output, context);
+  }
+  const contents: DeleteRecordCommandOutput = {
+    $metadata: deserializeMetadata(output),
+  };
+  await collectBody(output.body, context);
+  return Promise.resolve(contents);
+};
+
+const deserializeAws_restJson1DeleteRecordCommandError = async (
+  output: __HttpResponse,
+  context: __SerdeContext
+): Promise<DeleteRecordCommandOutput> => {
+  const parsedOutput: any = {
+    ...output,
+    body: await parseBody(output.body, context),
+  };
+  let response: __SmithyException & __MetadataBearer & { [key: string]: any };
+  let errorCode: string = "UnknownError";
+  errorCode = loadRestJsonErrorCode(output, parsedOutput.body);
+  switch (errorCode) {
+    case "AccessForbidden":
+    case "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden":
+      response = {
+        ...(await deserializeAws_restJson1AccessForbiddenResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "InternalFailure":
+    case "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure":
+      response = {
+        ...(await deserializeAws_restJson1InternalFailureResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ServiceUnavailable":
+    case "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable":
+      response = {
+        ...(await deserializeAws_restJson1ServiceUnavailableResponse(parsedOutput, context)),
+        name: errorCode,
+        $metadata: deserializeMetadata(output),
+      };
+      break;
+    case "ValidationError":
+    case "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError":
+      response = 
{ + ...(await deserializeAws_restJson1ValidationErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1GetRecordCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1GetRecordCommandError(output, context); + } + const contents: GetRecordCommandOutput = { + $metadata: deserializeMetadata(output), + Record: undefined, + }; + const data: any = await parseBody(output.body, context); + if (data.Record !== undefined && data.Record !== null) { + contents.Record = deserializeAws_restJson1Record(data.Record, context); + } + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1GetRecordCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessForbidden": + case "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden": + response = { + ...(await deserializeAws_restJson1AccessForbiddenResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; 
+ break; + case "InternalFailure": + case "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure": + response = { + ...(await deserializeAws_restJson1InternalFailureResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemakerfeaturestoreruntime#ResourceNotFound": + response = { + ...(await deserializeAws_restJson1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailable": + case "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationError": + case "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError": + response = { + ...(await deserializeAws_restJson1ValidationErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_restJson1PutRecordCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode !== 200 && output.statusCode >= 300) { + return deserializeAws_restJson1PutRecordCommandError(output, context); + } + const contents: PutRecordCommandOutput = { + $metadata: deserializeMetadata(output), + }; + await 
collectBody(output.body, context); + return Promise.resolve(contents); +}; + +const deserializeAws_restJson1PutRecordCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + errorCode = loadRestJsonErrorCode(output, parsedOutput.body); + switch (errorCode) { + case "AccessForbidden": + case "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden": + response = { + ...(await deserializeAws_restJson1AccessForbiddenResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "InternalFailure": + case "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure": + response = { + ...(await deserializeAws_restJson1InternalFailureResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ServiceUnavailable": + case "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable": + response = { + ...(await deserializeAws_restJson1ServiceUnavailableResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ValidationError": + case "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError": + response = { + ...(await deserializeAws_restJson1ValidationErrorResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + 
response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +const deserializeAws_restJson1AccessForbiddenResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: AccessForbidden = { + name: "AccessForbidden", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1InternalFailureResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: InternalFailure = { + name: "InternalFailure", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ResourceNotFoundResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ResourceNotFound = { + name: "ResourceNotFound", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const deserializeAws_restJson1ServiceUnavailableResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ServiceUnavailable = { + name: "ServiceUnavailable", + $fault: "server", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const 
deserializeAws_restJson1ValidationErrorResponse = async ( + parsedOutput: any, + context: __SerdeContext +): Promise => { + const contents: ValidationError = { + name: "ValidationError", + $fault: "client", + $metadata: deserializeMetadata(parsedOutput), + Message: undefined, + }; + const data: any = parsedOutput.body; + if (data.Message !== undefined && data.Message !== null) { + contents.Message = data.Message; + } + return contents; +}; + +const serializeAws_restJson1FeatureValue = (input: FeatureValue, context: __SerdeContext): any => { + return { + ...(input.FeatureName !== undefined && { FeatureName: input.FeatureName }), + ...(input.ValueAsString !== undefined && { ValueAsString: input.ValueAsString }), + }; +}; + +const serializeAws_restJson1Record = (input: FeatureValue[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_restJson1FeatureValue(entry, context)); +}; + +const deserializeAws_restJson1FeatureValue = (output: any, context: __SerdeContext): FeatureValue => { + return { + FeatureName: output.FeatureName !== undefined && output.FeatureName !== null ? output.FeatureName : undefined, + ValueAsString: + output.ValueAsString !== undefined && output.ValueAsString !== null ? output.ValueAsString : undefined, + } as any; +}; + +const deserializeAws_restJson1Record = (output: any, context: __SerdeContext): FeatureValue[] => { + return (output || []).map((entry: any) => deserializeAws_restJson1FeatureValue(entry, context)); +}; + +const deserializeMetadata = (output: __HttpResponse): __ResponseMetadata => ({ + httpStatusCode: output.statusCode, + httpHeaders: output.headers, + requestId: output.headers["x-amzn-requestid"], +}); + +// Collect low-level response body stream to Uint8Array. 
+const collectBody = (streamBody: any = new Uint8Array(), context: __SerdeContext): Promise => { + if (streamBody instanceof Uint8Array) { + return Promise.resolve(streamBody); + } + return context.streamCollector(streamBody) || Promise.resolve(new Uint8Array()); +}; + +// Encode Uint8Array data into string with utf-8. +const collectBodyString = (streamBody: any, context: __SerdeContext): Promise => + collectBody(streamBody, context).then((body) => context.utf8Encoder(body)); + +const isSerializableHeaderValue = (value: any): boolean => + value !== undefined && + value !== "" && + (!Object.getOwnPropertyNames(value).includes("length") || value.length != 0) && + (!Object.getOwnPropertyNames(value).includes("size") || value.size != 0); + +const parseBody = (streamBody: any, context: __SerdeContext): any => + collectBodyString(streamBody, context).then((encoded) => { + if (encoded.length) { + return JSON.parse(encoded); + } + return {}; + }); + +/** + * Load an error code for the aws.rest-json-1.1 protocol. 
+ */ +const loadRestJsonErrorCode = (output: __HttpResponse, data: any): string => { + const findKey = (object: any, key: string) => Object.keys(object).find((k) => k.toLowerCase() === key.toLowerCase()); + + const sanitizeErrorCode = (rawValue: string): string => { + let cleanValue = rawValue; + if (cleanValue.indexOf(":") >= 0) { + cleanValue = cleanValue.split(":")[0]; + } + if (cleanValue.indexOf("#") >= 0) { + cleanValue = cleanValue.split("#")[1]; + } + return cleanValue; + }; + + const headerKey = findKey(output.headers, "x-amzn-errortype"); + if (headerKey !== undefined) { + return sanitizeErrorCode(output.headers[headerKey]); + } + + if (data.code !== undefined) { + return sanitizeErrorCode(data.code); + } + + if (data["__type"] !== undefined) { + return sanitizeErrorCode(data["__type"]); + } + + return ""; +}; diff --git a/clients/client-sagemaker-featurestore-runtime/runtimeConfig.browser.ts b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.browser.ts new file mode 100644 index 000000000000..14f48f2b1c9b --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.browser.ts @@ -0,0 +1,34 @@ +import packageInfo from "./package.json"; + +import { Sha256 } from "@aws-crypto/sha256-browser"; +import { FetchHttpHandler, streamCollector } from "@aws-sdk/fetch-http-handler"; +import { invalidAsyncFunction } from "@aws-sdk/invalid-dependency"; +import { DEFAULT_MAX_ATTEMPTS } from "@aws-sdk/middleware-retry"; +import { parseUrl } from "@aws-sdk/url-parser-browser"; +import { fromBase64, toBase64 } from "@aws-sdk/util-base64-browser"; +import { calculateBodyLength } from "@aws-sdk/util-body-length-browser"; +import { defaultUserAgent } from "@aws-sdk/util-user-agent-browser"; +import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-browser"; +import { ClientDefaults } from "./SageMakerFeatureStoreRuntimeClient"; +import { ClientSharedValues } from "./runtimeConfig.shared"; + +/** + * @internal + */ +export const ClientDefaultValues: 
Required<ClientDefaults> = {
+  ...ClientSharedValues,
+  runtime: "browser",
+  base64Decoder: fromBase64,
+  base64Encoder: toBase64,
+  bodyLengthChecker: calculateBodyLength,
+  credentialDefaultProvider: invalidAsyncFunction("Credential is missing") as any,
+  defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version),
+  maxAttempts: DEFAULT_MAX_ATTEMPTS,
+  region: invalidAsyncFunction("Region is missing") as any,
+  requestHandler: new FetchHttpHandler(),
+  sha256: Sha256,
+  streamCollector,
+  urlParser: parseUrl,
+  utf8Decoder: fromUtf8,
+  utf8Encoder: toUtf8,
+};
diff --git a/clients/client-sagemaker-featurestore-runtime/runtimeConfig.native.ts b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.native.ts
new file mode 100644
index 000000000000..ab1868223d04
--- /dev/null
+++ b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.native.ts
@@ -0,0 +1,17 @@
+import packageInfo from "./package.json";
+
+import { Sha256 } from "@aws-crypto/sha256-js";
+import { parseUrl } from "@aws-sdk/url-parser-node";
+import { ClientDefaults } from "./SageMakerFeatureStoreRuntimeClient";
+import { ClientDefaultValues as BrowserDefaults } from "./runtimeConfig.browser";
+
+/**
+ * @internal
+ */
+export const ClientDefaultValues: Required<ClientDefaults> = {
+  ...BrowserDefaults,
+  runtime: "react-native",
+  defaultUserAgent: `aws-sdk-js-v3-react-native-${packageInfo.name}/${packageInfo.version}`,
+  sha256: Sha256,
+  urlParser: parseUrl,
+};
diff --git a/clients/client-sagemaker-featurestore-runtime/runtimeConfig.shared.ts b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.shared.ts
new file mode 100644
index 000000000000..fa67e5168746
--- /dev/null
+++ b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.shared.ts
@@ -0,0 +1,13 @@
+import { defaultRegionInfoProvider } from "./endpoints";
+import { Logger as __Logger } from "@aws-sdk/types";
+
+/**
+ * @internal
+ */
+export const ClientSharedValues = {
+  apiVersion: "2020-07-01",
+  disableHostPrefix: false,
+  
logger: {} as __Logger,
+  regionInfoProvider: defaultRegionInfoProvider,
+  signingName: "sagemaker",
+};
diff --git a/clients/client-sagemaker-featurestore-runtime/runtimeConfig.ts b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.ts
new file mode 100644
index 000000000000..4f8fa9b74adc
--- /dev/null
+++ b/clients/client-sagemaker-featurestore-runtime/runtimeConfig.ts
@@ -0,0 +1,36 @@
+import packageInfo from "./package.json";
+
+import { NODE_REGION_CONFIG_FILE_OPTIONS, NODE_REGION_CONFIG_OPTIONS } from "@aws-sdk/config-resolver";
+import { defaultProvider as credentialDefaultProvider } from "@aws-sdk/credential-provider-node";
+import { Hash } from "@aws-sdk/hash-node";
+import { NODE_MAX_ATTEMPT_CONFIG_OPTIONS } from "@aws-sdk/middleware-retry";
+import { loadConfig as loadNodeConfig } from "@aws-sdk/node-config-provider";
+import { NodeHttpHandler, streamCollector } from "@aws-sdk/node-http-handler";
+import { parseUrl } from "@aws-sdk/url-parser-node";
+import { fromBase64, toBase64 } from "@aws-sdk/util-base64-node";
+import { calculateBodyLength } from "@aws-sdk/util-body-length-node";
+import { defaultUserAgent } from "@aws-sdk/util-user-agent-node";
+import { fromUtf8, toUtf8 } from "@aws-sdk/util-utf8-node";
+import { ClientDefaults } from "./SageMakerFeatureStoreRuntimeClient";
+import { ClientSharedValues } from "./runtimeConfig.shared";
+
+/**
+ * @internal
+ */
+export const ClientDefaultValues: Required<ClientDefaults> = {
+  ...ClientSharedValues,
+  runtime: "node",
+  base64Decoder: fromBase64,
+  base64Encoder: toBase64,
+  bodyLengthChecker: calculateBodyLength,
+  credentialDefaultProvider,
+  defaultUserAgent: defaultUserAgent(packageInfo.name, packageInfo.version),
+  maxAttempts: loadNodeConfig(NODE_MAX_ATTEMPT_CONFIG_OPTIONS),
+  region: loadNodeConfig(NODE_REGION_CONFIG_OPTIONS, NODE_REGION_CONFIG_FILE_OPTIONS),
+  requestHandler: new NodeHttpHandler(),
+  sha256: Hash.bind(null, "sha256"),
+  streamCollector,
+  urlParser: parseUrl,
+  utf8Decoder: 
fromUtf8, + utf8Encoder: toUtf8, +}; diff --git a/clients/client-sagemaker-featurestore-runtime/tsconfig.es.json b/clients/client-sagemaker-featurestore-runtime/tsconfig.es.json new file mode 100644 index 000000000000..30df5d2e6986 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/tsconfig.es.json @@ -0,0 +1,12 @@ +{ + "extends": "./tsconfig", + "compilerOptions": { + "target": "es5", + "module": "esnext", + "moduleResolution": "node", + "declaration": false, + "declarationDir": null, + "lib": ["dom", "es5", "es2015.promise", "es2015.collection", "es2015.iterable", "es2015.symbol.wellknown"], + "outDir": "dist/es" + } +} diff --git a/clients/client-sagemaker-featurestore-runtime/tsconfig.json b/clients/client-sagemaker-featurestore-runtime/tsconfig.json new file mode 100644 index 000000000000..4cf936f614b4 --- /dev/null +++ b/clients/client-sagemaker-featurestore-runtime/tsconfig.json @@ -0,0 +1,31 @@ +{ + "compilerOptions": { + "alwaysStrict": true, + "target": "ES2018", + "module": "commonjs", + "declaration": true, + "strict": true, + "sourceMap": true, + "downlevelIteration": true, + "importHelpers": true, + "noEmitHelpers": true, + "incremental": true, + "resolveJsonModule": true, + "esModuleInterop": true, + "declarationDir": "./types", + "outDir": "dist/cjs" + }, + "typedocOptions": { + "exclude": ["**/node_modules/**", "**/*.spec.ts", "./protocols/*.ts", "./e2e/*.ts", "./endpoints.ts"], + "excludeNotExported": true, + "excludePrivate": true, + "hideGenerator": true, + "ignoreCompilerErrors": true, + "includeDeclarations": true, + "readme": "./README.md", + "mode": "file", + "out": "./docs", + "theme": "minimal", + "plugin": ["@aws-sdk/client-documentation-generator"] + } +} diff --git a/clients/client-sagemaker/SageMaker.ts b/clients/client-sagemaker/SageMaker.ts index 7c85bf10112a..5c81735ed45c 100644 --- a/clients/client-sagemaker/SageMaker.ts +++ b/clients/client-sagemaker/SageMaker.ts @@ -1,10 +1,20 @@ import { SageMakerClient } from 
"./SageMakerClient"; +import { + AddAssociationCommand, + AddAssociationCommandInput, + AddAssociationCommandOutput, +} from "./commands/AddAssociationCommand"; import { AddTagsCommand, AddTagsCommandInput, AddTagsCommandOutput } from "./commands/AddTagsCommand"; import { AssociateTrialComponentCommand, AssociateTrialComponentCommandInput, AssociateTrialComponentCommandOutput, } from "./commands/AssociateTrialComponentCommand"; +import { + CreateActionCommand, + CreateActionCommandInput, + CreateActionCommandOutput, +} from "./commands/CreateActionCommand"; import { CreateAlgorithmCommand, CreateAlgorithmCommandInput, @@ -16,6 +26,11 @@ import { CreateAppImageConfigCommandInput, CreateAppImageConfigCommandOutput, } from "./commands/CreateAppImageConfigCommand"; +import { + CreateArtifactCommand, + CreateArtifactCommandInput, + CreateArtifactCommandOutput, +} from "./commands/CreateArtifactCommand"; import { CreateAutoMLJobCommand, CreateAutoMLJobCommandInput, @@ -31,6 +46,11 @@ import { CreateCompilationJobCommandInput, CreateCompilationJobCommandOutput, } from "./commands/CreateCompilationJobCommand"; +import { + CreateContextCommand, + CreateContextCommandInput, + CreateContextCommandOutput, +} from "./commands/CreateContextCommand"; import { CreateDomainCommand, CreateDomainCommandInput, @@ -51,6 +71,11 @@ import { CreateExperimentCommandInput, CreateExperimentCommandOutput, } from "./commands/CreateExperimentCommand"; +import { + CreateFeatureGroupCommand, + CreateFeatureGroupCommandInput, + CreateFeatureGroupCommandOutput, +} from "./commands/CreateFeatureGroupCommand"; import { CreateFlowDefinitionCommand, CreateFlowDefinitionCommandInput, @@ -83,6 +108,11 @@ import { CreateModelPackageCommandInput, CreateModelPackageCommandOutput, } from "./commands/CreateModelPackageCommand"; +import { + CreateModelPackageGroupCommand, + CreateModelPackageGroupCommandInput, + CreateModelPackageGroupCommandOutput, +} from "./commands/CreateModelPackageGroupCommand"; import { 
CreateMonitoringScheduleCommand, CreateMonitoringScheduleCommandInput, @@ -98,6 +128,11 @@ import { CreateNotebookInstanceLifecycleConfigCommandInput, CreateNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/CreateNotebookInstanceLifecycleConfigCommand"; +import { + CreatePipelineCommand, + CreatePipelineCommandInput, + CreatePipelineCommandOutput, +} from "./commands/CreatePipelineCommand"; import { CreatePresignedDomainUrlCommand, CreatePresignedDomainUrlCommandInput, @@ -113,6 +148,11 @@ import { CreateProcessingJobCommandInput, CreateProcessingJobCommandOutput, } from "./commands/CreateProcessingJobCommand"; +import { + CreateProjectCommand, + CreateProjectCommandInput, + CreateProjectCommandOutput, +} from "./commands/CreateProjectCommand"; import { CreateTrainingJobCommand, CreateTrainingJobCommandInput, @@ -144,6 +184,11 @@ import { CreateWorkteamCommandInput, CreateWorkteamCommandOutput, } from "./commands/CreateWorkteamCommand"; +import { + DeleteActionCommand, + DeleteActionCommandInput, + DeleteActionCommandOutput, +} from "./commands/DeleteActionCommand"; import { DeleteAlgorithmCommand, DeleteAlgorithmCommandInput, @@ -155,11 +200,26 @@ import { DeleteAppImageConfigCommandInput, DeleteAppImageConfigCommandOutput, } from "./commands/DeleteAppImageConfigCommand"; +import { + DeleteArtifactCommand, + DeleteArtifactCommandInput, + DeleteArtifactCommandOutput, +} from "./commands/DeleteArtifactCommand"; +import { + DeleteAssociationCommand, + DeleteAssociationCommandInput, + DeleteAssociationCommandOutput, +} from "./commands/DeleteAssociationCommand"; import { DeleteCodeRepositoryCommand, DeleteCodeRepositoryCommandInput, DeleteCodeRepositoryCommandOutput, } from "./commands/DeleteCodeRepositoryCommand"; +import { + DeleteContextCommand, + DeleteContextCommandInput, + DeleteContextCommandOutput, +} from "./commands/DeleteContextCommand"; import { DeleteDomainCommand, DeleteDomainCommandInput, @@ -180,6 +240,11 @@ import { 
DeleteExperimentCommandInput, DeleteExperimentCommandOutput, } from "./commands/DeleteExperimentCommand"; +import { + DeleteFeatureGroupCommand, + DeleteFeatureGroupCommandInput, + DeleteFeatureGroupCommandOutput, +} from "./commands/DeleteFeatureGroupCommand"; import { DeleteFlowDefinitionCommand, DeleteFlowDefinitionCommandInput, @@ -202,6 +267,16 @@ import { DeleteModelPackageCommandInput, DeleteModelPackageCommandOutput, } from "./commands/DeleteModelPackageCommand"; +import { + DeleteModelPackageGroupCommand, + DeleteModelPackageGroupCommandInput, + DeleteModelPackageGroupCommandOutput, +} from "./commands/DeleteModelPackageGroupCommand"; +import { + DeleteModelPackageGroupPolicyCommand, + DeleteModelPackageGroupPolicyCommandInput, + DeleteModelPackageGroupPolicyCommandOutput, +} from "./commands/DeleteModelPackageGroupPolicyCommand"; import { DeleteMonitoringScheduleCommand, DeleteMonitoringScheduleCommandInput, @@ -217,6 +292,16 @@ import { DeleteNotebookInstanceLifecycleConfigCommandInput, DeleteNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/DeleteNotebookInstanceLifecycleConfigCommand"; +import { + DeletePipelineCommand, + DeletePipelineCommandInput, + DeletePipelineCommandOutput, +} from "./commands/DeletePipelineCommand"; +import { + DeleteProjectCommand, + DeleteProjectCommandInput, + DeleteProjectCommandOutput, +} from "./commands/DeleteProjectCommand"; import { DeleteTagsCommand, DeleteTagsCommandInput, DeleteTagsCommandOutput } from "./commands/DeleteTagsCommand"; import { DeleteTrialCommand, DeleteTrialCommandInput, DeleteTrialCommandOutput } from "./commands/DeleteTrialCommand"; import { @@ -239,6 +324,11 @@ import { DeleteWorkteamCommandInput, DeleteWorkteamCommandOutput, } from "./commands/DeleteWorkteamCommand"; +import { + DescribeActionCommand, + DescribeActionCommandInput, + DescribeActionCommandOutput, +} from "./commands/DescribeActionCommand"; import { DescribeAlgorithmCommand, DescribeAlgorithmCommandInput, @@ -250,6 
+340,11 @@ import { DescribeAppImageConfigCommandInput, DescribeAppImageConfigCommandOutput, } from "./commands/DescribeAppImageConfigCommand"; +import { + DescribeArtifactCommand, + DescribeArtifactCommandInput, + DescribeArtifactCommandOutput, +} from "./commands/DescribeArtifactCommand"; import { DescribeAutoMLJobCommand, DescribeAutoMLJobCommandInput, @@ -265,6 +360,11 @@ import { DescribeCompilationJobCommandInput, DescribeCompilationJobCommandOutput, } from "./commands/DescribeCompilationJobCommand"; +import { + DescribeContextCommand, + DescribeContextCommandInput, + DescribeContextCommandOutput, +} from "./commands/DescribeContextCommand"; import { DescribeDomainCommand, DescribeDomainCommandInput, @@ -285,6 +385,11 @@ import { DescribeExperimentCommandInput, DescribeExperimentCommandOutput, } from "./commands/DescribeExperimentCommand"; +import { + DescribeFeatureGroupCommand, + DescribeFeatureGroupCommandInput, + DescribeFeatureGroupCommandOutput, +} from "./commands/DescribeFeatureGroupCommand"; import { DescribeFlowDefinitionCommand, DescribeFlowDefinitionCommandInput, @@ -325,6 +430,11 @@ import { DescribeModelPackageCommandInput, DescribeModelPackageCommandOutput, } from "./commands/DescribeModelPackageCommand"; +import { + DescribeModelPackageGroupCommand, + DescribeModelPackageGroupCommandInput, + DescribeModelPackageGroupCommandOutput, +} from "./commands/DescribeModelPackageGroupCommand"; import { DescribeMonitoringScheduleCommand, DescribeMonitoringScheduleCommandInput, @@ -340,11 +450,31 @@ import { DescribeNotebookInstanceLifecycleConfigCommandInput, DescribeNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/DescribeNotebookInstanceLifecycleConfigCommand"; +import { + DescribePipelineCommand, + DescribePipelineCommandInput, + DescribePipelineCommandOutput, +} from "./commands/DescribePipelineCommand"; +import { + DescribePipelineDefinitionForExecutionCommand, + DescribePipelineDefinitionForExecutionCommandInput, + 
DescribePipelineDefinitionForExecutionCommandOutput, +} from "./commands/DescribePipelineDefinitionForExecutionCommand"; +import { + DescribePipelineExecutionCommand, + DescribePipelineExecutionCommandInput, + DescribePipelineExecutionCommandOutput, +} from "./commands/DescribePipelineExecutionCommand"; import { DescribeProcessingJobCommand, DescribeProcessingJobCommandInput, DescribeProcessingJobCommandOutput, } from "./commands/DescribeProcessingJobCommand"; +import { + DescribeProjectCommand, + DescribeProjectCommandInput, + DescribeProjectCommandOutput, +} from "./commands/DescribeProjectCommand"; import { DescribeSubscribedWorkteamCommand, DescribeSubscribedWorkteamCommandInput, @@ -385,16 +515,37 @@ import { DescribeWorkteamCommandInput, DescribeWorkteamCommandOutput, } from "./commands/DescribeWorkteamCommand"; +import { + DisableSagemakerServicecatalogPortfolioCommand, + DisableSagemakerServicecatalogPortfolioCommandInput, + DisableSagemakerServicecatalogPortfolioCommandOutput, +} from "./commands/DisableSagemakerServicecatalogPortfolioCommand"; import { DisassociateTrialComponentCommand, DisassociateTrialComponentCommandInput, DisassociateTrialComponentCommandOutput, } from "./commands/DisassociateTrialComponentCommand"; +import { + EnableSagemakerServicecatalogPortfolioCommand, + EnableSagemakerServicecatalogPortfolioCommandInput, + EnableSagemakerServicecatalogPortfolioCommandOutput, +} from "./commands/EnableSagemakerServicecatalogPortfolioCommand"; +import { + GetModelPackageGroupPolicyCommand, + GetModelPackageGroupPolicyCommandInput, + GetModelPackageGroupPolicyCommandOutput, +} from "./commands/GetModelPackageGroupPolicyCommand"; +import { + GetSagemakerServicecatalogPortfolioStatusCommand, + GetSagemakerServicecatalogPortfolioStatusCommandInput, + GetSagemakerServicecatalogPortfolioStatusCommandOutput, +} from "./commands/GetSagemakerServicecatalogPortfolioStatusCommand"; import { GetSearchSuggestionsCommand, GetSearchSuggestionsCommandInput, 
GetSearchSuggestionsCommandOutput, } from "./commands/GetSearchSuggestionsCommand"; +import { ListActionsCommand, ListActionsCommandInput, ListActionsCommandOutput } from "./commands/ListActionsCommand"; import { ListAlgorithmsCommand, ListAlgorithmsCommandInput, @@ -406,6 +557,16 @@ import { ListAppImageConfigsCommandOutput, } from "./commands/ListAppImageConfigsCommand"; import { ListAppsCommand, ListAppsCommandInput, ListAppsCommandOutput } from "./commands/ListAppsCommand"; +import { + ListArtifactsCommand, + ListArtifactsCommandInput, + ListArtifactsCommandOutput, +} from "./commands/ListArtifactsCommand"; +import { + ListAssociationsCommand, + ListAssociationsCommandInput, + ListAssociationsCommandOutput, +} from "./commands/ListAssociationsCommand"; import { ListAutoMLJobsCommand, ListAutoMLJobsCommandInput, @@ -426,6 +587,11 @@ import { ListCompilationJobsCommandInput, ListCompilationJobsCommandOutput, } from "./commands/ListCompilationJobsCommand"; +import { + ListContextsCommand, + ListContextsCommandInput, + ListContextsCommandOutput, +} from "./commands/ListContextsCommand"; import { ListDomainsCommand, ListDomainsCommandInput, ListDomainsCommandOutput } from "./commands/ListDomainsCommand"; import { ListEndpointConfigsCommand, @@ -442,6 +608,11 @@ import { ListExperimentsCommandInput, ListExperimentsCommandOutput, } from "./commands/ListExperimentsCommand"; +import { + ListFeatureGroupsCommand, + ListFeatureGroupsCommandInput, + ListFeatureGroupsCommandOutput, +} from "./commands/ListFeatureGroupsCommand"; import { ListFlowDefinitionsCommand, ListFlowDefinitionsCommandInput, @@ -473,6 +644,11 @@ import { ListLabelingJobsForWorkteamCommandInput, ListLabelingJobsForWorkteamCommandOutput, } from "./commands/ListLabelingJobsForWorkteamCommand"; +import { + ListModelPackageGroupsCommand, + ListModelPackageGroupsCommandInput, + ListModelPackageGroupsCommandOutput, +} from "./commands/ListModelPackageGroupsCommand"; import { ListModelPackagesCommand, 
ListModelPackagesCommandInput, @@ -499,11 +675,36 @@ import { ListNotebookInstancesCommandInput, ListNotebookInstancesCommandOutput, } from "./commands/ListNotebookInstancesCommand"; +import { + ListPipelineExecutionStepsCommand, + ListPipelineExecutionStepsCommandInput, + ListPipelineExecutionStepsCommandOutput, +} from "./commands/ListPipelineExecutionStepsCommand"; +import { + ListPipelineExecutionsCommand, + ListPipelineExecutionsCommandInput, + ListPipelineExecutionsCommandOutput, +} from "./commands/ListPipelineExecutionsCommand"; +import { + ListPipelineParametersForExecutionCommand, + ListPipelineParametersForExecutionCommandInput, + ListPipelineParametersForExecutionCommandOutput, +} from "./commands/ListPipelineParametersForExecutionCommand"; +import { + ListPipelinesCommand, + ListPipelinesCommandInput, + ListPipelinesCommandOutput, +} from "./commands/ListPipelinesCommand"; import { ListProcessingJobsCommand, ListProcessingJobsCommandInput, ListProcessingJobsCommandOutput, } from "./commands/ListProcessingJobsCommand"; +import { + ListProjectsCommand, + ListProjectsCommandInput, + ListProjectsCommandOutput, +} from "./commands/ListProjectsCommand"; import { ListSubscribedWorkteamsCommand, ListSubscribedWorkteamsCommandInput, @@ -546,6 +747,11 @@ import { ListWorkteamsCommandInput, ListWorkteamsCommandOutput, } from "./commands/ListWorkteamsCommand"; +import { + PutModelPackageGroupPolicyCommand, + PutModelPackageGroupPolicyCommandInput, + PutModelPackageGroupPolicyCommandOutput, +} from "./commands/PutModelPackageGroupPolicyCommand"; import { RenderUiTemplateCommand, RenderUiTemplateCommandInput, @@ -562,6 +768,11 @@ import { StartNotebookInstanceCommandInput, StartNotebookInstanceCommandOutput, } from "./commands/StartNotebookInstanceCommand"; +import { + StartPipelineExecutionCommand, + StartPipelineExecutionCommandInput, + StartPipelineExecutionCommandOutput, +} from "./commands/StartPipelineExecutionCommand"; import { StopAutoMLJobCommand, 
StopAutoMLJobCommandInput, @@ -592,6 +803,11 @@ import { StopNotebookInstanceCommandInput, StopNotebookInstanceCommandOutput, } from "./commands/StopNotebookInstanceCommand"; +import { + StopPipelineExecutionCommand, + StopPipelineExecutionCommandInput, + StopPipelineExecutionCommandOutput, +} from "./commands/StopPipelineExecutionCommand"; import { StopProcessingJobCommand, StopProcessingJobCommandInput, @@ -607,16 +823,31 @@ import { StopTransformJobCommandInput, StopTransformJobCommandOutput, } from "./commands/StopTransformJobCommand"; +import { + UpdateActionCommand, + UpdateActionCommandInput, + UpdateActionCommandOutput, +} from "./commands/UpdateActionCommand"; import { UpdateAppImageConfigCommand, UpdateAppImageConfigCommandInput, UpdateAppImageConfigCommandOutput, } from "./commands/UpdateAppImageConfigCommand"; +import { + UpdateArtifactCommand, + UpdateArtifactCommandInput, + UpdateArtifactCommandOutput, +} from "./commands/UpdateArtifactCommand"; import { UpdateCodeRepositoryCommand, UpdateCodeRepositoryCommandInput, UpdateCodeRepositoryCommandOutput, } from "./commands/UpdateCodeRepositoryCommand"; +import { + UpdateContextCommand, + UpdateContextCommandInput, + UpdateContextCommandOutput, +} from "./commands/UpdateContextCommand"; import { UpdateDomainCommand, UpdateDomainCommandInput, @@ -638,6 +869,11 @@ import { UpdateExperimentCommandOutput, } from "./commands/UpdateExperimentCommand"; import { UpdateImageCommand, UpdateImageCommandInput, UpdateImageCommandOutput } from "./commands/UpdateImageCommand"; +import { + UpdateModelPackageCommand, + UpdateModelPackageCommandInput, + UpdateModelPackageCommandOutput, +} from "./commands/UpdateModelPackageCommand"; import { UpdateMonitoringScheduleCommand, UpdateMonitoringScheduleCommandInput, @@ -653,6 +889,16 @@ import { UpdateNotebookInstanceLifecycleConfigCommandInput, UpdateNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/UpdateNotebookInstanceLifecycleConfigCommand"; +import { + 
UpdatePipelineCommand, + UpdatePipelineCommandInput, + UpdatePipelineCommandOutput, +} from "./commands/UpdatePipelineCommand"; +import { + UpdatePipelineExecutionCommand, + UpdatePipelineExecutionCommandInput, + UpdatePipelineExecutionCommandOutput, +} from "./commands/UpdatePipelineExecutionCommand"; import { UpdateTrialCommand, UpdateTrialCommandInput, UpdateTrialCommandOutput } from "./commands/UpdateTrialCommand"; import { UpdateTrialComponentCommand, @@ -695,6 +941,42 @@ import { HttpHandlerOptions as __HttpHandlerOptions } from "@aws-sdk/types"; *
    */ export class SageMaker extends SageMakerClient { + /** + *

    Creates an association between the source and the destination. A + * source can be associated with multiple destinations, and a destination can be associated + * with multiple sources. An association is a lineage tracking entity. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ + public addAssociation( + args: AddAssociationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public addAssociation( + args: AddAssociationCommandInput, + cb: (err: any, data?: AddAssociationCommandOutput) => void + ): void; + public addAssociation( + args: AddAssociationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: AddAssociationCommandOutput) => void + ): void; + public addAssociation( + args: AddAssociationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: AddAssociationCommandOutput) => void), + cb?: (err: any, data?: AddAssociationCommandOutput) => void + ): Promise | void { + const command = new AddAssociationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Adds or overwrites one or more tags for the specified Amazon SageMaker resource. You can add * tags to notebook instances, training jobs, hyperparameter tuning jobs, batch transform @@ -770,6 +1052,39 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Creates an action. An action is a lineage tracking entity that + * represents an action or activity. For example, a model deployment or an HPO job. + * Generally, an action involves at least one input or output artifact. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ + public createAction( + args: CreateActionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createAction(args: CreateActionCommandInput, cb: (err: any, data?: CreateActionCommandOutput) => void): void; + public createAction( + args: CreateActionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateActionCommandOutput) => void + ): void; + public createAction( + args: CreateActionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateActionCommandOutput) => void), + cb?: (err: any, data?: CreateActionCommandOutput) => void + ): Promise | void { + const command = new CreateActionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Create a machine learning algorithm that you can use in Amazon SageMaker and list in the AWS * Marketplace.

    @@ -868,6 +1183,42 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Creates an artifact. An artifact is a lineage tracking entity that + * represents a URI addressable object or data. Some examples are the S3 URI of a dataset and + * the ECR registry path of an image. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ + public createArtifact( + args: CreateArtifactCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createArtifact( + args: CreateArtifactCommandInput, + cb: (err: any, data?: CreateArtifactCommandOutput) => void + ): void; + public createArtifact( + args: CreateArtifactCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateArtifactCommandOutput) => void + ): void; + public createArtifact( + args: CreateArtifactCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateArtifactCommandOutput) => void), + cb?: (err: any, data?: CreateArtifactCommandOutput) => void + ): Promise | void { + const command = new CreateArtifactCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Creates an Autopilot job.

    *

    Find the best performing model after you run an Autopilot job by calling . Deploy that model by following the steps described in @@ -1006,14 +1357,60 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Creates a context. A context is a lineage tracking entity that + * represents a logical grouping of other tracking or experiment entities. Some examples are + * an endpoint and a model package. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ + public createContext( + args: CreateContextCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createContext( + args: CreateContextCommandInput, + cb: (err: any, data?: CreateContextCommandOutput) => void + ): void; + public createContext( + args: CreateContextCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateContextCommandOutput) => void + ): void; + public createContext( + args: CreateContextCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateContextCommandOutput) => void), + cb?: (err: any, data?: CreateContextCommandOutput) => void + ): Promise | void { + const command = new CreateContextCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Creates a Domain used by Amazon SageMaker Studio. A domain consists of an associated * Amazon Elastic File System (EFS) volume, a list of authorized users, and a variety of security, application, * policy, and Amazon Virtual Private Cloud (VPC) configurations. An AWS account is limited to one domain per region. * Users within a domain can share notebook files and other artifacts with each other.

    + * + *

    + * EFS storage + *

    *

    When a domain is created, an EFS volume is created for use by all of the users within the * domain. Each user receives a private home directory within the EFS volume for notebooks, * Git repositories, and data files.

    + *

    SageMaker uses the AWS Key Management Service (AWS KMS) to encrypt the EFS volume attached to the domain with + * an AWS managed customer master key (CMK) by default. For more control, you can specify a + * customer managed CMK. For more information, see + * Protect Data at + * Rest Using Encryption.

    + * *

    * VPC configuration *

    @@ -1288,6 +1685,50 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Create a new FeatureGroup. A FeatureGroup is a group of + * Features defined in the FeatureStore to describe a + * Record.

    + *

    The FeatureGroup defines the schema and features contained in the + * FeatureGroup. A FeatureGroup definition is composed of a list of + * Features, a RecordIdentifierFeatureName, an + * EventTimeFeatureName and configurations for its OnlineStore + * and OfflineStore. Check AWS service quotas to see + * the FeatureGroups quota for your AWS account.

    + * + *

    You must include at least one of OnlineStoreConfig and + * OfflineStoreConfig to create a FeatureGroup.

    + *
    + */ + public createFeatureGroup( + args: CreateFeatureGroupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createFeatureGroup( + args: CreateFeatureGroupCommandInput, + cb: (err: any, data?: CreateFeatureGroupCommandOutput) => void + ): void; + public createFeatureGroup( + args: CreateFeatureGroupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateFeatureGroupCommandOutput) => void + ): void; + public createFeatureGroup( + args: CreateFeatureGroupCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateFeatureGroupCommandOutput) => void), + cb?: (err: any, data?: CreateFeatureGroupCommandOutput) => void + ): Promise | void { + const command = new CreateFeatureGroupCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Creates a flow definition.

    */ @@ -1562,13 +2003,24 @@ export class SageMaker extends SageMakerClient { /** *

    Creates a model package that you can use to create Amazon SageMaker models or list on AWS - * Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create + * Marketplace, or a versioned model that is part of a model group. Buyers can subscribe to model packages listed on AWS Marketplace to create * models in Amazon SageMaker.

    *

    To create a model package by specifying a Docker container that contains your * inference code and the Amazon S3 location of your model artifacts, provide values for * InferenceSpecification. To create a model from an algorithm resource * that you created or subscribed to in AWS Marketplace, provide a value for * SourceAlgorithmSpecification.

    + * + *

    There are two types of model packages:

    + *
      + *
    • + *

      Versioned - a model that is part of a model group in the model registry.

      + *
    • + *
    • + *

      Unversioned - a model package that is not part of a model group.

      + *
    • + *
    + *
    */ public createModelPackage( args: CreateModelPackageCommandInput, @@ -1599,6 +2051,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Creates a model group. A model group contains a group of model versions.

    + */ + public createModelPackageGroup( + args: CreateModelPackageGroupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createModelPackageGroup( + args: CreateModelPackageGroupCommandInput, + cb: (err: any, data?: CreateModelPackageGroupCommandOutput) => void + ): void; + public createModelPackageGroup( + args: CreateModelPackageGroupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateModelPackageGroupCommandOutput) => void + ): void; + public createModelPackageGroup( + args: CreateModelPackageGroupCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateModelPackageGroupCommandOutput) => void), + cb?: (err: any, data?: CreateModelPackageGroupCommandOutput) => void + ): Promise | void { + const command = new CreateModelPackageGroupCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Creates a schedule that regularly starts Amazon SageMaker Processing Jobs to monitor the data * captured for an Amazon SageMaker Endoint.

    @@ -1747,6 +2231,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Creates a pipeline using a JSON pipeline definition.

    + */ + public createPipeline( + args: CreatePipelineCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createPipeline( + args: CreatePipelineCommandInput, + cb: (err: any, data?: CreatePipelineCommandOutput) => void + ): void; + public createPipeline( + args: CreatePipelineCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreatePipelineCommandOutput) => void + ): void; + public createPipeline( + args: CreatePipelineCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreatePipelineCommandOutput) => void), + cb?: (err: any, data?: CreatePipelineCommandOutput) => void + ): Promise | void { + const command = new CreatePipelineCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Creates a URL for a specified UserProfile in a Domain. When accessed in a web browser, * the user will be automatically signed in to Amazon SageMaker Studio, and granted access to all of @@ -1869,13 +2385,46 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Creates a machine learning (ML) project that can contain one or more templates that set + * up an ML pipeline from training to deploying an approved model.

    + */ + public createProject( + args: CreateProjectCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public createProject( + args: CreateProjectCommandInput, + cb: (err: any, data?: CreateProjectCommandOutput) => void + ): void; + public createProject( + args: CreateProjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: CreateProjectCommandOutput) => void + ): void; + public createProject( + args: CreateProjectCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: CreateProjectCommandOutput) => void), + cb?: (err: any, data?: CreateProjectCommandOutput) => void + ): Promise | void { + const command = new CreateProjectCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Starts a model training job. After training completes, Amazon SageMaker saves the resulting * model artifacts to an Amazon S3 location that you specify.

    - *

    If you choose to host your model using Amazon SageMaker hosting services, you can use the - * resulting model artifacts as part of the model. You can also use the artifacts in a - * machine learning service other than Amazon SageMaker, provided that you know how to use them for - * inferences. + *

    If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting + * model artifacts as part of the model. You can also use the artifacts in a machine + * learning service other than Amazon SageMaker, provided that you know how to use them for + * inference. * *

    *

    In the request body, you provide the following:

    @@ -1920,7 +2469,7 @@ export class SageMaker extends SageMakerClient { * *
  • *

    - * RoleARN - The Amazon Resource Number (ARN) that Amazon SageMaker assumes + * RoleArn - The Amazon Resource Number (ARN) that Amazon SageMaker assumes * to perform tasks on your behalf during model training. * * You must grant this role the necessary permissions so that Amazon SageMaker can successfully @@ -2240,6 +2789,35 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Deletes an action.

    + */ + public deleteAction( + args: DeleteActionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteAction(args: DeleteActionCommandInput, cb: (err: any, data?: DeleteActionCommandOutput) => void): void; + public deleteAction( + args: DeleteActionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteActionCommandOutput) => void + ): void; + public deleteAction( + args: DeleteActionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteActionCommandOutput) => void), + cb?: (err: any, data?: DeleteActionCommandOutput) => void + ): Promise | void { + const command = new DeleteActionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Removes the specified algorithm from your account.

    */ @@ -2330,6 +2908,71 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Deletes an artifact. Either ArtifactArn or Source must be + * specified.

    + */ + public deleteArtifact( + args: DeleteArtifactCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteArtifact( + args: DeleteArtifactCommandInput, + cb: (err: any, data?: DeleteArtifactCommandOutput) => void + ): void; + public deleteArtifact( + args: DeleteArtifactCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteArtifactCommandOutput) => void + ): void; + public deleteArtifact( + args: DeleteArtifactCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteArtifactCommandOutput) => void), + cb?: (err: any, data?: DeleteArtifactCommandOutput) => void + ): Promise | void { + const command = new DeleteArtifactCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Deletes an association.

    + */ + public deleteAssociation( + args: DeleteAssociationCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteAssociation( + args: DeleteAssociationCommandInput, + cb: (err: any, data?: DeleteAssociationCommandOutput) => void + ): void; + public deleteAssociation( + args: DeleteAssociationCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteAssociationCommandOutput) => void + ): void; + public deleteAssociation( + args: DeleteAssociationCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteAssociationCommandOutput) => void), + cb?: (err: any, data?: DeleteAssociationCommandOutput) => void + ): Promise | void { + const command = new DeleteAssociationCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Deletes the specified Git repository from your account.

    */ @@ -2362,6 +3005,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Deletes an context.

    + */ + public deleteContext( + args: DeleteContextCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteContext( + args: DeleteContextCommandInput, + cb: (err: any, data?: DeleteContextCommandOutput) => void + ): void; + public deleteContext( + args: DeleteContextCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteContextCommandOutput) => void + ): void; + public deleteContext( + args: DeleteContextCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteContextCommandOutput) => void), + cb?: (err: any, data?: DeleteContextCommandOutput) => void + ): Promise | void { + const command = new DeleteContextCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Used to delete a domain. * If you onboarded with IAM mode, you will need to delete your domain to onboard again using SSO. @@ -2505,12 +3180,49 @@ export class SageMaker extends SageMakerClient { } /** - *

    Deletes the specified flow definition.

    + *

    Delete the FeatureGroup and any data that was written to the + * OnlineStore of the FeatureGroup. Data cannot be accessed from + * the OnlineStore immediately after DeleteFeatureGroup is called.

    + *

    Data written into the OfflineStore will not be deleted. The AWS Glue + * database and tables that are automatically created for your OfflineStore are + * not deleted.

    */ - public deleteFlowDefinition( - args: DeleteFlowDefinitionCommandInput, + public deleteFeatureGroup( + args: DeleteFeatureGroupCommandInput, options?: __HttpHandlerOptions - ): Promise; + ): Promise; + public deleteFeatureGroup( + args: DeleteFeatureGroupCommandInput, + cb: (err: any, data?: DeleteFeatureGroupCommandOutput) => void + ): void; + public deleteFeatureGroup( + args: DeleteFeatureGroupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteFeatureGroupCommandOutput) => void + ): void; + public deleteFeatureGroup( + args: DeleteFeatureGroupCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteFeatureGroupCommandOutput) => void), + cb?: (err: any, data?: DeleteFeatureGroupCommandOutput) => void + ): Promise | void { + const command = new DeleteFeatureGroupCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Deletes the specified flow definition.

    + */ + public deleteFlowDefinition( + args: DeleteFlowDefinitionCommandInput, + options?: __HttpHandlerOptions + ): Promise; public deleteFlowDefinition( args: DeleteFlowDefinitionCommandInput, cb: (err: any, data?: DeleteFlowDefinitionCommandOutput) => void @@ -2695,6 +3407,70 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Deletes the specified model group.

    + */ + public deleteModelPackageGroup( + args: DeleteModelPackageGroupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteModelPackageGroup( + args: DeleteModelPackageGroupCommandInput, + cb: (err: any, data?: DeleteModelPackageGroupCommandOutput) => void + ): void; + public deleteModelPackageGroup( + args: DeleteModelPackageGroupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteModelPackageGroupCommandOutput) => void + ): void; + public deleteModelPackageGroup( + args: DeleteModelPackageGroupCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteModelPackageGroupCommandOutput) => void), + cb?: (err: any, data?: DeleteModelPackageGroupCommandOutput) => void + ): Promise | void { + const command = new DeleteModelPackageGroupCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Deletes a model group resource policy.

    + */ + public deleteModelPackageGroupPolicy( + args: DeleteModelPackageGroupPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteModelPackageGroupPolicy( + args: DeleteModelPackageGroupPolicyCommandInput, + cb: (err: any, data?: DeleteModelPackageGroupPolicyCommandOutput) => void + ): void; + public deleteModelPackageGroupPolicy( + args: DeleteModelPackageGroupPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteModelPackageGroupPolicyCommandOutput) => void + ): void; + public deleteModelPackageGroupPolicy( + args: DeleteModelPackageGroupPolicyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteModelPackageGroupPolicyCommandOutput) => void), + cb?: (err: any, data?: DeleteModelPackageGroupPolicyCommandOutput) => void + ): Promise | void { + const command = new DeleteModelPackageGroupPolicyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Deletes a monitoring schedule. Also stops the schedule had not already been stopped. * This does not delete the job execution history of the monitoring schedule.

    @@ -2800,6 +3576,70 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Deletes a pipeline.

    + */ + public deletePipeline( + args: DeletePipelineCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deletePipeline( + args: DeletePipelineCommandInput, + cb: (err: any, data?: DeletePipelineCommandOutput) => void + ): void; + public deletePipeline( + args: DeletePipelineCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeletePipelineCommandOutput) => void + ): void; + public deletePipeline( + args: DeletePipelineCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeletePipelineCommandOutput) => void), + cb?: (err: any, data?: DeletePipelineCommandOutput) => void + ): Promise | void { + const command = new DeletePipelineCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Delete the specified project.

    + */ + public deleteProject( + args: DeleteProjectCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public deleteProject( + args: DeleteProjectCommandInput, + cb: (err: any, data?: DeleteProjectCommandOutput) => void + ): void; + public deleteProject( + args: DeleteProjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DeleteProjectCommandOutput) => void + ): void; + public deleteProject( + args: DeleteProjectCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DeleteProjectCommandOutput) => void), + cb?: (err: any, data?: DeleteProjectCommandOutput) => void + ): Promise | void { + const command = new DeleteProjectCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Deletes the specified tags from an Amazon SageMaker resource.

    *

    To list a resource's tags, use the ListTags API.

    @@ -3003,6 +3843,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Describes an action.

    + */ + public describeAction( + args: DescribeActionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeAction( + args: DescribeActionCommandInput, + cb: (err: any, data?: DescribeActionCommandOutput) => void + ): void; + public describeAction( + args: DescribeActionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeActionCommandOutput) => void + ): void; + public describeAction( + args: DescribeActionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeActionCommandOutput) => void), + cb?: (err: any, data?: DescribeActionCommandOutput) => void + ): Promise | void { + const command = new DescribeActionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Returns a description of the specified algorithm that is in your account.

    */ @@ -3093,6 +3965,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Describes an artifact.

    + */ + public describeArtifact( + args: DescribeArtifactCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeArtifact( + args: DescribeArtifactCommandInput, + cb: (err: any, data?: DescribeArtifactCommandOutput) => void + ): void; + public describeArtifact( + args: DescribeArtifactCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeArtifactCommandOutput) => void + ): void; + public describeArtifact( + args: DescribeArtifactCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeArtifactCommandOutput) => void), + cb?: (err: any, data?: DescribeArtifactCommandOutput) => void + ): Promise | void { + const command = new DescribeArtifactCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Returns information about an Amazon SageMaker job.

    */ @@ -3191,6 +4095,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Describes a context.

    + */ + public describeContext( + args: DescribeContextCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeContext( + args: DescribeContextCommandInput, + cb: (err: any, data?: DescribeContextCommandOutput) => void + ): void; + public describeContext( + args: DescribeContextCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeContextCommandOutput) => void + ): void; + public describeContext( + args: DescribeContextCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeContextCommandOutput) => void), + cb?: (err: any, data?: DescribeContextCommandOutput) => void + ): Promise | void { + const command = new DescribeContextCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    The description of the domain.

    */ @@ -3320,6 +4256,40 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Use this operation to describe a FeatureGroup. The response includes + * information on the creation time, FeatureGroup name, the unique identifier for + * each FeatureGroup, and more.

    + */ + public describeFeatureGroup( + args: DescribeFeatureGroupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeFeatureGroup( + args: DescribeFeatureGroupCommandInput, + cb: (err: any, data?: DescribeFeatureGroupCommandOutput) => void + ): void; + public describeFeatureGroup( + args: DescribeFeatureGroupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeFeatureGroupCommandOutput) => void + ): void; + public describeFeatureGroup( + args: DescribeFeatureGroupCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeFeatureGroupCommandOutput) => void), + cb?: (err: any, data?: DescribeFeatureGroupCommandOutput) => void + ): Promise | void { + const command = new DescribeFeatureGroupCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Returns information about the specified flow definition.

    */ @@ -3581,6 +4551,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Gets a description for the specified model group.

    + */ + public describeModelPackageGroup( + args: DescribeModelPackageGroupCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeModelPackageGroup( + args: DescribeModelPackageGroupCommandInput, + cb: (err: any, data?: DescribeModelPackageGroupCommandOutput) => void + ): void; + public describeModelPackageGroup( + args: DescribeModelPackageGroupCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeModelPackageGroupCommandOutput) => void + ): void; + public describeModelPackageGroup( + args: DescribeModelPackageGroupCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeModelPackageGroupCommandOutput) => void), + cb?: (err: any, data?: DescribeModelPackageGroupCommandOutput) => void + ): Promise | void { + const command = new DescribeModelPackageGroupCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Describes the schedule for a monitoring job.

    */ @@ -3681,6 +4683,104 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Describes the details of a pipeline.

    + */ + public describePipeline( + args: DescribePipelineCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describePipeline( + args: DescribePipelineCommandInput, + cb: (err: any, data?: DescribePipelineCommandOutput) => void + ): void; + public describePipeline( + args: DescribePipelineCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribePipelineCommandOutput) => void + ): void; + public describePipeline( + args: DescribePipelineCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribePipelineCommandOutput) => void), + cb?: (err: any, data?: DescribePipelineCommandOutput) => void + ): Promise | void { + const command = new DescribePipelineCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Describes the details of an execution's pipeline definition.

    + */ + public describePipelineDefinitionForExecution( + args: DescribePipelineDefinitionForExecutionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describePipelineDefinitionForExecution( + args: DescribePipelineDefinitionForExecutionCommandInput, + cb: (err: any, data?: DescribePipelineDefinitionForExecutionCommandOutput) => void + ): void; + public describePipelineDefinitionForExecution( + args: DescribePipelineDefinitionForExecutionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribePipelineDefinitionForExecutionCommandOutput) => void + ): void; + public describePipelineDefinitionForExecution( + args: DescribePipelineDefinitionForExecutionCommandInput, + optionsOrCb?: + | __HttpHandlerOptions + | ((err: any, data?: DescribePipelineDefinitionForExecutionCommandOutput) => void), + cb?: (err: any, data?: DescribePipelineDefinitionForExecutionCommandOutput) => void + ): Promise | void { + const command = new DescribePipelineDefinitionForExecutionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Describes the details of a pipeline execution.

    + */ + public describePipelineExecution( + args: DescribePipelineExecutionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describePipelineExecution( + args: DescribePipelineExecutionCommandInput, + cb: (err: any, data?: DescribePipelineExecutionCommandOutput) => void + ): void; + public describePipelineExecution( + args: DescribePipelineExecutionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribePipelineExecutionCommandOutput) => void + ): void; + public describePipelineExecution( + args: DescribePipelineExecutionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribePipelineExecutionCommandOutput) => void), + cb?: (err: any, data?: DescribePipelineExecutionCommandOutput) => void + ): Promise | void { + const command = new DescribePipelineExecutionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Returns a description of a processing job.

    */ @@ -3713,6 +4813,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Describes the details of a project.

    + */ + public describeProject( + args: DescribeProjectCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public describeProject( + args: DescribeProjectCommandInput, + cb: (err: any, data?: DescribeProjectCommandOutput) => void + ): void; + public describeProject( + args: DescribeProjectCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DescribeProjectCommandOutput) => void + ): void; + public describeProject( + args: DescribeProjectCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: DescribeProjectCommandOutput) => void), + cb?: (err: any, data?: DescribeProjectCommandOutput) => void + ): Promise | void { + const command = new DescribeProjectCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Gets information about a work team provided by a vendor. It returns details about the * subscription with a vendor in the AWS Marketplace.

    @@ -3977,6 +5109,41 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Disables using Service Catalog in SageMaker. Service Catalog is used to create + * SageMaker projects.

    + */ + public disableSagemakerServicecatalogPortfolio( + args: DisableSagemakerServicecatalogPortfolioCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public disableSagemakerServicecatalogPortfolio( + args: DisableSagemakerServicecatalogPortfolioCommandInput, + cb: (err: any, data?: DisableSagemakerServicecatalogPortfolioCommandOutput) => void + ): void; + public disableSagemakerServicecatalogPortfolio( + args: DisableSagemakerServicecatalogPortfolioCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: DisableSagemakerServicecatalogPortfolioCommandOutput) => void + ): void; + public disableSagemakerServicecatalogPortfolio( + args: DisableSagemakerServicecatalogPortfolioCommandInput, + optionsOrCb?: + | __HttpHandlerOptions + | ((err: any, data?: DisableSagemakerServicecatalogPortfolioCommandOutput) => void), + cb?: (err: any, data?: DisableSagemakerServicecatalogPortfolioCommandOutput) => void + ): Promise | void { + const command = new DisableSagemakerServicecatalogPortfolioCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Disassociates a trial component from a trial. This doesn't effect other trials the * component is associated with. Before you can delete a component, you must disassociate the @@ -4014,6 +5181,111 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Enables using Service Catalog in SageMaker. Service Catalog is used to create + * SageMaker projects.

    + */ + public enableSagemakerServicecatalogPortfolio( + args: EnableSagemakerServicecatalogPortfolioCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public enableSagemakerServicecatalogPortfolio( + args: EnableSagemakerServicecatalogPortfolioCommandInput, + cb: (err: any, data?: EnableSagemakerServicecatalogPortfolioCommandOutput) => void + ): void; + public enableSagemakerServicecatalogPortfolio( + args: EnableSagemakerServicecatalogPortfolioCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: EnableSagemakerServicecatalogPortfolioCommandOutput) => void + ): void; + public enableSagemakerServicecatalogPortfolio( + args: EnableSagemakerServicecatalogPortfolioCommandInput, + optionsOrCb?: + | __HttpHandlerOptions + | ((err: any, data?: EnableSagemakerServicecatalogPortfolioCommandOutput) => void), + cb?: (err: any, data?: EnableSagemakerServicecatalogPortfolioCommandOutput) => void + ): Promise | void { + const command = new EnableSagemakerServicecatalogPortfolioCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Gets a resource policy that manages access for a model group. For information about + * resource policies, see Identity-based + * policies and resource-based policies in the AWS Identity and + * Access Management User Guide..

    + */ + public getModelPackageGroupPolicy( + args: GetModelPackageGroupPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getModelPackageGroupPolicy( + args: GetModelPackageGroupPolicyCommandInput, + cb: (err: any, data?: GetModelPackageGroupPolicyCommandOutput) => void + ): void; + public getModelPackageGroupPolicy( + args: GetModelPackageGroupPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetModelPackageGroupPolicyCommandOutput) => void + ): void; + public getModelPackageGroupPolicy( + args: GetModelPackageGroupPolicyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: GetModelPackageGroupPolicyCommandOutput) => void), + cb?: (err: any, data?: GetModelPackageGroupPolicyCommandOutput) => void + ): Promise | void { + const command = new GetModelPackageGroupPolicyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Gets the status of Service Catalog in SageMaker. Service Catalog is used to create + * SageMaker projects.

    + */ + public getSagemakerServicecatalogPortfolioStatus( + args: GetSagemakerServicecatalogPortfolioStatusCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public getSagemakerServicecatalogPortfolioStatus( + args: GetSagemakerServicecatalogPortfolioStatusCommandInput, + cb: (err: any, data?: GetSagemakerServicecatalogPortfolioStatusCommandOutput) => void + ): void; + public getSagemakerServicecatalogPortfolioStatus( + args: GetSagemakerServicecatalogPortfolioStatusCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: GetSagemakerServicecatalogPortfolioStatusCommandOutput) => void + ): void; + public getSagemakerServicecatalogPortfolioStatus( + args: GetSagemakerServicecatalogPortfolioStatusCommandInput, + optionsOrCb?: + | __HttpHandlerOptions + | ((err: any, data?: GetSagemakerServicecatalogPortfolioStatusCommandOutput) => void), + cb?: (err: any, data?: GetSagemakerServicecatalogPortfolioStatusCommandOutput) => void + ): Promise | void { + const command = new GetSagemakerServicecatalogPortfolioStatusCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    An auto-complete API for the search functionality in the Amazon SageMaker console. It returns * suggestions of possible matches for the property name to use in Search @@ -4049,6 +5321,32 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Lists the actions in your account and their properties.

    + */ + public listActions(args: ListActionsCommandInput, options?: __HttpHandlerOptions): Promise; + public listActions(args: ListActionsCommandInput, cb: (err: any, data?: ListActionsCommandOutput) => void): void; + public listActions( + args: ListActionsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListActionsCommandOutput) => void + ): void; + public listActions( + args: ListActionsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListActionsCommandOutput) => void), + cb?: (err: any, data?: ListActionsCommandOutput) => void + ): Promise | void { + const command = new ListActionsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Lists the machine learning algorithms that have been created.

    */ @@ -4141,6 +5439,70 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Lists the artifacts in your account and their properties.

    + */ + public listArtifacts( + args: ListArtifactsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listArtifacts( + args: ListArtifactsCommandInput, + cb: (err: any, data?: ListArtifactsCommandOutput) => void + ): void; + public listArtifacts( + args: ListArtifactsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListArtifactsCommandOutput) => void + ): void; + public listArtifacts( + args: ListArtifactsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListArtifactsCommandOutput) => void), + cb?: (err: any, data?: ListArtifactsCommandOutput) => void + ): Promise | void { + const command = new ListArtifactsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Lists the associations in your account and their properties.

    + */ + public listAssociations( + args: ListAssociationsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listAssociations( + args: ListAssociationsCommandInput, + cb: (err: any, data?: ListAssociationsCommandOutput) => void + ): void; + public listAssociations( + args: ListAssociationsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListAssociationsCommandOutput) => void + ): void; + public listAssociations( + args: ListAssociationsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListAssociationsCommandOutput) => void), + cb?: (err: any, data?: ListAssociationsCommandOutput) => void + ): Promise | void { + const command = new ListAssociationsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Request a list of jobs.

    */ @@ -4271,6 +5633,35 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Lists the contexts in your account and their properties.

    + */ + public listContexts( + args: ListContextsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listContexts(args: ListContextsCommandInput, cb: (err: any, data?: ListContextsCommandOutput) => void): void; + public listContexts( + args: ListContextsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListContextsCommandOutput) => void + ): void; + public listContexts( + args: ListContextsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListContextsCommandOutput) => void), + cb?: (err: any, data?: ListContextsCommandOutput) => void + ): Promise | void { + const command = new ListContextsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Lists the domains.

    */ @@ -4395,6 +5786,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    List FeatureGroups based on given filter and order.

    + */ + public listFeatureGroups( + args: ListFeatureGroupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listFeatureGroups( + args: ListFeatureGroupsCommandInput, + cb: (err: any, data?: ListFeatureGroupsCommandOutput) => void + ): void; + public listFeatureGroups( + args: ListFeatureGroupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListFeatureGroupsCommandOutput) => void + ): void; + public listFeatureGroups( + args: ListFeatureGroupsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListFeatureGroupsCommandOutput) => void), + cb?: (err: any, data?: ListFeatureGroupsCommandOutput) => void + ): Promise | void { + const command = new ListFeatureGroupsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Returns information about the flow definitions in your account.

    */ @@ -4617,6 +6040,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Gets a list of the model groups in your AWS account.

    + */ + public listModelPackageGroups( + args: ListModelPackageGroupsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listModelPackageGroups( + args: ListModelPackageGroupsCommandInput, + cb: (err: any, data?: ListModelPackageGroupsCommandOutput) => void + ): void; + public listModelPackageGroups( + args: ListModelPackageGroupsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListModelPackageGroupsCommandOutput) => void + ): void; + public listModelPackageGroups( + args: ListModelPackageGroupsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListModelPackageGroupsCommandOutput) => void), + cb?: (err: any, data?: ListModelPackageGroupsCommandOutput) => void + ): Promise | void { + const command = new ListModelPackageGroupsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Lists the model packages that have been created.

    */ @@ -4804,6 +6259,134 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Gets a list of the pipeline executions.

    + */ + public listPipelineExecutions( + args: ListPipelineExecutionsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listPipelineExecutions( + args: ListPipelineExecutionsCommandInput, + cb: (err: any, data?: ListPipelineExecutionsCommandOutput) => void + ): void; + public listPipelineExecutions( + args: ListPipelineExecutionsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListPipelineExecutionsCommandOutput) => void + ): void; + public listPipelineExecutions( + args: ListPipelineExecutionsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListPipelineExecutionsCommandOutput) => void), + cb?: (err: any, data?: ListPipelineExecutionsCommandOutput) => void + ): Promise | void { + const command = new ListPipelineExecutionsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Gets a list of PipeLineExecutionStep objects.

    + */ + public listPipelineExecutionSteps( + args: ListPipelineExecutionStepsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listPipelineExecutionSteps( + args: ListPipelineExecutionStepsCommandInput, + cb: (err: any, data?: ListPipelineExecutionStepsCommandOutput) => void + ): void; + public listPipelineExecutionSteps( + args: ListPipelineExecutionStepsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListPipelineExecutionStepsCommandOutput) => void + ): void; + public listPipelineExecutionSteps( + args: ListPipelineExecutionStepsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListPipelineExecutionStepsCommandOutput) => void), + cb?: (err: any, data?: ListPipelineExecutionStepsCommandOutput) => void + ): Promise | void { + const command = new ListPipelineExecutionStepsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Gets a list of parameters for a pipeline execution.

    + */ + public listPipelineParametersForExecution( + args: ListPipelineParametersForExecutionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listPipelineParametersForExecution( + args: ListPipelineParametersForExecutionCommandInput, + cb: (err: any, data?: ListPipelineParametersForExecutionCommandOutput) => void + ): void; + public listPipelineParametersForExecution( + args: ListPipelineParametersForExecutionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListPipelineParametersForExecutionCommandOutput) => void + ): void; + public listPipelineParametersForExecution( + args: ListPipelineParametersForExecutionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListPipelineParametersForExecutionCommandOutput) => void), + cb?: (err: any, data?: ListPipelineParametersForExecutionCommandOutput) => void + ): Promise | void { + const command = new ListPipelineParametersForExecutionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Gets a list of pipelines.

    + */ + public listPipelines( + args: ListPipelinesCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listPipelines( + args: ListPipelinesCommandInput, + cb: (err: any, data?: ListPipelinesCommandOutput) => void + ): void; + public listPipelines( + args: ListPipelinesCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListPipelinesCommandOutput) => void + ): void; + public listPipelines( + args: ListPipelinesCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListPipelinesCommandOutput) => void), + cb?: (err: any, data?: ListPipelinesCommandOutput) => void + ): Promise | void { + const command = new ListPipelinesCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Lists processing jobs that satisfy various filters.

    */ @@ -4836,6 +6419,35 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Gets a list of the projects in an AWS account.

    + */ + public listProjects( + args: ListProjectsCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public listProjects(args: ListProjectsCommandInput, cb: (err: any, data?: ListProjectsCommandOutput) => void): void; + public listProjects( + args: ListProjectsCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: ListProjectsCommandOutput) => void + ): void; + public listProjects( + args: ListProjectsCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: ListProjectsCommandOutput) => void), + cb?: (err: any, data?: ListProjectsCommandOutput) => void + ): Promise | void { + const command = new ListProjectsCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Gets a list of the work teams that you are subscribed to in the AWS Marketplace. The * list may be empty if no work team satisfies the filter specified in the @@ -5175,6 +6787,40 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Adds a resouce policy to control access to a model group. For information about + * resoure policies, see Identity-based + * policies and resource-based policies in the AWS Identity and Access Management User Guide..

    + */ + public putModelPackageGroupPolicy( + args: PutModelPackageGroupPolicyCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public putModelPackageGroupPolicy( + args: PutModelPackageGroupPolicyCommandInput, + cb: (err: any, data?: PutModelPackageGroupPolicyCommandOutput) => void + ): void; + public putModelPackageGroupPolicy( + args: PutModelPackageGroupPolicyCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: PutModelPackageGroupPolicyCommandOutput) => void + ): void; + public putModelPackageGroupPolicy( + args: PutModelPackageGroupPolicyCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: PutModelPackageGroupPolicyCommandOutput) => void), + cb?: (err: any, data?: PutModelPackageGroupPolicyCommandOutput) => void + ): Promise | void { + const command = new PutModelPackageGroupPolicyCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Renders the UI template so that you can preview the worker's experience.

    */ @@ -5307,6 +6953,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Starts a pipeline execution.

    + */ + public startPipelineExecution( + args: StartPipelineExecutionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public startPipelineExecution( + args: StartPipelineExecutionCommandInput, + cb: (err: any, data?: StartPipelineExecutionCommandOutput) => void + ): void; + public startPipelineExecution( + args: StartPipelineExecutionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: StartPipelineExecutionCommandOutput) => void + ): void; + public startPipelineExecution( + args: StartPipelineExecutionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: StartPipelineExecutionCommandOutput) => void), + cb?: (err: any, data?: StartPipelineExecutionCommandOutput) => void + ): Promise | void { + const command = new StartPipelineExecutionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    A method for forcing the termination of a running job.

    */ @@ -5519,6 +7197,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Stops a pipeline execution.

    + */ + public stopPipelineExecution( + args: StopPipelineExecutionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public stopPipelineExecution( + args: StopPipelineExecutionCommandInput, + cb: (err: any, data?: StopPipelineExecutionCommandOutput) => void + ): void; + public stopPipelineExecution( + args: StopPipelineExecutionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: StopPipelineExecutionCommandOutput) => void + ): void; + public stopPipelineExecution( + args: StopPipelineExecutionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: StopPipelineExecutionCommandOutput) => void), + cb?: (err: any, data?: StopPipelineExecutionCommandOutput) => void + ): Promise | void { + const command = new StopPipelineExecutionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Stops a processing job.

    */ @@ -5626,6 +7336,35 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Updates an action.

    + */ + public updateAction( + args: UpdateActionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateAction(args: UpdateActionCommandInput, cb: (err: any, data?: UpdateActionCommandOutput) => void): void; + public updateAction( + args: UpdateActionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateActionCommandOutput) => void + ): void; + public updateAction( + args: UpdateActionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateActionCommandOutput) => void), + cb?: (err: any, data?: UpdateActionCommandOutput) => void + ): Promise | void { + const command = new UpdateActionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Updates the properties of an AppImageConfig.

    */ @@ -5658,6 +7397,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Updates an artifact.

    + */ + public updateArtifact( + args: UpdateArtifactCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateArtifact( + args: UpdateArtifactCommandInput, + cb: (err: any, data?: UpdateArtifactCommandOutput) => void + ): void; + public updateArtifact( + args: UpdateArtifactCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateArtifactCommandOutput) => void + ): void; + public updateArtifact( + args: UpdateArtifactCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateArtifactCommandOutput) => void), + cb?: (err: any, data?: UpdateArtifactCommandOutput) => void + ): Promise | void { + const command = new UpdateArtifactCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Updates the specified Git repository with the specified values.

    */ @@ -5690,6 +7461,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Updates a context.

    + */ + public updateContext( + args: UpdateContextCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateContext( + args: UpdateContextCommandInput, + cb: (err: any, data?: UpdateContextCommandOutput) => void + ): void; + public updateContext( + args: UpdateContextCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateContextCommandOutput) => void + ): void; + public updateContext( + args: UpdateContextCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateContextCommandOutput) => void), + cb?: (err: any, data?: UpdateContextCommandOutput) => void + ): Promise | void { + const command = new UpdateContextCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Updates the default settings for new user profiles in the domain.

    */ @@ -5863,6 +7666,38 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Updates a versioned model.

    + */ + public updateModelPackage( + args: UpdateModelPackageCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updateModelPackage( + args: UpdateModelPackageCommandInput, + cb: (err: any, data?: UpdateModelPackageCommandOutput) => void + ): void; + public updateModelPackage( + args: UpdateModelPackageCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdateModelPackageCommandOutput) => void + ): void; + public updateModelPackage( + args: UpdateModelPackageCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdateModelPackageCommandOutput) => void), + cb?: (err: any, data?: UpdateModelPackageCommandOutput) => void + ): Promise | void { + const command = new UpdateModelPackageCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Updates a previously created schedule.

    */ @@ -5963,6 +7798,70 @@ export class SageMaker extends SageMakerClient { } } + /** + *

    Updates a pipeline.

    + */ + public updatePipeline( + args: UpdatePipelineCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updatePipeline( + args: UpdatePipelineCommandInput, + cb: (err: any, data?: UpdatePipelineCommandOutput) => void + ): void; + public updatePipeline( + args: UpdatePipelineCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdatePipelineCommandOutput) => void + ): void; + public updatePipeline( + args: UpdatePipelineCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdatePipelineCommandOutput) => void), + cb?: (err: any, data?: UpdatePipelineCommandOutput) => void + ): Promise | void { + const command = new UpdatePipelineCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + + /** + *

    Updates a pipeline execution.

    + */ + public updatePipelineExecution( + args: UpdatePipelineExecutionCommandInput, + options?: __HttpHandlerOptions + ): Promise; + public updatePipelineExecution( + args: UpdatePipelineExecutionCommandInput, + cb: (err: any, data?: UpdatePipelineExecutionCommandOutput) => void + ): void; + public updatePipelineExecution( + args: UpdatePipelineExecutionCommandInput, + options: __HttpHandlerOptions, + cb: (err: any, data?: UpdatePipelineExecutionCommandOutput) => void + ): void; + public updatePipelineExecution( + args: UpdatePipelineExecutionCommandInput, + optionsOrCb?: __HttpHandlerOptions | ((err: any, data?: UpdatePipelineExecutionCommandOutput) => void), + cb?: (err: any, data?: UpdatePipelineExecutionCommandOutput) => void + ): Promise | void { + const command = new UpdatePipelineExecutionCommand(args); + if (typeof optionsOrCb === "function") { + this.send(command, optionsOrCb); + } else if (typeof cb === "function") { + if (typeof optionsOrCb !== "object") throw new Error(`Expect http options but get ${typeof optionsOrCb}`); + this.send(command, optionsOrCb || {}, cb); + } else { + return this.send(command, optionsOrCb); + } + } + /** *

    Updates the display name of a trial.

    */ diff --git a/clients/client-sagemaker/SageMakerClient.ts b/clients/client-sagemaker/SageMakerClient.ts index 28386b80fb40..0aa3f54199cb 100644 --- a/clients/client-sagemaker/SageMakerClient.ts +++ b/clients/client-sagemaker/SageMakerClient.ts @@ -1,14 +1,17 @@ +import { AddAssociationCommandInput, AddAssociationCommandOutput } from "./commands/AddAssociationCommand"; import { AddTagsCommandInput, AddTagsCommandOutput } from "./commands/AddTagsCommand"; import { AssociateTrialComponentCommandInput, AssociateTrialComponentCommandOutput, } from "./commands/AssociateTrialComponentCommand"; +import { CreateActionCommandInput, CreateActionCommandOutput } from "./commands/CreateActionCommand"; import { CreateAlgorithmCommandInput, CreateAlgorithmCommandOutput } from "./commands/CreateAlgorithmCommand"; import { CreateAppCommandInput, CreateAppCommandOutput } from "./commands/CreateAppCommand"; import { CreateAppImageConfigCommandInput, CreateAppImageConfigCommandOutput, } from "./commands/CreateAppImageConfigCommand"; +import { CreateArtifactCommandInput, CreateArtifactCommandOutput } from "./commands/CreateArtifactCommand"; import { CreateAutoMLJobCommandInput, CreateAutoMLJobCommandOutput } from "./commands/CreateAutoMLJobCommand"; import { CreateCodeRepositoryCommandInput, @@ -18,6 +21,7 @@ import { CreateCompilationJobCommandInput, CreateCompilationJobCommandOutput, } from "./commands/CreateCompilationJobCommand"; +import { CreateContextCommandInput, CreateContextCommandOutput } from "./commands/CreateContextCommand"; import { CreateDomainCommandInput, CreateDomainCommandOutput } from "./commands/CreateDomainCommand"; import { CreateEndpointCommandInput, CreateEndpointCommandOutput } from "./commands/CreateEndpointCommand"; import { @@ -25,6 +29,7 @@ import { CreateEndpointConfigCommandOutput, } from "./commands/CreateEndpointConfigCommand"; import { CreateExperimentCommandInput, CreateExperimentCommandOutput } from "./commands/CreateExperimentCommand"; +import 
{ CreateFeatureGroupCommandInput, CreateFeatureGroupCommandOutput } from "./commands/CreateFeatureGroupCommand"; import { CreateFlowDefinitionCommandInput, CreateFlowDefinitionCommandOutput, @@ -39,6 +44,10 @@ import { CreateImageVersionCommandInput, CreateImageVersionCommandOutput } from import { CreateLabelingJobCommandInput, CreateLabelingJobCommandOutput } from "./commands/CreateLabelingJobCommand"; import { CreateModelCommandInput, CreateModelCommandOutput } from "./commands/CreateModelCommand"; import { CreateModelPackageCommandInput, CreateModelPackageCommandOutput } from "./commands/CreateModelPackageCommand"; +import { + CreateModelPackageGroupCommandInput, + CreateModelPackageGroupCommandOutput, +} from "./commands/CreateModelPackageGroupCommand"; import { CreateMonitoringScheduleCommandInput, CreateMonitoringScheduleCommandOutput, @@ -51,6 +60,7 @@ import { CreateNotebookInstanceLifecycleConfigCommandInput, CreateNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/CreateNotebookInstanceLifecycleConfigCommand"; +import { CreatePipelineCommandInput, CreatePipelineCommandOutput } from "./commands/CreatePipelineCommand"; import { CreatePresignedDomainUrlCommandInput, CreatePresignedDomainUrlCommandOutput, @@ -63,6 +73,7 @@ import { CreateProcessingJobCommandInput, CreateProcessingJobCommandOutput, } from "./commands/CreateProcessingJobCommand"; +import { CreateProjectCommandInput, CreateProjectCommandOutput } from "./commands/CreateProjectCommand"; import { CreateTrainingJobCommandInput, CreateTrainingJobCommandOutput } from "./commands/CreateTrainingJobCommand"; import { CreateTransformJobCommandInput, CreateTransformJobCommandOutput } from "./commands/CreateTransformJobCommand"; import { CreateTrialCommandInput, CreateTrialCommandOutput } from "./commands/CreateTrialCommand"; @@ -73,16 +84,20 @@ import { import { CreateUserProfileCommandInput, CreateUserProfileCommandOutput } from "./commands/CreateUserProfileCommand"; import { 
CreateWorkforceCommandInput, CreateWorkforceCommandOutput } from "./commands/CreateWorkforceCommand"; import { CreateWorkteamCommandInput, CreateWorkteamCommandOutput } from "./commands/CreateWorkteamCommand"; +import { DeleteActionCommandInput, DeleteActionCommandOutput } from "./commands/DeleteActionCommand"; import { DeleteAlgorithmCommandInput, DeleteAlgorithmCommandOutput } from "./commands/DeleteAlgorithmCommand"; import { DeleteAppCommandInput, DeleteAppCommandOutput } from "./commands/DeleteAppCommand"; import { DeleteAppImageConfigCommandInput, DeleteAppImageConfigCommandOutput, } from "./commands/DeleteAppImageConfigCommand"; +import { DeleteArtifactCommandInput, DeleteArtifactCommandOutput } from "./commands/DeleteArtifactCommand"; +import { DeleteAssociationCommandInput, DeleteAssociationCommandOutput } from "./commands/DeleteAssociationCommand"; import { DeleteCodeRepositoryCommandInput, DeleteCodeRepositoryCommandOutput, } from "./commands/DeleteCodeRepositoryCommand"; +import { DeleteContextCommandInput, DeleteContextCommandOutput } from "./commands/DeleteContextCommand"; import { DeleteDomainCommandInput, DeleteDomainCommandOutput } from "./commands/DeleteDomainCommand"; import { DeleteEndpointCommandInput, DeleteEndpointCommandOutput } from "./commands/DeleteEndpointCommand"; import { @@ -90,6 +105,7 @@ import { DeleteEndpointConfigCommandOutput, } from "./commands/DeleteEndpointConfigCommand"; import { DeleteExperimentCommandInput, DeleteExperimentCommandOutput } from "./commands/DeleteExperimentCommand"; +import { DeleteFeatureGroupCommandInput, DeleteFeatureGroupCommandOutput } from "./commands/DeleteFeatureGroupCommand"; import { DeleteFlowDefinitionCommandInput, DeleteFlowDefinitionCommandOutput, @@ -99,6 +115,14 @@ import { DeleteImageCommandInput, DeleteImageCommandOutput } from "./commands/De import { DeleteImageVersionCommandInput, DeleteImageVersionCommandOutput } from "./commands/DeleteImageVersionCommand"; import { 
DeleteModelCommandInput, DeleteModelCommandOutput } from "./commands/DeleteModelCommand"; import { DeleteModelPackageCommandInput, DeleteModelPackageCommandOutput } from "./commands/DeleteModelPackageCommand"; +import { + DeleteModelPackageGroupCommandInput, + DeleteModelPackageGroupCommandOutput, +} from "./commands/DeleteModelPackageGroupCommand"; +import { + DeleteModelPackageGroupPolicyCommandInput, + DeleteModelPackageGroupPolicyCommandOutput, +} from "./commands/DeleteModelPackageGroupPolicyCommand"; import { DeleteMonitoringScheduleCommandInput, DeleteMonitoringScheduleCommandOutput, @@ -111,6 +135,8 @@ import { DeleteNotebookInstanceLifecycleConfigCommandInput, DeleteNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/DeleteNotebookInstanceLifecycleConfigCommand"; +import { DeletePipelineCommandInput, DeletePipelineCommandOutput } from "./commands/DeletePipelineCommand"; +import { DeleteProjectCommandInput, DeleteProjectCommandOutput } from "./commands/DeleteProjectCommand"; import { DeleteTagsCommandInput, DeleteTagsCommandOutput } from "./commands/DeleteTagsCommand"; import { DeleteTrialCommandInput, DeleteTrialCommandOutput } from "./commands/DeleteTrialCommand"; import { @@ -120,12 +146,14 @@ import { import { DeleteUserProfileCommandInput, DeleteUserProfileCommandOutput } from "./commands/DeleteUserProfileCommand"; import { DeleteWorkforceCommandInput, DeleteWorkforceCommandOutput } from "./commands/DeleteWorkforceCommand"; import { DeleteWorkteamCommandInput, DeleteWorkteamCommandOutput } from "./commands/DeleteWorkteamCommand"; +import { DescribeActionCommandInput, DescribeActionCommandOutput } from "./commands/DescribeActionCommand"; import { DescribeAlgorithmCommandInput, DescribeAlgorithmCommandOutput } from "./commands/DescribeAlgorithmCommand"; import { DescribeAppCommandInput, DescribeAppCommandOutput } from "./commands/DescribeAppCommand"; import { DescribeAppImageConfigCommandInput, DescribeAppImageConfigCommandOutput, } from 
"./commands/DescribeAppImageConfigCommand"; +import { DescribeArtifactCommandInput, DescribeArtifactCommandOutput } from "./commands/DescribeArtifactCommand"; import { DescribeAutoMLJobCommandInput, DescribeAutoMLJobCommandOutput } from "./commands/DescribeAutoMLJobCommand"; import { DescribeCodeRepositoryCommandInput, @@ -135,6 +163,7 @@ import { DescribeCompilationJobCommandInput, DescribeCompilationJobCommandOutput, } from "./commands/DescribeCompilationJobCommand"; +import { DescribeContextCommandInput, DescribeContextCommandOutput } from "./commands/DescribeContextCommand"; import { DescribeDomainCommandInput, DescribeDomainCommandOutput } from "./commands/DescribeDomainCommand"; import { DescribeEndpointCommandInput, DescribeEndpointCommandOutput } from "./commands/DescribeEndpointCommand"; import { @@ -142,6 +171,10 @@ import { DescribeEndpointConfigCommandOutput, } from "./commands/DescribeEndpointConfigCommand"; import { DescribeExperimentCommandInput, DescribeExperimentCommandOutput } from "./commands/DescribeExperimentCommand"; +import { + DescribeFeatureGroupCommandInput, + DescribeFeatureGroupCommandOutput, +} from "./commands/DescribeFeatureGroupCommand"; import { DescribeFlowDefinitionCommandInput, DescribeFlowDefinitionCommandOutput, @@ -168,6 +201,10 @@ import { DescribeModelPackageCommandInput, DescribeModelPackageCommandOutput, } from "./commands/DescribeModelPackageCommand"; +import { + DescribeModelPackageGroupCommandInput, + DescribeModelPackageGroupCommandOutput, +} from "./commands/DescribeModelPackageGroupCommand"; import { DescribeMonitoringScheduleCommandInput, DescribeMonitoringScheduleCommandOutput, @@ -180,10 +217,20 @@ import { DescribeNotebookInstanceLifecycleConfigCommandInput, DescribeNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/DescribeNotebookInstanceLifecycleConfigCommand"; +import { DescribePipelineCommandInput, DescribePipelineCommandOutput } from "./commands/DescribePipelineCommand"; +import { + 
DescribePipelineDefinitionForExecutionCommandInput, + DescribePipelineDefinitionForExecutionCommandOutput, +} from "./commands/DescribePipelineDefinitionForExecutionCommand"; +import { + DescribePipelineExecutionCommandInput, + DescribePipelineExecutionCommandOutput, +} from "./commands/DescribePipelineExecutionCommand"; import { DescribeProcessingJobCommandInput, DescribeProcessingJobCommandOutput, } from "./commands/DescribeProcessingJobCommand"; +import { DescribeProjectCommandInput, DescribeProjectCommandOutput } from "./commands/DescribeProjectCommand"; import { DescribeSubscribedWorkteamCommandInput, DescribeSubscribedWorkteamCommandOutput, @@ -207,20 +254,39 @@ import { } from "./commands/DescribeUserProfileCommand"; import { DescribeWorkforceCommandInput, DescribeWorkforceCommandOutput } from "./commands/DescribeWorkforceCommand"; import { DescribeWorkteamCommandInput, DescribeWorkteamCommandOutput } from "./commands/DescribeWorkteamCommand"; +import { + DisableSagemakerServicecatalogPortfolioCommandInput, + DisableSagemakerServicecatalogPortfolioCommandOutput, +} from "./commands/DisableSagemakerServicecatalogPortfolioCommand"; import { DisassociateTrialComponentCommandInput, DisassociateTrialComponentCommandOutput, } from "./commands/DisassociateTrialComponentCommand"; +import { + EnableSagemakerServicecatalogPortfolioCommandInput, + EnableSagemakerServicecatalogPortfolioCommandOutput, +} from "./commands/EnableSagemakerServicecatalogPortfolioCommand"; +import { + GetModelPackageGroupPolicyCommandInput, + GetModelPackageGroupPolicyCommandOutput, +} from "./commands/GetModelPackageGroupPolicyCommand"; +import { + GetSagemakerServicecatalogPortfolioStatusCommandInput, + GetSagemakerServicecatalogPortfolioStatusCommandOutput, +} from "./commands/GetSagemakerServicecatalogPortfolioStatusCommand"; import { GetSearchSuggestionsCommandInput, GetSearchSuggestionsCommandOutput, } from "./commands/GetSearchSuggestionsCommand"; +import { ListActionsCommandInput, 
ListActionsCommandOutput } from "./commands/ListActionsCommand"; import { ListAlgorithmsCommandInput, ListAlgorithmsCommandOutput } from "./commands/ListAlgorithmsCommand"; import { ListAppImageConfigsCommandInput, ListAppImageConfigsCommandOutput, } from "./commands/ListAppImageConfigsCommand"; import { ListAppsCommandInput, ListAppsCommandOutput } from "./commands/ListAppsCommand"; +import { ListArtifactsCommandInput, ListArtifactsCommandOutput } from "./commands/ListArtifactsCommand"; +import { ListAssociationsCommandInput, ListAssociationsCommandOutput } from "./commands/ListAssociationsCommand"; import { ListAutoMLJobsCommandInput, ListAutoMLJobsCommandOutput } from "./commands/ListAutoMLJobsCommand"; import { ListCandidatesForAutoMLJobCommandInput, @@ -234,6 +300,7 @@ import { ListCompilationJobsCommandInput, ListCompilationJobsCommandOutput, } from "./commands/ListCompilationJobsCommand"; +import { ListContextsCommandInput, ListContextsCommandOutput } from "./commands/ListContextsCommand"; import { ListDomainsCommandInput, ListDomainsCommandOutput } from "./commands/ListDomainsCommand"; import { ListEndpointConfigsCommandInput, @@ -241,6 +308,7 @@ import { } from "./commands/ListEndpointConfigsCommand"; import { ListEndpointsCommandInput, ListEndpointsCommandOutput } from "./commands/ListEndpointsCommand"; import { ListExperimentsCommandInput, ListExperimentsCommandOutput } from "./commands/ListExperimentsCommand"; +import { ListFeatureGroupsCommandInput, ListFeatureGroupsCommandOutput } from "./commands/ListFeatureGroupsCommand"; import { ListFlowDefinitionsCommandInput, ListFlowDefinitionsCommandOutput, @@ -257,6 +325,10 @@ import { ListLabelingJobsForWorkteamCommandInput, ListLabelingJobsForWorkteamCommandOutput, } from "./commands/ListLabelingJobsForWorkteamCommand"; +import { + ListModelPackageGroupsCommandInput, + ListModelPackageGroupsCommandOutput, +} from "./commands/ListModelPackageGroupsCommand"; import { ListModelPackagesCommandInput, 
ListModelPackagesCommandOutput } from "./commands/ListModelPackagesCommand"; import { ListModelsCommandInput, ListModelsCommandOutput } from "./commands/ListModelsCommand"; import { @@ -275,7 +347,21 @@ import { ListNotebookInstancesCommandInput, ListNotebookInstancesCommandOutput, } from "./commands/ListNotebookInstancesCommand"; +import { + ListPipelineExecutionStepsCommandInput, + ListPipelineExecutionStepsCommandOutput, +} from "./commands/ListPipelineExecutionStepsCommand"; +import { + ListPipelineExecutionsCommandInput, + ListPipelineExecutionsCommandOutput, +} from "./commands/ListPipelineExecutionsCommand"; +import { + ListPipelineParametersForExecutionCommandInput, + ListPipelineParametersForExecutionCommandOutput, +} from "./commands/ListPipelineParametersForExecutionCommand"; +import { ListPipelinesCommandInput, ListPipelinesCommandOutput } from "./commands/ListPipelinesCommand"; import { ListProcessingJobsCommandInput, ListProcessingJobsCommandOutput } from "./commands/ListProcessingJobsCommand"; +import { ListProjectsCommandInput, ListProjectsCommandOutput } from "./commands/ListProjectsCommand"; import { ListSubscribedWorkteamsCommandInput, ListSubscribedWorkteamsCommandOutput, @@ -295,6 +381,10 @@ import { ListTrialsCommandInput, ListTrialsCommandOutput } from "./commands/List import { ListUserProfilesCommandInput, ListUserProfilesCommandOutput } from "./commands/ListUserProfilesCommand"; import { ListWorkforcesCommandInput, ListWorkforcesCommandOutput } from "./commands/ListWorkforcesCommand"; import { ListWorkteamsCommandInput, ListWorkteamsCommandOutput } from "./commands/ListWorkteamsCommand"; +import { + PutModelPackageGroupPolicyCommandInput, + PutModelPackageGroupPolicyCommandOutput, +} from "./commands/PutModelPackageGroupPolicyCommand"; import { RenderUiTemplateCommandInput, RenderUiTemplateCommandOutput } from "./commands/RenderUiTemplateCommand"; import { SearchCommandInput, SearchCommandOutput } from "./commands/SearchCommand"; import { 
@@ -305,6 +395,10 @@ import { StartNotebookInstanceCommandInput, StartNotebookInstanceCommandOutput, } from "./commands/StartNotebookInstanceCommand"; +import { + StartPipelineExecutionCommandInput, + StartPipelineExecutionCommandOutput, +} from "./commands/StartPipelineExecutionCommand"; import { StopAutoMLJobCommandInput, StopAutoMLJobCommandOutput } from "./commands/StopAutoMLJobCommand"; import { StopCompilationJobCommandInput, StopCompilationJobCommandOutput } from "./commands/StopCompilationJobCommand"; import { @@ -320,17 +414,24 @@ import { StopNotebookInstanceCommandInput, StopNotebookInstanceCommandOutput, } from "./commands/StopNotebookInstanceCommand"; +import { + StopPipelineExecutionCommandInput, + StopPipelineExecutionCommandOutput, +} from "./commands/StopPipelineExecutionCommand"; import { StopProcessingJobCommandInput, StopProcessingJobCommandOutput } from "./commands/StopProcessingJobCommand"; import { StopTrainingJobCommandInput, StopTrainingJobCommandOutput } from "./commands/StopTrainingJobCommand"; import { StopTransformJobCommandInput, StopTransformJobCommandOutput } from "./commands/StopTransformJobCommand"; +import { UpdateActionCommandInput, UpdateActionCommandOutput } from "./commands/UpdateActionCommand"; import { UpdateAppImageConfigCommandInput, UpdateAppImageConfigCommandOutput, } from "./commands/UpdateAppImageConfigCommand"; +import { UpdateArtifactCommandInput, UpdateArtifactCommandOutput } from "./commands/UpdateArtifactCommand"; import { UpdateCodeRepositoryCommandInput, UpdateCodeRepositoryCommandOutput, } from "./commands/UpdateCodeRepositoryCommand"; +import { UpdateContextCommandInput, UpdateContextCommandOutput } from "./commands/UpdateContextCommand"; import { UpdateDomainCommandInput, UpdateDomainCommandOutput } from "./commands/UpdateDomainCommand"; import { UpdateEndpointCommandInput, UpdateEndpointCommandOutput } from "./commands/UpdateEndpointCommand"; import { @@ -339,6 +440,7 @@ import { } from 
"./commands/UpdateEndpointWeightsAndCapacitiesCommand"; import { UpdateExperimentCommandInput, UpdateExperimentCommandOutput } from "./commands/UpdateExperimentCommand"; import { UpdateImageCommandInput, UpdateImageCommandOutput } from "./commands/UpdateImageCommand"; +import { UpdateModelPackageCommandInput, UpdateModelPackageCommandOutput } from "./commands/UpdateModelPackageCommand"; import { UpdateMonitoringScheduleCommandInput, UpdateMonitoringScheduleCommandOutput, @@ -351,6 +453,11 @@ import { UpdateNotebookInstanceLifecycleConfigCommandInput, UpdateNotebookInstanceLifecycleConfigCommandOutput, } from "./commands/UpdateNotebookInstanceLifecycleConfigCommand"; +import { UpdatePipelineCommandInput, UpdatePipelineCommandOutput } from "./commands/UpdatePipelineCommand"; +import { + UpdatePipelineExecutionCommandInput, + UpdatePipelineExecutionCommandOutput, +} from "./commands/UpdatePipelineExecutionCommand"; import { UpdateTrialCommandInput, UpdateTrialCommandOutput } from "./commands/UpdateTrialCommand"; import { UpdateTrialComponentCommandInput, @@ -409,18 +516,23 @@ import { } from "@aws-sdk/types"; export type ServiceInputTypes = + | AddAssociationCommandInput | AddTagsCommandInput | AssociateTrialComponentCommandInput + | CreateActionCommandInput | CreateAlgorithmCommandInput | CreateAppCommandInput | CreateAppImageConfigCommandInput + | CreateArtifactCommandInput | CreateAutoMLJobCommandInput | CreateCodeRepositoryCommandInput | CreateCompilationJobCommandInput + | CreateContextCommandInput | CreateDomainCommandInput | CreateEndpointCommandInput | CreateEndpointConfigCommandInput | CreateExperimentCommandInput + | CreateFeatureGroupCommandInput | CreateFlowDefinitionCommandInput | CreateHumanTaskUiCommandInput | CreateHyperParameterTuningJobCommandInput @@ -429,12 +541,15 @@ export type ServiceInputTypes = | CreateLabelingJobCommandInput | CreateModelCommandInput | CreateModelPackageCommandInput + | CreateModelPackageGroupCommandInput | 
CreateMonitoringScheduleCommandInput | CreateNotebookInstanceCommandInput | CreateNotebookInstanceLifecycleConfigCommandInput + | CreatePipelineCommandInput | CreatePresignedDomainUrlCommandInput | CreatePresignedNotebookInstanceUrlCommandInput | CreateProcessingJobCommandInput + | CreateProjectCommandInput | CreateTrainingJobCommandInput | CreateTransformJobCommandInput | CreateTrialCommandInput @@ -442,39 +557,52 @@ export type ServiceInputTypes = | CreateUserProfileCommandInput | CreateWorkforceCommandInput | CreateWorkteamCommandInput + | DeleteActionCommandInput | DeleteAlgorithmCommandInput | DeleteAppCommandInput | DeleteAppImageConfigCommandInput + | DeleteArtifactCommandInput + | DeleteAssociationCommandInput | DeleteCodeRepositoryCommandInput + | DeleteContextCommandInput | DeleteDomainCommandInput | DeleteEndpointCommandInput | DeleteEndpointConfigCommandInput | DeleteExperimentCommandInput + | DeleteFeatureGroupCommandInput | DeleteFlowDefinitionCommandInput | DeleteHumanTaskUiCommandInput | DeleteImageCommandInput | DeleteImageVersionCommandInput | DeleteModelCommandInput | DeleteModelPackageCommandInput + | DeleteModelPackageGroupCommandInput + | DeleteModelPackageGroupPolicyCommandInput | DeleteMonitoringScheduleCommandInput | DeleteNotebookInstanceCommandInput | DeleteNotebookInstanceLifecycleConfigCommandInput + | DeletePipelineCommandInput + | DeleteProjectCommandInput | DeleteTagsCommandInput | DeleteTrialCommandInput | DeleteTrialComponentCommandInput | DeleteUserProfileCommandInput | DeleteWorkforceCommandInput | DeleteWorkteamCommandInput + | DescribeActionCommandInput | DescribeAlgorithmCommandInput | DescribeAppCommandInput | DescribeAppImageConfigCommandInput + | DescribeArtifactCommandInput | DescribeAutoMLJobCommandInput | DescribeCodeRepositoryCommandInput | DescribeCompilationJobCommandInput + | DescribeContextCommandInput | DescribeDomainCommandInput | DescribeEndpointCommandInput | DescribeEndpointConfigCommandInput | 
DescribeExperimentCommandInput + | DescribeFeatureGroupCommandInput | DescribeFlowDefinitionCommandInput | DescribeHumanTaskUiCommandInput | DescribeHyperParameterTuningJobCommandInput @@ -483,10 +611,15 @@ export type ServiceInputTypes = | DescribeLabelingJobCommandInput | DescribeModelCommandInput | DescribeModelPackageCommandInput + | DescribeModelPackageGroupCommandInput | DescribeMonitoringScheduleCommandInput | DescribeNotebookInstanceCommandInput | DescribeNotebookInstanceLifecycleConfigCommandInput + | DescribePipelineCommandInput + | DescribePipelineDefinitionForExecutionCommandInput + | DescribePipelineExecutionCommandInput | DescribeProcessingJobCommandInput + | DescribeProjectCommandInput | DescribeSubscribedWorkteamCommandInput | DescribeTrainingJobCommandInput | DescribeTransformJobCommandInput @@ -495,19 +628,28 @@ export type ServiceInputTypes = | DescribeUserProfileCommandInput | DescribeWorkforceCommandInput | DescribeWorkteamCommandInput + | DisableSagemakerServicecatalogPortfolioCommandInput | DisassociateTrialComponentCommandInput + | EnableSagemakerServicecatalogPortfolioCommandInput + | GetModelPackageGroupPolicyCommandInput + | GetSagemakerServicecatalogPortfolioStatusCommandInput | GetSearchSuggestionsCommandInput + | ListActionsCommandInput | ListAlgorithmsCommandInput | ListAppImageConfigsCommandInput | ListAppsCommandInput + | ListArtifactsCommandInput + | ListAssociationsCommandInput | ListAutoMLJobsCommandInput | ListCandidatesForAutoMLJobCommandInput | ListCodeRepositoriesCommandInput | ListCompilationJobsCommandInput + | ListContextsCommandInput | ListDomainsCommandInput | ListEndpointConfigsCommandInput | ListEndpointsCommandInput | ListExperimentsCommandInput + | ListFeatureGroupsCommandInput | ListFlowDefinitionsCommandInput | ListHumanTaskUisCommandInput | ListHyperParameterTuningJobsCommandInput @@ -515,13 +657,19 @@ export type ServiceInputTypes = | ListImagesCommandInput | ListLabelingJobsCommandInput | 
ListLabelingJobsForWorkteamCommandInput + | ListModelPackageGroupsCommandInput | ListModelPackagesCommandInput | ListModelsCommandInput | ListMonitoringExecutionsCommandInput | ListMonitoringSchedulesCommandInput | ListNotebookInstanceLifecycleConfigsCommandInput | ListNotebookInstancesCommandInput + | ListPipelineExecutionStepsCommandInput + | ListPipelineExecutionsCommandInput + | ListPipelineParametersForExecutionCommandInput + | ListPipelinesCommandInput | ListProcessingJobsCommandInput + | ListProjectsCommandInput | ListSubscribedWorkteamsCommandInput | ListTagsCommandInput | ListTrainingJobsCommandInput @@ -532,29 +680,38 @@ export type ServiceInputTypes = | ListUserProfilesCommandInput | ListWorkforcesCommandInput | ListWorkteamsCommandInput + | PutModelPackageGroupPolicyCommandInput | RenderUiTemplateCommandInput | SearchCommandInput | StartMonitoringScheduleCommandInput | StartNotebookInstanceCommandInput + | StartPipelineExecutionCommandInput | StopAutoMLJobCommandInput | StopCompilationJobCommandInput | StopHyperParameterTuningJobCommandInput | StopLabelingJobCommandInput | StopMonitoringScheduleCommandInput | StopNotebookInstanceCommandInput + | StopPipelineExecutionCommandInput | StopProcessingJobCommandInput | StopTrainingJobCommandInput | StopTransformJobCommandInput + | UpdateActionCommandInput | UpdateAppImageConfigCommandInput + | UpdateArtifactCommandInput | UpdateCodeRepositoryCommandInput + | UpdateContextCommandInput | UpdateDomainCommandInput | UpdateEndpointCommandInput | UpdateEndpointWeightsAndCapacitiesCommandInput | UpdateExperimentCommandInput | UpdateImageCommandInput + | UpdateModelPackageCommandInput | UpdateMonitoringScheduleCommandInput | UpdateNotebookInstanceCommandInput | UpdateNotebookInstanceLifecycleConfigCommandInput + | UpdatePipelineCommandInput + | UpdatePipelineExecutionCommandInput | UpdateTrialCommandInput | UpdateTrialComponentCommandInput | UpdateUserProfileCommandInput @@ -562,18 +719,23 @@ export type 
ServiceInputTypes = | UpdateWorkteamCommandInput; export type ServiceOutputTypes = + | AddAssociationCommandOutput | AddTagsCommandOutput | AssociateTrialComponentCommandOutput + | CreateActionCommandOutput | CreateAlgorithmCommandOutput | CreateAppCommandOutput | CreateAppImageConfigCommandOutput + | CreateArtifactCommandOutput | CreateAutoMLJobCommandOutput | CreateCodeRepositoryCommandOutput | CreateCompilationJobCommandOutput + | CreateContextCommandOutput | CreateDomainCommandOutput | CreateEndpointCommandOutput | CreateEndpointConfigCommandOutput | CreateExperimentCommandOutput + | CreateFeatureGroupCommandOutput | CreateFlowDefinitionCommandOutput | CreateHumanTaskUiCommandOutput | CreateHyperParameterTuningJobCommandOutput @@ -582,12 +744,15 @@ export type ServiceOutputTypes = | CreateLabelingJobCommandOutput | CreateModelCommandOutput | CreateModelPackageCommandOutput + | CreateModelPackageGroupCommandOutput | CreateMonitoringScheduleCommandOutput | CreateNotebookInstanceCommandOutput | CreateNotebookInstanceLifecycleConfigCommandOutput + | CreatePipelineCommandOutput | CreatePresignedDomainUrlCommandOutput | CreatePresignedNotebookInstanceUrlCommandOutput | CreateProcessingJobCommandOutput + | CreateProjectCommandOutput | CreateTrainingJobCommandOutput | CreateTransformJobCommandOutput | CreateTrialCommandOutput @@ -595,39 +760,52 @@ export type ServiceOutputTypes = | CreateUserProfileCommandOutput | CreateWorkforceCommandOutput | CreateWorkteamCommandOutput + | DeleteActionCommandOutput | DeleteAlgorithmCommandOutput | DeleteAppCommandOutput | DeleteAppImageConfigCommandOutput + | DeleteArtifactCommandOutput + | DeleteAssociationCommandOutput | DeleteCodeRepositoryCommandOutput + | DeleteContextCommandOutput | DeleteDomainCommandOutput | DeleteEndpointCommandOutput | DeleteEndpointConfigCommandOutput | DeleteExperimentCommandOutput + | DeleteFeatureGroupCommandOutput | DeleteFlowDefinitionCommandOutput | DeleteHumanTaskUiCommandOutput | 
DeleteImageCommandOutput | DeleteImageVersionCommandOutput | DeleteModelCommandOutput | DeleteModelPackageCommandOutput + | DeleteModelPackageGroupCommandOutput + | DeleteModelPackageGroupPolicyCommandOutput | DeleteMonitoringScheduleCommandOutput | DeleteNotebookInstanceCommandOutput | DeleteNotebookInstanceLifecycleConfigCommandOutput + | DeletePipelineCommandOutput + | DeleteProjectCommandOutput | DeleteTagsCommandOutput | DeleteTrialCommandOutput | DeleteTrialComponentCommandOutput | DeleteUserProfileCommandOutput | DeleteWorkforceCommandOutput | DeleteWorkteamCommandOutput + | DescribeActionCommandOutput | DescribeAlgorithmCommandOutput | DescribeAppCommandOutput | DescribeAppImageConfigCommandOutput + | DescribeArtifactCommandOutput | DescribeAutoMLJobCommandOutput | DescribeCodeRepositoryCommandOutput | DescribeCompilationJobCommandOutput + | DescribeContextCommandOutput | DescribeDomainCommandOutput | DescribeEndpointCommandOutput | DescribeEndpointConfigCommandOutput | DescribeExperimentCommandOutput + | DescribeFeatureGroupCommandOutput | DescribeFlowDefinitionCommandOutput | DescribeHumanTaskUiCommandOutput | DescribeHyperParameterTuningJobCommandOutput @@ -636,10 +814,15 @@ export type ServiceOutputTypes = | DescribeLabelingJobCommandOutput | DescribeModelCommandOutput | DescribeModelPackageCommandOutput + | DescribeModelPackageGroupCommandOutput | DescribeMonitoringScheduleCommandOutput | DescribeNotebookInstanceCommandOutput | DescribeNotebookInstanceLifecycleConfigCommandOutput + | DescribePipelineCommandOutput + | DescribePipelineDefinitionForExecutionCommandOutput + | DescribePipelineExecutionCommandOutput | DescribeProcessingJobCommandOutput + | DescribeProjectCommandOutput | DescribeSubscribedWorkteamCommandOutput | DescribeTrainingJobCommandOutput | DescribeTransformJobCommandOutput @@ -648,19 +831,28 @@ export type ServiceOutputTypes = | DescribeUserProfileCommandOutput | DescribeWorkforceCommandOutput | DescribeWorkteamCommandOutput + | 
DisableSagemakerServicecatalogPortfolioCommandOutput | DisassociateTrialComponentCommandOutput + | EnableSagemakerServicecatalogPortfolioCommandOutput + | GetModelPackageGroupPolicyCommandOutput + | GetSagemakerServicecatalogPortfolioStatusCommandOutput | GetSearchSuggestionsCommandOutput + | ListActionsCommandOutput | ListAlgorithmsCommandOutput | ListAppImageConfigsCommandOutput | ListAppsCommandOutput + | ListArtifactsCommandOutput + | ListAssociationsCommandOutput | ListAutoMLJobsCommandOutput | ListCandidatesForAutoMLJobCommandOutput | ListCodeRepositoriesCommandOutput | ListCompilationJobsCommandOutput + | ListContextsCommandOutput | ListDomainsCommandOutput | ListEndpointConfigsCommandOutput | ListEndpointsCommandOutput | ListExperimentsCommandOutput + | ListFeatureGroupsCommandOutput | ListFlowDefinitionsCommandOutput | ListHumanTaskUisCommandOutput | ListHyperParameterTuningJobsCommandOutput @@ -668,13 +860,19 @@ export type ServiceOutputTypes = | ListImagesCommandOutput | ListLabelingJobsCommandOutput | ListLabelingJobsForWorkteamCommandOutput + | ListModelPackageGroupsCommandOutput | ListModelPackagesCommandOutput | ListModelsCommandOutput | ListMonitoringExecutionsCommandOutput | ListMonitoringSchedulesCommandOutput | ListNotebookInstanceLifecycleConfigsCommandOutput | ListNotebookInstancesCommandOutput + | ListPipelineExecutionStepsCommandOutput + | ListPipelineExecutionsCommandOutput + | ListPipelineParametersForExecutionCommandOutput + | ListPipelinesCommandOutput | ListProcessingJobsCommandOutput + | ListProjectsCommandOutput | ListSubscribedWorkteamsCommandOutput | ListTagsCommandOutput | ListTrainingJobsCommandOutput @@ -685,29 +883,38 @@ export type ServiceOutputTypes = | ListUserProfilesCommandOutput | ListWorkforcesCommandOutput | ListWorkteamsCommandOutput + | PutModelPackageGroupPolicyCommandOutput | RenderUiTemplateCommandOutput | SearchCommandOutput | StartMonitoringScheduleCommandOutput | StartNotebookInstanceCommandOutput + | 
StartPipelineExecutionCommandOutput | StopAutoMLJobCommandOutput | StopCompilationJobCommandOutput | StopHyperParameterTuningJobCommandOutput | StopLabelingJobCommandOutput | StopMonitoringScheduleCommandOutput | StopNotebookInstanceCommandOutput + | StopPipelineExecutionCommandOutput | StopProcessingJobCommandOutput | StopTrainingJobCommandOutput | StopTransformJobCommandOutput + | UpdateActionCommandOutput | UpdateAppImageConfigCommandOutput + | UpdateArtifactCommandOutput | UpdateCodeRepositoryCommandOutput + | UpdateContextCommandOutput | UpdateDomainCommandOutput | UpdateEndpointCommandOutput | UpdateEndpointWeightsAndCapacitiesCommandOutput | UpdateExperimentCommandOutput | UpdateImageCommandOutput + | UpdateModelPackageCommandOutput | UpdateMonitoringScheduleCommandOutput | UpdateNotebookInstanceCommandOutput | UpdateNotebookInstanceLifecycleConfigCommandOutput + | UpdatePipelineCommandOutput + | UpdatePipelineExecutionCommandOutput | UpdateTrialCommandOutput | UpdateTrialComponentCommandOutput | UpdateUserProfileCommandOutput diff --git a/clients/client-sagemaker/commands/AddAssociationCommand.ts b/clients/client-sagemaker/commands/AddAssociationCommand.ts new file mode 100644 index 000000000000..6da3f770e671 --- /dev/null +++ b/clients/client-sagemaker/commands/AddAssociationCommand.ts @@ -0,0 +1,92 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { AddAssociationRequest, AddAssociationResponse } from "../models/models_0"; +import { + deserializeAws_json1_1AddAssociationCommand, + serializeAws_json1_1AddAssociationCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + 
HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type AddAssociationCommandInput = AddAssociationRequest; +export type AddAssociationCommandOutput = AddAssociationResponse & __MetadataBearer; + +/** + *

    Creates an association between the source and the destination. A + * source can be associated with multiple destinations, and a destination can be associated + * with multiple sources. An association is a lineage tracking entity. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ +export class AddAssociationCommand extends $Command< + AddAssociationCommandInput, + AddAssociationCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: AddAssociationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "AddAssociationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: AddAssociationRequest.filterSensitiveLog, + outputFilterSensitiveLog: AddAssociationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: AddAssociationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1AddAssociationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1AddAssociationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreateActionCommand.ts b/clients/client-sagemaker/commands/CreateActionCommand.ts new file mode 100644 index 
000000000000..3006341ec7f0 --- /dev/null +++ b/clients/client-sagemaker/commands/CreateActionCommand.ts @@ -0,0 +1,92 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreateActionRequest, CreateActionResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CreateActionCommand, + serializeAws_json1_1CreateActionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateActionCommandInput = CreateActionRequest; +export type CreateActionCommandOutput = CreateActionResponse & __MetadataBearer; + +/** + *

    Creates an action. An action is a lineage tracking entity that + * represents an action or activity. For example, a model deployment or an HPO job. + * Generally, an action involves at least one input or output artifact. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ +export class CreateActionCommand extends $Command< + CreateActionCommandInput, + CreateActionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateActionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreateActionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateActionRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateActionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateActionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateActionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateActionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreateArtifactCommand.ts b/clients/client-sagemaker/commands/CreateArtifactCommand.ts new file mode 100644 index 000000000000..9e451bbb2332 
--- /dev/null +++ b/clients/client-sagemaker/commands/CreateArtifactCommand.ts @@ -0,0 +1,92 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreateArtifactRequest, CreateArtifactResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CreateArtifactCommand, + serializeAws_json1_1CreateArtifactCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateArtifactCommandInput = CreateArtifactRequest; +export type CreateArtifactCommandOutput = CreateArtifactResponse & __MetadataBearer; + +/** + *

    Creates an artifact. An artifact is a lineage tracking entity that + * represents a URI addressable object or data. Some examples are the S3 URI of a dataset and + * the ECR registry path of an image. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ +export class CreateArtifactCommand extends $Command< + CreateArtifactCommandInput, + CreateArtifactCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateArtifactCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreateArtifactCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateArtifactRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateArtifactResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateArtifactCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateArtifactCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateArtifactCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreateContextCommand.ts b/clients/client-sagemaker/commands/CreateContextCommand.ts new file mode 100644 index 
000000000000..57cdcc9efe72 --- /dev/null +++ b/clients/client-sagemaker/commands/CreateContextCommand.ts @@ -0,0 +1,92 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreateContextRequest, CreateContextResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CreateContextCommand, + serializeAws_json1_1CreateContextCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateContextCommandInput = CreateContextRequest; +export type CreateContextCommandOutput = CreateContextResponse & __MetadataBearer; + +/** + *

    Creates a context. A context is a lineage tracking entity that + * represents a logical grouping of other tracking or experiment entities. Some examples are + * an endpoint and a model package. For more information, see + * Amazon SageMaker + * ML Lineage Tracking.

    + */ +export class CreateContextCommand extends $Command< + CreateContextCommandInput, + CreateContextCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateContextCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreateContextCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateContextRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateContextResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateContextCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateContextCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateContextCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreateDomainCommand.ts b/clients/client-sagemaker/commands/CreateDomainCommand.ts index 68dbadfa9346..b10243da7c83 100644 --- 
a/clients/client-sagemaker/commands/CreateDomainCommand.ts +++ b/clients/client-sagemaker/commands/CreateDomainCommand.ts @@ -25,9 +25,19 @@ export type CreateDomainCommandOutput = CreateDomainResponse & __MetadataBearer; * Amazon Elastic File System (EFS) volume, a list of authorized users, and a variety of security, application, * policy, and Amazon Virtual Private Cloud (VPC) configurations. An AWS account is limited to one domain per region. * Users within a domain can share notebook files and other artifacts with each other.

    + * + *

    + * EFS storage + *

    *

    When a domain is created, an EFS volume is created for use by all of the users within the * domain. Each user receives a private home directory within the EFS volume for notebooks, * Git repositories, and data files.

    + *

    SageMaker uses the AWS Key Management Service (AWS KMS) to encrypt the EFS volume attached to the domain with + * an AWS managed customer master key (CMK) by default. For more control, you can specify a + * customer managed CMK. For more information, see + * Protect Data at + * Rest Using Encryption.

    + * *

    * VPC configuration *

    diff --git a/clients/client-sagemaker/commands/CreateFeatureGroupCommand.ts b/clients/client-sagemaker/commands/CreateFeatureGroupCommand.ts new file mode 100644 index 000000000000..31a2c61872e1 --- /dev/null +++ b/clients/client-sagemaker/commands/CreateFeatureGroupCommand.ts @@ -0,0 +1,100 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreateFeatureGroupRequest, CreateFeatureGroupResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CreateFeatureGroupCommand, + serializeAws_json1_1CreateFeatureGroupCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateFeatureGroupCommandInput = CreateFeatureGroupRequest; +export type CreateFeatureGroupCommandOutput = CreateFeatureGroupResponse & __MetadataBearer; + +/** + *

    Create a new FeatureGroup. A FeatureGroup is a group of + * Features defined in the FeatureStore to describe a + * Record.

    + *

    The FeatureGroup defines the schema and features contained in the + * FeatureGroup. A FeatureGroup definition is composed of a list of + * Features, a RecordIdentifierFeatureName, an + * EventTimeFeatureName and configurations for its OnlineStore + * and OfflineStore. Check AWS service quotas to see + * the FeatureGroups quota for your AWS account.

    + * + *

    You must include at least one of OnlineStoreConfig and + * OfflineStoreConfig to create a FeatureGroup.

    + *
    + */ +export class CreateFeatureGroupCommand extends $Command< + CreateFeatureGroupCommandInput, + CreateFeatureGroupCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateFeatureGroupCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreateFeatureGroupCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateFeatureGroupRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreateFeatureGroupResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateFeatureGroupCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateFeatureGroupCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateFeatureGroupCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreateModelPackageCommand.ts 
b/clients/client-sagemaker/commands/CreateModelPackageCommand.ts index cf4377066146..e5159864d728 100644 --- a/clients/client-sagemaker/commands/CreateModelPackageCommand.ts +++ b/clients/client-sagemaker/commands/CreateModelPackageCommand.ts @@ -22,13 +22,24 @@ export type CreateModelPackageCommandOutput = CreateModelPackageOutput & __Metad /** *

    Creates a model package that you can use to create Amazon SageMaker models or list on AWS - * Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create + * Marketplace, or a versioned model that is part of a model group. Buyers can subscribe to model packages listed on AWS Marketplace to create * models in Amazon SageMaker.

    *

    To create a model package by specifying a Docker container that contains your * inference code and the Amazon S3 location of your model artifacts, provide values for * InferenceSpecification. To create a model from an algorithm resource * that you created or subscribed to in AWS Marketplace, provide a value for * SourceAlgorithmSpecification.

    + * + *

    There are two types of model packages:

    + *
      + *
    • + *

      Versioned - a model that is part of a model group in the model registry.

      + *
    • + *
    • + *

      Unversioned - a model package that is not part of a model group.

      + *
    • + *
    + *
    */ export class CreateModelPackageCommand extends $Command< CreateModelPackageCommandInput, diff --git a/clients/client-sagemaker/commands/CreateModelPackageGroupCommand.ts b/clients/client-sagemaker/commands/CreateModelPackageGroupCommand.ts new file mode 100644 index 000000000000..c15721827a0a --- /dev/null +++ b/clients/client-sagemaker/commands/CreateModelPackageGroupCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreateModelPackageGroupInput, CreateModelPackageGroupOutput } from "../models/models_0"; +import { + deserializeAws_json1_1CreateModelPackageGroupCommand, + serializeAws_json1_1CreateModelPackageGroupCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateModelPackageGroupCommandInput = CreateModelPackageGroupInput; +export type CreateModelPackageGroupCommandOutput = CreateModelPackageGroupOutput & __MetadataBearer; + +/** + *

    Creates a model group. A model group contains a group of model versions.

    + */ +export class CreateModelPackageGroupCommand extends $Command< + CreateModelPackageGroupCommandInput, + CreateModelPackageGroupCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateModelPackageGroupCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreateModelPackageGroupCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateModelPackageGroupInput.filterSensitiveLog, + outputFilterSensitiveLog: CreateModelPackageGroupOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateModelPackageGroupCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateModelPackageGroupCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateModelPackageGroupCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreatePipelineCommand.ts 
b/clients/client-sagemaker/commands/CreatePipelineCommand.ts new file mode 100644 index 000000000000..a8500687eb9e --- /dev/null +++ b/clients/client-sagemaker/commands/CreatePipelineCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreatePipelineRequest, CreatePipelineResponse } from "../models/models_0"; +import { + deserializeAws_json1_1CreatePipelineCommand, + serializeAws_json1_1CreatePipelineCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreatePipelineCommandInput = CreatePipelineRequest; +export type CreatePipelineCommandOutput = CreatePipelineResponse & __MetadataBearer; + +/** + *

    Creates a pipeline using a JSON pipeline definition.

    + */ +export class CreatePipelineCommand extends $Command< + CreatePipelineCommandInput, + CreatePipelineCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreatePipelineCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreatePipelineCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreatePipelineRequest.filterSensitiveLog, + outputFilterSensitiveLog: CreatePipelineResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreatePipelineCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreatePipelineCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreatePipelineCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreatePresignedDomainUrlCommand.ts b/clients/client-sagemaker/commands/CreatePresignedDomainUrlCommand.ts index 
cdcd3cfd35f4..f3af193307fe 100644 --- a/clients/client-sagemaker/commands/CreatePresignedDomainUrlCommand.ts +++ b/clients/client-sagemaker/commands/CreatePresignedDomainUrlCommand.ts @@ -1,5 +1,6 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreatePresignedDomainUrlRequest, CreatePresignedDomainUrlResponse } from "../models/models_0"; +import { CreatePresignedDomainUrlRequest } from "../models/models_0"; +import { CreatePresignedDomainUrlResponse } from "../models/models_1"; import { deserializeAws_json1_1CreatePresignedDomainUrlCommand, serializeAws_json1_1CreatePresignedDomainUrlCommand, diff --git a/clients/client-sagemaker/commands/CreatePresignedNotebookInstanceUrlCommand.ts b/clients/client-sagemaker/commands/CreatePresignedNotebookInstanceUrlCommand.ts index e35dd7abacae..5d740d95b9fd 100644 --- a/clients/client-sagemaker/commands/CreatePresignedNotebookInstanceUrlCommand.ts +++ b/clients/client-sagemaker/commands/CreatePresignedNotebookInstanceUrlCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreatePresignedNotebookInstanceUrlInput, CreatePresignedNotebookInstanceUrlOutput } from "../models/models_0"; +import { CreatePresignedNotebookInstanceUrlInput, CreatePresignedNotebookInstanceUrlOutput } from "../models/models_1"; import { deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommand, serializeAws_json1_1CreatePresignedNotebookInstanceUrlCommand, diff --git a/clients/client-sagemaker/commands/CreateProcessingJobCommand.ts b/clients/client-sagemaker/commands/CreateProcessingJobCommand.ts index d7368be1b6ee..818f46cb1632 100644 --- a/clients/client-sagemaker/commands/CreateProcessingJobCommand.ts +++ b/clients/client-sagemaker/commands/CreateProcessingJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from 
"../SageMakerClient"; -import { CreateProcessingJobRequest, CreateProcessingJobResponse } from "../models/models_0"; +import { CreateProcessingJobRequest, CreateProcessingJobResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateProcessingJobCommand, serializeAws_json1_1CreateProcessingJobCommand, diff --git a/clients/client-sagemaker/commands/CreateProjectCommand.ts b/clients/client-sagemaker/commands/CreateProjectCommand.ts new file mode 100644 index 000000000000..eaeeacc508ae --- /dev/null +++ b/clients/client-sagemaker/commands/CreateProjectCommand.ts @@ -0,0 +1,89 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { CreateProjectInput, CreateProjectOutput } from "../models/models_1"; +import { + deserializeAws_json1_1CreateProjectCommand, + serializeAws_json1_1CreateProjectCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type CreateProjectCommandInput = CreateProjectInput; +export type CreateProjectCommandOutput = CreateProjectOutput & __MetadataBearer; + +/** + *

    Creates a machine learning (ML) project that can contain one or more templates that set + * up an ML pipeline from training to deploying an approved model.

    + */ +export class CreateProjectCommand extends $Command< + CreateProjectCommandInput, + CreateProjectCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: CreateProjectCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "CreateProjectCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: CreateProjectInput.filterSensitiveLog, + outputFilterSensitiveLog: CreateProjectOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: CreateProjectCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1CreateProjectCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1CreateProjectCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/CreateTrainingJobCommand.ts b/clients/client-sagemaker/commands/CreateTrainingJobCommand.ts index f79e29503d46..7b0ec210d635 100644 --- 
a/clients/client-sagemaker/commands/CreateTrainingJobCommand.ts +++ b/clients/client-sagemaker/commands/CreateTrainingJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateTrainingJobRequest, CreateTrainingJobResponse } from "../models/models_0"; +import { CreateTrainingJobRequest, CreateTrainingJobResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateTrainingJobCommand, serializeAws_json1_1CreateTrainingJobCommand, @@ -23,10 +23,10 @@ export type CreateTrainingJobCommandOutput = CreateTrainingJobResponse & __Metad /** *

    Starts a model training job. After training completes, Amazon SageMaker saves the resulting * model artifacts to an Amazon S3 location that you specify.

    - *

    If you choose to host your model using Amazon SageMaker hosting services, you can use the - * resulting model artifacts as part of the model. You can also use the artifacts in a - * machine learning service other than Amazon SageMaker, provided that you know how to use them for - * inferences. + *

    If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting + * model artifacts as part of the model. You can also use the artifacts in a machine + * learning service other than Amazon SageMaker, provided that you know how to use them for + * inference. * *

    *

    In the request body, you provide the following:

    @@ -71,7 +71,7 @@ export type CreateTrainingJobCommandOutput = CreateTrainingJobResponse & __Metad *
  • *
  • *

    - * RoleARN - The Amazon Resource Number (ARN) that Amazon SageMaker assumes + * RoleArn - The Amazon Resource Number (ARN) that Amazon SageMaker assumes * to perform tasks on your behalf during model training. * * You must grant this role the necessary permissions so that Amazon SageMaker can successfully diff --git a/clients/client-sagemaker/commands/CreateTransformJobCommand.ts b/clients/client-sagemaker/commands/CreateTransformJobCommand.ts index 498949e41ad7..53118fb4c7b1 100644 --- a/clients/client-sagemaker/commands/CreateTransformJobCommand.ts +++ b/clients/client-sagemaker/commands/CreateTransformJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateTransformJobRequest, CreateTransformJobResponse } from "../models/models_0"; +import { CreateTransformJobRequest, CreateTransformJobResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateTransformJobCommand, serializeAws_json1_1CreateTransformJobCommand, diff --git a/clients/client-sagemaker/commands/CreateTrialCommand.ts b/clients/client-sagemaker/commands/CreateTrialCommand.ts index d7f9c9c0dc3f..7f261665573a 100644 --- a/clients/client-sagemaker/commands/CreateTrialCommand.ts +++ b/clients/client-sagemaker/commands/CreateTrialCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateTrialRequest, CreateTrialResponse } from "../models/models_0"; +import { CreateTrialRequest, CreateTrialResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateTrialCommand, serializeAws_json1_1CreateTrialCommand, diff --git a/clients/client-sagemaker/commands/CreateTrialComponentCommand.ts b/clients/client-sagemaker/commands/CreateTrialComponentCommand.ts index 598252490895..9b56d0ff6e7e 100644 --- a/clients/client-sagemaker/commands/CreateTrialComponentCommand.ts +++ 
b/clients/client-sagemaker/commands/CreateTrialComponentCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateTrialComponentRequest, CreateTrialComponentResponse } from "../models/models_0"; +import { CreateTrialComponentRequest, CreateTrialComponentResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateTrialComponentCommand, serializeAws_json1_1CreateTrialComponentCommand, diff --git a/clients/client-sagemaker/commands/CreateUserProfileCommand.ts b/clients/client-sagemaker/commands/CreateUserProfileCommand.ts index a648a1838319..864cdf158ca5 100644 --- a/clients/client-sagemaker/commands/CreateUserProfileCommand.ts +++ b/clients/client-sagemaker/commands/CreateUserProfileCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateUserProfileRequest, CreateUserProfileResponse } from "../models/models_0"; +import { CreateUserProfileRequest, CreateUserProfileResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateUserProfileCommand, serializeAws_json1_1CreateUserProfileCommand, diff --git a/clients/client-sagemaker/commands/CreateWorkforceCommand.ts b/clients/client-sagemaker/commands/CreateWorkforceCommand.ts index b2c128158554..a30f4d0e5bad 100644 --- a/clients/client-sagemaker/commands/CreateWorkforceCommand.ts +++ b/clients/client-sagemaker/commands/CreateWorkforceCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateWorkforceRequest, CreateWorkforceResponse } from "../models/models_0"; +import { CreateWorkforceRequest, CreateWorkforceResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateWorkforceCommand, serializeAws_json1_1CreateWorkforceCommand, diff --git a/clients/client-sagemaker/commands/CreateWorkteamCommand.ts 
b/clients/client-sagemaker/commands/CreateWorkteamCommand.ts index 2ac2ee4ee769..fddfe007e85a 100644 --- a/clients/client-sagemaker/commands/CreateWorkteamCommand.ts +++ b/clients/client-sagemaker/commands/CreateWorkteamCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { CreateWorkteamRequest, CreateWorkteamResponse } from "../models/models_0"; +import { CreateWorkteamRequest, CreateWorkteamResponse } from "../models/models_1"; import { deserializeAws_json1_1CreateWorkteamCommand, serializeAws_json1_1CreateWorkteamCommand, diff --git a/clients/client-sagemaker/commands/DeleteActionCommand.ts b/clients/client-sagemaker/commands/DeleteActionCommand.ts new file mode 100644 index 000000000000..ec0470d88a3f --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteActionCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteActionRequest, DeleteActionResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteActionCommand, + serializeAws_json1_1DeleteActionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteActionCommandInput = DeleteActionRequest; +export type DeleteActionCommandOutput = DeleteActionResponse & __MetadataBearer; + +/** + *

    Deletes an action.

    + */ +export class DeleteActionCommand extends $Command< + DeleteActionCommandInput, + DeleteActionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteActionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteActionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteActionRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteActionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteActionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteActionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteActionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteAlgorithmCommand.ts b/clients/client-sagemaker/commands/DeleteAlgorithmCommand.ts index 48dbc402084e..fe35087e97b4 100644 --- 
a/clients/client-sagemaker/commands/DeleteAlgorithmCommand.ts +++ b/clients/client-sagemaker/commands/DeleteAlgorithmCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { DeleteAlgorithmInput } from "../models/models_0"; +import { DeleteAlgorithmInput } from "../models/models_1"; import { deserializeAws_json1_1DeleteAlgorithmCommand, serializeAws_json1_1DeleteAlgorithmCommand, diff --git a/clients/client-sagemaker/commands/DeleteAppCommand.ts b/clients/client-sagemaker/commands/DeleteAppCommand.ts index 6aa8ebaa4b15..38b2c9bdad4a 100644 --- a/clients/client-sagemaker/commands/DeleteAppCommand.ts +++ b/clients/client-sagemaker/commands/DeleteAppCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { DeleteAppRequest } from "../models/models_0"; +import { DeleteAppRequest } from "../models/models_1"; import { deserializeAws_json1_1DeleteAppCommand, serializeAws_json1_1DeleteAppCommand } from "../protocols/Aws_json1_1"; import { getSerdePlugin } from "@aws-sdk/middleware-serde"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; diff --git a/clients/client-sagemaker/commands/DeleteAppImageConfigCommand.ts b/clients/client-sagemaker/commands/DeleteAppImageConfigCommand.ts index e2d23be2f8bb..1133235f4636 100644 --- a/clients/client-sagemaker/commands/DeleteAppImageConfigCommand.ts +++ b/clients/client-sagemaker/commands/DeleteAppImageConfigCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { DeleteAppImageConfigRequest } from "../models/models_0"; +import { DeleteAppImageConfigRequest } from "../models/models_1"; import { deserializeAws_json1_1DeleteAppImageConfigCommand, serializeAws_json1_1DeleteAppImageConfigCommand, diff --git 
a/clients/client-sagemaker/commands/DeleteArtifactCommand.ts b/clients/client-sagemaker/commands/DeleteArtifactCommand.ts new file mode 100644 index 000000000000..3cbf962cfc55 --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteArtifactCommand.ts @@ -0,0 +1,89 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteArtifactRequest, DeleteArtifactResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteArtifactCommand, + serializeAws_json1_1DeleteArtifactCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteArtifactCommandInput = DeleteArtifactRequest; +export type DeleteArtifactCommandOutput = DeleteArtifactResponse & __MetadataBearer; + +/** + *

    Deletes an artifact. Either ArtifactArn or Source must be + * specified.

    + */ +export class DeleteArtifactCommand extends $Command< + DeleteArtifactCommandInput, + DeleteArtifactCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteArtifactCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteArtifactCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteArtifactRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteArtifactResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteArtifactCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteArtifactCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteArtifactCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteAssociationCommand.ts b/clients/client-sagemaker/commands/DeleteAssociationCommand.ts new file mode 100644 index 
000000000000..3a9bd1db2127 --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteAssociationCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteAssociationRequest, DeleteAssociationResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteAssociationCommand, + serializeAws_json1_1DeleteAssociationCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteAssociationCommandInput = DeleteAssociationRequest; +export type DeleteAssociationCommandOutput = DeleteAssociationResponse & __MetadataBearer; + +/** + *

    Deletes an association.

    + */ +export class DeleteAssociationCommand extends $Command< + DeleteAssociationCommandInput, + DeleteAssociationCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteAssociationCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteAssociationCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteAssociationRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteAssociationResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteAssociationCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteAssociationCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteAssociationCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteCodeRepositoryCommand.ts 
b/clients/client-sagemaker/commands/DeleteCodeRepositoryCommand.ts index 2699123ab77f..b36fe6b5eb77 100644 --- a/clients/client-sagemaker/commands/DeleteCodeRepositoryCommand.ts +++ b/clients/client-sagemaker/commands/DeleteCodeRepositoryCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { DeleteCodeRepositoryInput } from "../models/models_0"; +import { DeleteCodeRepositoryInput } from "../models/models_1"; import { deserializeAws_json1_1DeleteCodeRepositoryCommand, serializeAws_json1_1DeleteCodeRepositoryCommand, diff --git a/clients/client-sagemaker/commands/DeleteContextCommand.ts b/clients/client-sagemaker/commands/DeleteContextCommand.ts new file mode 100644 index 000000000000..6390306d167e --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteContextCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteContextRequest, DeleteContextResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteContextCommand, + serializeAws_json1_1DeleteContextCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteContextCommandInput = DeleteContextRequest; +export type DeleteContextCommandOutput = DeleteContextResponse & __MetadataBearer; + +/** + *

    Deletes an context.

    + */ +export class DeleteContextCommand extends $Command< + DeleteContextCommandInput, + DeleteContextCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteContextCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteContextCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteContextRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeleteContextResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteContextCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteContextCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteContextCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteFeatureGroupCommand.ts b/clients/client-sagemaker/commands/DeleteFeatureGroupCommand.ts new file mode 100644 index 
000000000000..ceb7adc330b9 --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteFeatureGroupCommand.ts @@ -0,0 +1,93 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteFeatureGroupRequest } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteFeatureGroupCommand, + serializeAws_json1_1DeleteFeatureGroupCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteFeatureGroupCommandInput = DeleteFeatureGroupRequest; +export type DeleteFeatureGroupCommandOutput = __MetadataBearer; + +/** + *

    Delete the FeatureGroup and any data that was written to the + * OnlineStore of the FeatureGroup. Data cannot be accessed from + * the OnlineStore immediately after DeleteFeatureGroup is called.

    + *

    Data written into the OfflineStore will not be deleted. The AWS Glue + * database and tables that are automatically created for your OfflineStore are + * not deleted.

    + */ +export class DeleteFeatureGroupCommand extends $Command< + DeleteFeatureGroupCommandInput, + DeleteFeatureGroupCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteFeatureGroupCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteFeatureGroupCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteFeatureGroupRequest.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteFeatureGroupCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteFeatureGroupCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteFeatureGroupCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteModelPackageGroupCommand.ts b/clients/client-sagemaker/commands/DeleteModelPackageGroupCommand.ts 
new file mode 100644 index 000000000000..dee19d14f68c --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteModelPackageGroupCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteModelPackageGroupInput } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteModelPackageGroupCommand, + serializeAws_json1_1DeleteModelPackageGroupCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteModelPackageGroupCommandInput = DeleteModelPackageGroupInput; +export type DeleteModelPackageGroupCommandOutput = __MetadataBearer; + +/** + *

    Deletes the specified model group.

    + */ +export class DeleteModelPackageGroupCommand extends $Command< + DeleteModelPackageGroupCommandInput, + DeleteModelPackageGroupCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteModelPackageGroupCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteModelPackageGroupCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteModelPackageGroupInput.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteModelPackageGroupCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteModelPackageGroupCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteModelPackageGroupCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteModelPackageGroupPolicyCommand.ts 
b/clients/client-sagemaker/commands/DeleteModelPackageGroupPolicyCommand.ts new file mode 100644 index 000000000000..6f06e4a71ba9 --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteModelPackageGroupPolicyCommand.ts @@ -0,0 +1,91 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteModelPackageGroupPolicyInput } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteModelPackageGroupPolicyCommand, + serializeAws_json1_1DeleteModelPackageGroupPolicyCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteModelPackageGroupPolicyCommandInput = DeleteModelPackageGroupPolicyInput; +export type DeleteModelPackageGroupPolicyCommandOutput = __MetadataBearer; + +/** + *

    Deletes a model group resource policy.

    + */ +export class DeleteModelPackageGroupPolicyCommand extends $Command< + DeleteModelPackageGroupPolicyCommandInput, + DeleteModelPackageGroupPolicyCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteModelPackageGroupPolicyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteModelPackageGroupPolicyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteModelPackageGroupPolicyInput.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteModelPackageGroupPolicyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteModelPackageGroupPolicyCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1DeleteModelPackageGroupPolicyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/DeletePipelineCommand.ts b/clients/client-sagemaker/commands/DeletePipelineCommand.ts new file mode 100644 index 000000000000..40142286ba2d --- /dev/null +++ b/clients/client-sagemaker/commands/DeletePipelineCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeletePipelineRequest, DeletePipelineResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DeletePipelineCommand, + serializeAws_json1_1DeletePipelineCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeletePipelineCommandInput = DeletePipelineRequest; +export type DeletePipelineCommandOutput = DeletePipelineResponse & __MetadataBearer; + +/** + *

    Deletes a pipeline.

    + */ +export class DeletePipelineCommand extends $Command< + DeletePipelineCommandInput, + DeletePipelineCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeletePipelineCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeletePipelineCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeletePipelineRequest.filterSensitiveLog, + outputFilterSensitiveLog: DeletePipelineResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeletePipelineCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeletePipelineCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeletePipelineCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DeleteProjectCommand.ts b/clients/client-sagemaker/commands/DeleteProjectCommand.ts new file mode 100644 index 
000000000000..4b04fe1cd45c --- /dev/null +++ b/clients/client-sagemaker/commands/DeleteProjectCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DeleteProjectInput } from "../models/models_1"; +import { + deserializeAws_json1_1DeleteProjectCommand, + serializeAws_json1_1DeleteProjectCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DeleteProjectCommandInput = DeleteProjectInput; +export type DeleteProjectCommandOutput = __MetadataBearer; + +/** + *

    Delete the specified project.

    + */ +export class DeleteProjectCommand extends $Command< + DeleteProjectCommandInput, + DeleteProjectCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DeleteProjectCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DeleteProjectCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DeleteProjectInput.filterSensitiveLog, + outputFilterSensitiveLog: (output: any) => output, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DeleteProjectCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DeleteProjectCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DeleteProjectCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribeActionCommand.ts b/clients/client-sagemaker/commands/DescribeActionCommand.ts new file mode 100644 index 000000000000..7552edd51009 --- 
/dev/null +++ b/clients/client-sagemaker/commands/DescribeActionCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribeActionRequest, DescribeActionResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DescribeActionCommand, + serializeAws_json1_1DescribeActionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeActionCommandInput = DescribeActionRequest; +export type DescribeActionCommandOutput = DescribeActionResponse & __MetadataBearer; + +/** + *

    Describes an action.

    + */ +export class DescribeActionCommand extends $Command< + DescribeActionCommandInput, + DescribeActionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeActionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribeActionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeActionRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeActionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeActionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeActionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeActionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribeArtifactCommand.ts b/clients/client-sagemaker/commands/DescribeArtifactCommand.ts new file mode 100644 index 
000000000000..5b315518b635 --- /dev/null +++ b/clients/client-sagemaker/commands/DescribeArtifactCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribeArtifactRequest, DescribeArtifactResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DescribeArtifactCommand, + serializeAws_json1_1DescribeArtifactCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeArtifactCommandInput = DescribeArtifactRequest; +export type DescribeArtifactCommandOutput = DescribeArtifactResponse & __MetadataBearer; + +/** + *

    Describes an artifact.

    + */ +export class DescribeArtifactCommand extends $Command< + DescribeArtifactCommandInput, + DescribeArtifactCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeArtifactCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribeArtifactCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeArtifactRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeArtifactResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeArtifactCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeArtifactCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeArtifactCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribeContextCommand.ts b/clients/client-sagemaker/commands/DescribeContextCommand.ts new file mode 
100644 index 000000000000..a1c7b244fe46 --- /dev/null +++ b/clients/client-sagemaker/commands/DescribeContextCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribeContextRequest, DescribeContextResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DescribeContextCommand, + serializeAws_json1_1DescribeContextCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeContextCommandInput = DescribeContextRequest; +export type DescribeContextCommandOutput = DescribeContextResponse & __MetadataBearer; + +/** + *

    Describes a context.

    + */ +export class DescribeContextCommand extends $Command< + DescribeContextCommandInput, + DescribeContextCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeContextCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribeContextCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeContextRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeContextResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeContextCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeContextCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeContextCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribeFeatureGroupCommand.ts b/clients/client-sagemaker/commands/DescribeFeatureGroupCommand.ts new file mode 
100644 index 000000000000..5e96066a8809 --- /dev/null +++ b/clients/client-sagemaker/commands/DescribeFeatureGroupCommand.ts @@ -0,0 +1,90 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribeFeatureGroupRequest, DescribeFeatureGroupResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DescribeFeatureGroupCommand, + serializeAws_json1_1DescribeFeatureGroupCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeFeatureGroupCommandInput = DescribeFeatureGroupRequest; +export type DescribeFeatureGroupCommandOutput = DescribeFeatureGroupResponse & __MetadataBearer; + +/** + *

    Use this operation to describe a FeatureGroup. The response includes + * information on the creation time, FeatureGroup name, the unique identifier for + * each FeatureGroup, and more.

    + */ +export class DescribeFeatureGroupCommand extends $Command< + DescribeFeatureGroupCommandInput, + DescribeFeatureGroupCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeFeatureGroupCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribeFeatureGroupCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeFeatureGroupRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribeFeatureGroupResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeFeatureGroupCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeFeatureGroupCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeFeatureGroupCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribeModelPackageGroupCommand.ts 
b/clients/client-sagemaker/commands/DescribeModelPackageGroupCommand.ts new file mode 100644 index 000000000000..51787c63daa4 --- /dev/null +++ b/clients/client-sagemaker/commands/DescribeModelPackageGroupCommand.ts @@ -0,0 +1,91 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribeModelPackageGroupInput, DescribeModelPackageGroupOutput } from "../models/models_1"; +import { + deserializeAws_json1_1DescribeModelPackageGroupCommand, + serializeAws_json1_1DescribeModelPackageGroupCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeModelPackageGroupCommandInput = DescribeModelPackageGroupInput; +export type DescribeModelPackageGroupCommandOutput = DescribeModelPackageGroupOutput & __MetadataBearer; + +/** + *

    Gets a description for the specified model group.

    + */ +export class DescribeModelPackageGroupCommand extends $Command< + DescribeModelPackageGroupCommandInput, + DescribeModelPackageGroupCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeModelPackageGroupCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribeModelPackageGroupCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeModelPackageGroupInput.filterSensitiveLog, + outputFilterSensitiveLog: DescribeModelPackageGroupOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeModelPackageGroupCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeModelPackageGroupCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1DescribeModelPackageGroupCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/DescribePipelineCommand.ts b/clients/client-sagemaker/commands/DescribePipelineCommand.ts new file mode 100644 index 000000000000..557dcac69e09 --- /dev/null +++ b/clients/client-sagemaker/commands/DescribePipelineCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribePipelineRequest, DescribePipelineResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DescribePipelineCommand, + serializeAws_json1_1DescribePipelineCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribePipelineCommandInput = DescribePipelineRequest; +export type DescribePipelineCommandOutput = DescribePipelineResponse & __MetadataBearer; + +/** + *

    Describes the details of a pipeline.

    + */ +export class DescribePipelineCommand extends $Command< + DescribePipelineCommandInput, + DescribePipelineCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribePipelineCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribePipelineCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribePipelineRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribePipelineResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribePipelineCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribePipelineCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribePipelineCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribePipelineDefinitionForExecutionCommand.ts 
b/clients/client-sagemaker/commands/DescribePipelineDefinitionForExecutionCommand.ts new file mode 100644 index 000000000000..d7c81ac3c970 --- /dev/null +++ b/clients/client-sagemaker/commands/DescribePipelineDefinitionForExecutionCommand.ts @@ -0,0 +1,98 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { + DescribePipelineDefinitionForExecutionRequest, + DescribePipelineDefinitionForExecutionResponse, +} from "../models/models_1"; +import { + deserializeAws_json1_1DescribePipelineDefinitionForExecutionCommand, + serializeAws_json1_1DescribePipelineDefinitionForExecutionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribePipelineDefinitionForExecutionCommandInput = DescribePipelineDefinitionForExecutionRequest; +export type DescribePipelineDefinitionForExecutionCommandOutput = DescribePipelineDefinitionForExecutionResponse & + __MetadataBearer; + +/** + *

    Describes the details of an execution's pipeline definition.

    + */ +export class DescribePipelineDefinitionForExecutionCommand extends $Command< + DescribePipelineDefinitionForExecutionCommandInput, + DescribePipelineDefinitionForExecutionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribePipelineDefinitionForExecutionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribePipelineDefinitionForExecutionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribePipelineDefinitionForExecutionRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribePipelineDefinitionForExecutionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: DescribePipelineDefinitionForExecutionCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_json1_1DescribePipelineDefinitionForExecutionCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1DescribePipelineDefinitionForExecutionCommand(output, 
context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DescribePipelineExecutionCommand.ts b/clients/client-sagemaker/commands/DescribePipelineExecutionCommand.ts new file mode 100644 index 000000000000..2beef8b66e8e --- /dev/null +++ b/clients/client-sagemaker/commands/DescribePipelineExecutionCommand.ts @@ -0,0 +1,91 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribePipelineExecutionRequest, DescribePipelineExecutionResponse } from "../models/models_1"; +import { + deserializeAws_json1_1DescribePipelineExecutionCommand, + serializeAws_json1_1DescribePipelineExecutionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribePipelineExecutionCommandInput = DescribePipelineExecutionRequest; +export type DescribePipelineExecutionCommandOutput = DescribePipelineExecutionResponse & __MetadataBearer; + +/** + *

    Describes the details of a pipeline execution.

    + */ +export class DescribePipelineExecutionCommand extends $Command< + DescribePipelineExecutionCommandInput, + DescribePipelineExecutionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribePipelineExecutionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribePipelineExecutionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribePipelineExecutionRequest.filterSensitiveLog, + outputFilterSensitiveLog: DescribePipelineExecutionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribePipelineExecutionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribePipelineExecutionCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1DescribePipelineExecutionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/DescribeProjectCommand.ts b/clients/client-sagemaker/commands/DescribeProjectCommand.ts new file mode 100644 index 000000000000..6ccf31bcf57e --- /dev/null +++ b/clients/client-sagemaker/commands/DescribeProjectCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { DescribeProjectInput, DescribeProjectOutput } from "../models/models_1"; +import { + deserializeAws_json1_1DescribeProjectCommand, + serializeAws_json1_1DescribeProjectCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DescribeProjectCommandInput = DescribeProjectInput; +export type DescribeProjectCommandOutput = DescribeProjectOutput & __MetadataBearer; + +/** + *

    Describes the details of a project.

    + */ +export class DescribeProjectCommand extends $Command< + DescribeProjectCommandInput, + DescribeProjectCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DescribeProjectCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DescribeProjectCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DescribeProjectInput.filterSensitiveLog, + outputFilterSensitiveLog: DescribeProjectOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: DescribeProjectCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1DescribeProjectCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1DescribeProjectCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/DisableSagemakerServicecatalogPortfolioCommand.ts 
b/clients/client-sagemaker/commands/DisableSagemakerServicecatalogPortfolioCommand.ts new file mode 100644 index 000000000000..c73fa6c4b991 --- /dev/null +++ b/clients/client-sagemaker/commands/DisableSagemakerServicecatalogPortfolioCommand.ts @@ -0,0 +1,102 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { + DisableSagemakerServicecatalogPortfolioInput, + DisableSagemakerServicecatalogPortfolioOutput, +} from "../models/models_1"; +import { + deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommand, + serializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type DisableSagemakerServicecatalogPortfolioCommandInput = DisableSagemakerServicecatalogPortfolioInput; +export type DisableSagemakerServicecatalogPortfolioCommandOutput = DisableSagemakerServicecatalogPortfolioOutput & + __MetadataBearer; + +/** + *

    Disables using Service Catalog in SageMaker. Service Catalog is used to create + * SageMaker projects.

    + */ +export class DisableSagemakerServicecatalogPortfolioCommand extends $Command< + DisableSagemakerServicecatalogPortfolioCommandInput, + DisableSagemakerServicecatalogPortfolioCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: DisableSagemakerServicecatalogPortfolioCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler< + DisableSagemakerServicecatalogPortfolioCommandInput, + DisableSagemakerServicecatalogPortfolioCommandOutput + > { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "DisableSagemakerServicecatalogPortfolioCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: DisableSagemakerServicecatalogPortfolioInput.filterSensitiveLog, + outputFilterSensitiveLog: DisableSagemakerServicecatalogPortfolioOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: DisableSagemakerServicecatalogPortfolioCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: 
__SerdeContext + ): Promise { + return deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/EnableSagemakerServicecatalogPortfolioCommand.ts b/clients/client-sagemaker/commands/EnableSagemakerServicecatalogPortfolioCommand.ts new file mode 100644 index 000000000000..2c9a8940e219 --- /dev/null +++ b/clients/client-sagemaker/commands/EnableSagemakerServicecatalogPortfolioCommand.ts @@ -0,0 +1,99 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { + EnableSagemakerServicecatalogPortfolioInput, + EnableSagemakerServicecatalogPortfolioOutput, +} from "../models/models_1"; +import { + deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommand, + serializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type EnableSagemakerServicecatalogPortfolioCommandInput = EnableSagemakerServicecatalogPortfolioInput; +export type EnableSagemakerServicecatalogPortfolioCommandOutput = EnableSagemakerServicecatalogPortfolioOutput & + __MetadataBearer; + +/** + *

    Enables using Service Catalog in SageMaker. Service Catalog is used to create + * SageMaker projects.

    + */ +export class EnableSagemakerServicecatalogPortfolioCommand extends $Command< + EnableSagemakerServicecatalogPortfolioCommandInput, + EnableSagemakerServicecatalogPortfolioCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: EnableSagemakerServicecatalogPortfolioCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "EnableSagemakerServicecatalogPortfolioCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: EnableSagemakerServicecatalogPortfolioInput.filterSensitiveLog, + outputFilterSensitiveLog: EnableSagemakerServicecatalogPortfolioOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: EnableSagemakerServicecatalogPortfolioCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommand(output, context); + 
} + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/GetModelPackageGroupPolicyCommand.ts b/clients/client-sagemaker/commands/GetModelPackageGroupPolicyCommand.ts new file mode 100644 index 000000000000..764cfffd8df5 --- /dev/null +++ b/clients/client-sagemaker/commands/GetModelPackageGroupPolicyCommand.ts @@ -0,0 +1,94 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { GetModelPackageGroupPolicyInput, GetModelPackageGroupPolicyOutput } from "../models/models_1"; +import { + deserializeAws_json1_1GetModelPackageGroupPolicyCommand, + serializeAws_json1_1GetModelPackageGroupPolicyCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetModelPackageGroupPolicyCommandInput = GetModelPackageGroupPolicyInput; +export type GetModelPackageGroupPolicyCommandOutput = GetModelPackageGroupPolicyOutput & __MetadataBearer; + +/** + *

    Gets a resource policy that manages access for a model group. For information about + * resource policies, see Identity-based + * policies and resource-based policies in the AWS Identity and + * Access Management User Guide..

    + */ +export class GetModelPackageGroupPolicyCommand extends $Command< + GetModelPackageGroupPolicyCommandInput, + GetModelPackageGroupPolicyCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetModelPackageGroupPolicyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "GetModelPackageGroupPolicyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetModelPackageGroupPolicyInput.filterSensitiveLog, + outputFilterSensitiveLog: GetModelPackageGroupPolicyOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: GetModelPackageGroupPolicyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1GetModelPackageGroupPolicyCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1GetModelPackageGroupPolicyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/GetSagemakerServicecatalogPortfolioStatusCommand.ts b/clients/client-sagemaker/commands/GetSagemakerServicecatalogPortfolioStatusCommand.ts new file mode 100644 index 000000000000..005df582cff4 --- /dev/null +++ b/clients/client-sagemaker/commands/GetSagemakerServicecatalogPortfolioStatusCommand.ts @@ -0,0 +1,102 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { + GetSagemakerServicecatalogPortfolioStatusInput, + GetSagemakerServicecatalogPortfolioStatusOutput, +} from "../models/models_1"; +import { + deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommand, + serializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type GetSagemakerServicecatalogPortfolioStatusCommandInput = GetSagemakerServicecatalogPortfolioStatusInput; +export type GetSagemakerServicecatalogPortfolioStatusCommandOutput = GetSagemakerServicecatalogPortfolioStatusOutput & + __MetadataBearer; + +/** + *

    Gets the status of Service Catalog in SageMaker. Service Catalog is used to create + * SageMaker projects.

    + */ +export class GetSagemakerServicecatalogPortfolioStatusCommand extends $Command< + GetSagemakerServicecatalogPortfolioStatusCommandInput, + GetSagemakerServicecatalogPortfolioStatusCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: GetSagemakerServicecatalogPortfolioStatusCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler< + GetSagemakerServicecatalogPortfolioStatusCommandInput, + GetSagemakerServicecatalogPortfolioStatusCommandOutput + > { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "GetSagemakerServicecatalogPortfolioStatusCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: GetSagemakerServicecatalogPortfolioStatusInput.filterSensitiveLog, + outputFilterSensitiveLog: GetSagemakerServicecatalogPortfolioStatusOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: GetSagemakerServicecatalogPortfolioStatusCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommand(input, context); + } + + private deserialize( + output: 
__HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListActionsCommand.ts b/clients/client-sagemaker/commands/ListActionsCommand.ts new file mode 100644 index 000000000000..8adc9a183e93 --- /dev/null +++ b/clients/client-sagemaker/commands/ListActionsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListActionsRequest, ListActionsResponse } from "../models/models_1"; +import { + deserializeAws_json1_1ListActionsCommand, + serializeAws_json1_1ListActionsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListActionsCommandInput = ListActionsRequest; +export type ListActionsCommandOutput = ListActionsResponse & __MetadataBearer; + +/** + *

    Lists the actions in your account and their properties.

    + */ +export class ListActionsCommand extends $Command< + ListActionsCommandInput, + ListActionsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListActionsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListActionsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListActionsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListActionsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListActionsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListActionsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListActionsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListArtifactsCommand.ts b/clients/client-sagemaker/commands/ListArtifactsCommand.ts new file mode 100644 index 000000000000..fe2bc0eb7867 --- /dev/null 
+++ b/clients/client-sagemaker/commands/ListArtifactsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListArtifactsRequest, ListArtifactsResponse } from "../models/models_1"; +import { + deserializeAws_json1_1ListArtifactsCommand, + serializeAws_json1_1ListArtifactsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListArtifactsCommandInput = ListArtifactsRequest; +export type ListArtifactsCommandOutput = ListArtifactsResponse & __MetadataBearer; + +/** + *

    Lists the artifacts in your account and their properties.

    + */ +export class ListArtifactsCommand extends $Command< + ListArtifactsCommandInput, + ListArtifactsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListArtifactsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListArtifactsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListArtifactsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListArtifactsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListArtifactsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListArtifactsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListArtifactsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListAssociationsCommand.ts b/clients/client-sagemaker/commands/ListAssociationsCommand.ts new file mode 100644 index 
000000000000..267d0b38e5e1 --- /dev/null +++ b/clients/client-sagemaker/commands/ListAssociationsCommand.ts @@ -0,0 +1,89 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListAssociationsRequest } from "../models/models_1"; +import { ListAssociationsResponse } from "../models/models_2"; +import { + deserializeAws_json1_1ListAssociationsCommand, + serializeAws_json1_1ListAssociationsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListAssociationsCommandInput = ListAssociationsRequest; +export type ListAssociationsCommandOutput = ListAssociationsResponse & __MetadataBearer; + +/** + *

    Lists the associations in your account and their properties.

    + */ +export class ListAssociationsCommand extends $Command< + ListAssociationsCommandInput, + ListAssociationsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListAssociationsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListAssociationsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListAssociationsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListAssociationsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListAssociationsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListAssociationsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListAssociationsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListAutoMLJobsCommand.ts b/clients/client-sagemaker/commands/ListAutoMLJobsCommand.ts index 
2807f8196c71..35e5b08ffb08 100644 --- a/clients/client-sagemaker/commands/ListAutoMLJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListAutoMLJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListAutoMLJobsRequest, ListAutoMLJobsResponse } from "../models/models_1"; +import { ListAutoMLJobsRequest, ListAutoMLJobsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListAutoMLJobsCommand, serializeAws_json1_1ListAutoMLJobsCommand, diff --git a/clients/client-sagemaker/commands/ListCandidatesForAutoMLJobCommand.ts b/clients/client-sagemaker/commands/ListCandidatesForAutoMLJobCommand.ts index 9923ee42cdce..dbff2953b478 100644 --- a/clients/client-sagemaker/commands/ListCandidatesForAutoMLJobCommand.ts +++ b/clients/client-sagemaker/commands/ListCandidatesForAutoMLJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListCandidatesForAutoMLJobRequest, ListCandidatesForAutoMLJobResponse } from "../models/models_1"; +import { ListCandidatesForAutoMLJobRequest, ListCandidatesForAutoMLJobResponse } from "../models/models_2"; import { deserializeAws_json1_1ListCandidatesForAutoMLJobCommand, serializeAws_json1_1ListCandidatesForAutoMLJobCommand, diff --git a/clients/client-sagemaker/commands/ListCodeRepositoriesCommand.ts b/clients/client-sagemaker/commands/ListCodeRepositoriesCommand.ts index 2581282e208b..eea6e4d34e21 100644 --- a/clients/client-sagemaker/commands/ListCodeRepositoriesCommand.ts +++ b/clients/client-sagemaker/commands/ListCodeRepositoriesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListCodeRepositoriesInput, ListCodeRepositoriesOutput } from "../models/models_1"; +import { ListCodeRepositoriesInput, ListCodeRepositoriesOutput } from 
"../models/models_2"; import { deserializeAws_json1_1ListCodeRepositoriesCommand, serializeAws_json1_1ListCodeRepositoriesCommand, diff --git a/clients/client-sagemaker/commands/ListCompilationJobsCommand.ts b/clients/client-sagemaker/commands/ListCompilationJobsCommand.ts index 60300f22da60..271d36a1bbe4 100644 --- a/clients/client-sagemaker/commands/ListCompilationJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListCompilationJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListCompilationJobsRequest, ListCompilationJobsResponse } from "../models/models_1"; +import { ListCompilationJobsRequest, ListCompilationJobsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListCompilationJobsCommand, serializeAws_json1_1ListCompilationJobsCommand, diff --git a/clients/client-sagemaker/commands/ListContextsCommand.ts b/clients/client-sagemaker/commands/ListContextsCommand.ts new file mode 100644 index 000000000000..16fd6bf6b340 --- /dev/null +++ b/clients/client-sagemaker/commands/ListContextsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListContextsRequest, ListContextsResponse } from "../models/models_2"; +import { + deserializeAws_json1_1ListContextsCommand, + serializeAws_json1_1ListContextsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListContextsCommandInput = 
ListContextsRequest; +export type ListContextsCommandOutput = ListContextsResponse & __MetadataBearer; + +/** + *

    Lists the contexts in your account and their properties.

    + */ +export class ListContextsCommand extends $Command< + ListContextsCommandInput, + ListContextsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListContextsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListContextsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListContextsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListContextsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListContextsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListContextsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListContextsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListDomainsCommand.ts b/clients/client-sagemaker/commands/ListDomainsCommand.ts index 6fde955edd04..f1593b3b9b08 100644 --- 
a/clients/client-sagemaker/commands/ListDomainsCommand.ts +++ b/clients/client-sagemaker/commands/ListDomainsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListDomainsRequest, ListDomainsResponse } from "../models/models_1"; +import { ListDomainsRequest, ListDomainsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListDomainsCommand, serializeAws_json1_1ListDomainsCommand, diff --git a/clients/client-sagemaker/commands/ListEndpointConfigsCommand.ts b/clients/client-sagemaker/commands/ListEndpointConfigsCommand.ts index 38a27faeb1ba..1a65f42e2dad 100644 --- a/clients/client-sagemaker/commands/ListEndpointConfigsCommand.ts +++ b/clients/client-sagemaker/commands/ListEndpointConfigsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListEndpointConfigsInput, ListEndpointConfigsOutput } from "../models/models_1"; +import { ListEndpointConfigsInput, ListEndpointConfigsOutput } from "../models/models_2"; import { deserializeAws_json1_1ListEndpointConfigsCommand, serializeAws_json1_1ListEndpointConfigsCommand, diff --git a/clients/client-sagemaker/commands/ListEndpointsCommand.ts b/clients/client-sagemaker/commands/ListEndpointsCommand.ts index 84b7100602bb..4e7a48d6255e 100644 --- a/clients/client-sagemaker/commands/ListEndpointsCommand.ts +++ b/clients/client-sagemaker/commands/ListEndpointsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListEndpointsInput, ListEndpointsOutput } from "../models/models_1"; +import { ListEndpointsInput, ListEndpointsOutput } from "../models/models_2"; import { deserializeAws_json1_1ListEndpointsCommand, serializeAws_json1_1ListEndpointsCommand, diff --git a/clients/client-sagemaker/commands/ListExperimentsCommand.ts 
b/clients/client-sagemaker/commands/ListExperimentsCommand.ts index 9460582e0a6f..cfa5b0c008bf 100644 --- a/clients/client-sagemaker/commands/ListExperimentsCommand.ts +++ b/clients/client-sagemaker/commands/ListExperimentsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListExperimentsRequest, ListExperimentsResponse } from "../models/models_1"; +import { ListExperimentsRequest, ListExperimentsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListExperimentsCommand, serializeAws_json1_1ListExperimentsCommand, diff --git a/clients/client-sagemaker/commands/ListFeatureGroupsCommand.ts b/clients/client-sagemaker/commands/ListFeatureGroupsCommand.ts new file mode 100644 index 000000000000..7f424e3dcc1e --- /dev/null +++ b/clients/client-sagemaker/commands/ListFeatureGroupsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListFeatureGroupsRequest, ListFeatureGroupsResponse } from "../models/models_2"; +import { + deserializeAws_json1_1ListFeatureGroupsCommand, + serializeAws_json1_1ListFeatureGroupsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListFeatureGroupsCommandInput = ListFeatureGroupsRequest; +export type ListFeatureGroupsCommandOutput = ListFeatureGroupsResponse & __MetadataBearer; + +/** + *

    List FeatureGroups based on given filter and order.

    + */ +export class ListFeatureGroupsCommand extends $Command< + ListFeatureGroupsCommandInput, + ListFeatureGroupsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListFeatureGroupsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListFeatureGroupsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListFeatureGroupsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListFeatureGroupsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListFeatureGroupsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListFeatureGroupsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListFeatureGroupsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListFlowDefinitionsCommand.ts 
b/clients/client-sagemaker/commands/ListFlowDefinitionsCommand.ts index a73e5ae7e79f..ac07f82e72ee 100644 --- a/clients/client-sagemaker/commands/ListFlowDefinitionsCommand.ts +++ b/clients/client-sagemaker/commands/ListFlowDefinitionsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListFlowDefinitionsRequest, ListFlowDefinitionsResponse } from "../models/models_1"; +import { ListFlowDefinitionsRequest, ListFlowDefinitionsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListFlowDefinitionsCommand, serializeAws_json1_1ListFlowDefinitionsCommand, diff --git a/clients/client-sagemaker/commands/ListHumanTaskUisCommand.ts b/clients/client-sagemaker/commands/ListHumanTaskUisCommand.ts index d48eb1207f78..04dc0cf605e0 100644 --- a/clients/client-sagemaker/commands/ListHumanTaskUisCommand.ts +++ b/clients/client-sagemaker/commands/ListHumanTaskUisCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListHumanTaskUisRequest, ListHumanTaskUisResponse } from "../models/models_1"; +import { ListHumanTaskUisRequest, ListHumanTaskUisResponse } from "../models/models_2"; import { deserializeAws_json1_1ListHumanTaskUisCommand, serializeAws_json1_1ListHumanTaskUisCommand, diff --git a/clients/client-sagemaker/commands/ListHyperParameterTuningJobsCommand.ts b/clients/client-sagemaker/commands/ListHyperParameterTuningJobsCommand.ts index e01fb417a64e..531c6b1aad4b 100644 --- a/clients/client-sagemaker/commands/ListHyperParameterTuningJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListHyperParameterTuningJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListHyperParameterTuningJobsRequest, ListHyperParameterTuningJobsResponse } from "../models/models_1"; +import { 
ListHyperParameterTuningJobsRequest, ListHyperParameterTuningJobsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListHyperParameterTuningJobsCommand, serializeAws_json1_1ListHyperParameterTuningJobsCommand, diff --git a/clients/client-sagemaker/commands/ListImageVersionsCommand.ts b/clients/client-sagemaker/commands/ListImageVersionsCommand.ts index 63f392144732..bafb988368aa 100644 --- a/clients/client-sagemaker/commands/ListImageVersionsCommand.ts +++ b/clients/client-sagemaker/commands/ListImageVersionsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListImageVersionsRequest, ListImageVersionsResponse } from "../models/models_1"; +import { ListImageVersionsRequest, ListImageVersionsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListImageVersionsCommand, serializeAws_json1_1ListImageVersionsCommand, diff --git a/clients/client-sagemaker/commands/ListImagesCommand.ts b/clients/client-sagemaker/commands/ListImagesCommand.ts index 0a7cd8a37f29..9cbef5b7a3fe 100644 --- a/clients/client-sagemaker/commands/ListImagesCommand.ts +++ b/clients/client-sagemaker/commands/ListImagesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListImagesRequest, ListImagesResponse } from "../models/models_1"; +import { ListImagesRequest, ListImagesResponse } from "../models/models_2"; import { deserializeAws_json1_1ListImagesCommand, serializeAws_json1_1ListImagesCommand, diff --git a/clients/client-sagemaker/commands/ListLabelingJobsCommand.ts b/clients/client-sagemaker/commands/ListLabelingJobsCommand.ts index 1ffa30e9954f..b91ea7899a86 100644 --- a/clients/client-sagemaker/commands/ListLabelingJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListLabelingJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, 
ServiceOutputTypes } from "../SageMakerClient"; -import { ListLabelingJobsRequest, ListLabelingJobsResponse } from "../models/models_1"; +import { ListLabelingJobsRequest, ListLabelingJobsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListLabelingJobsCommand, serializeAws_json1_1ListLabelingJobsCommand, diff --git a/clients/client-sagemaker/commands/ListLabelingJobsForWorkteamCommand.ts b/clients/client-sagemaker/commands/ListLabelingJobsForWorkteamCommand.ts index 7cf0d4928b1b..c392583dc899 100644 --- a/clients/client-sagemaker/commands/ListLabelingJobsForWorkteamCommand.ts +++ b/clients/client-sagemaker/commands/ListLabelingJobsForWorkteamCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListLabelingJobsForWorkteamRequest, ListLabelingJobsForWorkteamResponse } from "../models/models_1"; +import { ListLabelingJobsForWorkteamRequest, ListLabelingJobsForWorkteamResponse } from "../models/models_2"; import { deserializeAws_json1_1ListLabelingJobsForWorkteamCommand, serializeAws_json1_1ListLabelingJobsForWorkteamCommand, diff --git a/clients/client-sagemaker/commands/ListModelPackageGroupsCommand.ts b/clients/client-sagemaker/commands/ListModelPackageGroupsCommand.ts new file mode 100644 index 000000000000..e2c7721a3743 --- /dev/null +++ b/clients/client-sagemaker/commands/ListModelPackageGroupsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListModelPackageGroupsInput, ListModelPackageGroupsOutput } from "../models/models_2"; +import { + deserializeAws_json1_1ListModelPackageGroupsCommand, + serializeAws_json1_1ListModelPackageGroupsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command 
as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListModelPackageGroupsCommandInput = ListModelPackageGroupsInput; +export type ListModelPackageGroupsCommandOutput = ListModelPackageGroupsOutput & __MetadataBearer; + +/** + *

    Gets a list of the model groups in your AWS account.

    + */ +export class ListModelPackageGroupsCommand extends $Command< + ListModelPackageGroupsCommandInput, + ListModelPackageGroupsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListModelPackageGroupsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListModelPackageGroupsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListModelPackageGroupsInput.filterSensitiveLog, + outputFilterSensitiveLog: ListModelPackageGroupsOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListModelPackageGroupsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListModelPackageGroupsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListModelPackageGroupsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListModelPackagesCommand.ts 
b/clients/client-sagemaker/commands/ListModelPackagesCommand.ts index 56fa062b077e..0501cdcc30de 100644 --- a/clients/client-sagemaker/commands/ListModelPackagesCommand.ts +++ b/clients/client-sagemaker/commands/ListModelPackagesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListModelPackagesInput, ListModelPackagesOutput } from "../models/models_1"; +import { ListModelPackagesInput, ListModelPackagesOutput } from "../models/models_2"; import { deserializeAws_json1_1ListModelPackagesCommand, serializeAws_json1_1ListModelPackagesCommand, diff --git a/clients/client-sagemaker/commands/ListModelsCommand.ts b/clients/client-sagemaker/commands/ListModelsCommand.ts index 2f7e9dee64aa..affb95ea74e8 100644 --- a/clients/client-sagemaker/commands/ListModelsCommand.ts +++ b/clients/client-sagemaker/commands/ListModelsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListModelsInput, ListModelsOutput } from "../models/models_1"; +import { ListModelsInput, ListModelsOutput } from "../models/models_2"; import { deserializeAws_json1_1ListModelsCommand, serializeAws_json1_1ListModelsCommand, diff --git a/clients/client-sagemaker/commands/ListMonitoringExecutionsCommand.ts b/clients/client-sagemaker/commands/ListMonitoringExecutionsCommand.ts index 76ffe7f0ef14..e1ad4e522d9a 100644 --- a/clients/client-sagemaker/commands/ListMonitoringExecutionsCommand.ts +++ b/clients/client-sagemaker/commands/ListMonitoringExecutionsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListMonitoringExecutionsRequest, ListMonitoringExecutionsResponse } from "../models/models_1"; +import { ListMonitoringExecutionsRequest, ListMonitoringExecutionsResponse } from "../models/models_2"; import { 
deserializeAws_json1_1ListMonitoringExecutionsCommand, serializeAws_json1_1ListMonitoringExecutionsCommand, diff --git a/clients/client-sagemaker/commands/ListMonitoringSchedulesCommand.ts b/clients/client-sagemaker/commands/ListMonitoringSchedulesCommand.ts index 07cbd1979696..342786480ca9 100644 --- a/clients/client-sagemaker/commands/ListMonitoringSchedulesCommand.ts +++ b/clients/client-sagemaker/commands/ListMonitoringSchedulesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListMonitoringSchedulesRequest, ListMonitoringSchedulesResponse } from "../models/models_1"; +import { ListMonitoringSchedulesRequest, ListMonitoringSchedulesResponse } from "../models/models_2"; import { deserializeAws_json1_1ListMonitoringSchedulesCommand, serializeAws_json1_1ListMonitoringSchedulesCommand, diff --git a/clients/client-sagemaker/commands/ListNotebookInstanceLifecycleConfigsCommand.ts b/clients/client-sagemaker/commands/ListNotebookInstanceLifecycleConfigsCommand.ts index 79e421321195..35feef558527 100644 --- a/clients/client-sagemaker/commands/ListNotebookInstanceLifecycleConfigsCommand.ts +++ b/clients/client-sagemaker/commands/ListNotebookInstanceLifecycleConfigsCommand.ts @@ -2,7 +2,7 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } import { ListNotebookInstanceLifecycleConfigsInput, ListNotebookInstanceLifecycleConfigsOutput, -} from "../models/models_1"; +} from "../models/models_2"; import { deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommand, serializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommand, diff --git a/clients/client-sagemaker/commands/ListNotebookInstancesCommand.ts b/clients/client-sagemaker/commands/ListNotebookInstancesCommand.ts index bbadd6dbba0f..47490332ad21 100644 --- a/clients/client-sagemaker/commands/ListNotebookInstancesCommand.ts +++ 
b/clients/client-sagemaker/commands/ListNotebookInstancesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListNotebookInstancesInput, ListNotebookInstancesOutput } from "../models/models_1"; +import { ListNotebookInstancesInput, ListNotebookInstancesOutput } from "../models/models_2"; import { deserializeAws_json1_1ListNotebookInstancesCommand, serializeAws_json1_1ListNotebookInstancesCommand, diff --git a/clients/client-sagemaker/commands/ListPipelineExecutionStepsCommand.ts b/clients/client-sagemaker/commands/ListPipelineExecutionStepsCommand.ts new file mode 100644 index 000000000000..07f459086e0f --- /dev/null +++ b/clients/client-sagemaker/commands/ListPipelineExecutionStepsCommand.ts @@ -0,0 +1,91 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListPipelineExecutionStepsRequest, ListPipelineExecutionStepsResponse } from "../models/models_2"; +import { + deserializeAws_json1_1ListPipelineExecutionStepsCommand, + serializeAws_json1_1ListPipelineExecutionStepsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListPipelineExecutionStepsCommandInput = ListPipelineExecutionStepsRequest; +export type ListPipelineExecutionStepsCommandOutput = ListPipelineExecutionStepsResponse & __MetadataBearer; + +/** + *

    Gets a list of PipeLineExecutionStep objects.

    + */ +export class ListPipelineExecutionStepsCommand extends $Command< + ListPipelineExecutionStepsCommandInput, + ListPipelineExecutionStepsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListPipelineExecutionStepsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListPipelineExecutionStepsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListPipelineExecutionStepsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListPipelineExecutionStepsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListPipelineExecutionStepsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListPipelineExecutionStepsCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1ListPipelineExecutionStepsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/ListPipelineExecutionsCommand.ts b/clients/client-sagemaker/commands/ListPipelineExecutionsCommand.ts new file mode 100644 index 000000000000..4f2f77ef6ddc --- /dev/null +++ b/clients/client-sagemaker/commands/ListPipelineExecutionsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListPipelineExecutionsRequest, ListPipelineExecutionsResponse } from "../models/models_2"; +import { + deserializeAws_json1_1ListPipelineExecutionsCommand, + serializeAws_json1_1ListPipelineExecutionsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListPipelineExecutionsCommandInput = ListPipelineExecutionsRequest; +export type ListPipelineExecutionsCommandOutput = ListPipelineExecutionsResponse & __MetadataBearer; + +/** + *

    Gets a list of the pipeline executions.

    + */ +export class ListPipelineExecutionsCommand extends $Command< + ListPipelineExecutionsCommandInput, + ListPipelineExecutionsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListPipelineExecutionsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListPipelineExecutionsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListPipelineExecutionsRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListPipelineExecutionsResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListPipelineExecutionsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListPipelineExecutionsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListPipelineExecutionsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/ListPipelineParametersForExecutionCommand.ts b/clients/client-sagemaker/commands/ListPipelineParametersForExecutionCommand.ts new file mode 100644 index 000000000000..5a66e4135bf8 --- /dev/null +++ b/clients/client-sagemaker/commands/ListPipelineParametersForExecutionCommand.ts @@ -0,0 +1,98 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { + ListPipelineParametersForExecutionRequest, + ListPipelineParametersForExecutionResponse, +} from "../models/models_2"; +import { + deserializeAws_json1_1ListPipelineParametersForExecutionCommand, + serializeAws_json1_1ListPipelineParametersForExecutionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListPipelineParametersForExecutionCommandInput = ListPipelineParametersForExecutionRequest; +export type ListPipelineParametersForExecutionCommandOutput = ListPipelineParametersForExecutionResponse & + __MetadataBearer; + +/** + *

    Gets a list of parameters for a pipeline execution.

    + */ +export class ListPipelineParametersForExecutionCommand extends $Command< + ListPipelineParametersForExecutionCommandInput, + ListPipelineParametersForExecutionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListPipelineParametersForExecutionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListPipelineParametersForExecutionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListPipelineParametersForExecutionRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListPipelineParametersForExecutionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize( + input: ListPipelineParametersForExecutionCommandInput, + context: __SerdeContext + ): Promise<__HttpRequest> { + return serializeAws_json1_1ListPipelineParametersForExecutionCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1ListPipelineParametersForExecutionCommand(output, context); + } + + // Start section: 
command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListPipelinesCommand.ts b/clients/client-sagemaker/commands/ListPipelinesCommand.ts new file mode 100644 index 000000000000..c3e1a34d05b7 --- /dev/null +++ b/clients/client-sagemaker/commands/ListPipelinesCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListPipelinesRequest, ListPipelinesResponse } from "../models/models_2"; +import { + deserializeAws_json1_1ListPipelinesCommand, + serializeAws_json1_1ListPipelinesCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListPipelinesCommandInput = ListPipelinesRequest; +export type ListPipelinesCommandOutput = ListPipelinesResponse & __MetadataBearer; + +/** + *

    Gets a list of pipelines.

    + */ +export class ListPipelinesCommand extends $Command< + ListPipelinesCommandInput, + ListPipelinesCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListPipelinesCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListPipelinesCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListPipelinesRequest.filterSensitiveLog, + outputFilterSensitiveLog: ListPipelinesResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListPipelinesCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListPipelinesCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListPipelinesCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListProcessingJobsCommand.ts b/clients/client-sagemaker/commands/ListProcessingJobsCommand.ts index 5b2fd527a025..eab43fcc2658 
100644 --- a/clients/client-sagemaker/commands/ListProcessingJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListProcessingJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListProcessingJobsRequest, ListProcessingJobsResponse } from "../models/models_1"; +import { ListProcessingJobsRequest, ListProcessingJobsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListProcessingJobsCommand, serializeAws_json1_1ListProcessingJobsCommand, diff --git a/clients/client-sagemaker/commands/ListProjectsCommand.ts b/clients/client-sagemaker/commands/ListProjectsCommand.ts new file mode 100644 index 000000000000..bdbc2b05e994 --- /dev/null +++ b/clients/client-sagemaker/commands/ListProjectsCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { ListProjectsInput, ListProjectsOutput } from "../models/models_2"; +import { + deserializeAws_json1_1ListProjectsCommand, + serializeAws_json1_1ListProjectsCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type ListProjectsCommandInput = ListProjectsInput; +export type ListProjectsCommandOutput = ListProjectsOutput & __MetadataBearer; + +/** + *

    Gets a list of the projects in an AWS account.

    + */ +export class ListProjectsCommand extends $Command< + ListProjectsCommandInput, + ListProjectsCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: ListProjectsCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "ListProjectsCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: ListProjectsInput.filterSensitiveLog, + outputFilterSensitiveLog: ListProjectsOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: ListProjectsCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1ListProjectsCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1ListProjectsCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/ListSubscribedWorkteamsCommand.ts b/clients/client-sagemaker/commands/ListSubscribedWorkteamsCommand.ts index 486060d3e8ca..1a6e343e88ee 100644 
--- a/clients/client-sagemaker/commands/ListSubscribedWorkteamsCommand.ts +++ b/clients/client-sagemaker/commands/ListSubscribedWorkteamsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListSubscribedWorkteamsRequest, ListSubscribedWorkteamsResponse } from "../models/models_1"; +import { ListSubscribedWorkteamsRequest, ListSubscribedWorkteamsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListSubscribedWorkteamsCommand, serializeAws_json1_1ListSubscribedWorkteamsCommand, diff --git a/clients/client-sagemaker/commands/ListTagsCommand.ts b/clients/client-sagemaker/commands/ListTagsCommand.ts index 0d09ba9e32fc..8441d1e3e2d2 100644 --- a/clients/client-sagemaker/commands/ListTagsCommand.ts +++ b/clients/client-sagemaker/commands/ListTagsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListTagsInput, ListTagsOutput } from "../models/models_1"; +import { ListTagsInput, ListTagsOutput } from "../models/models_2"; import { deserializeAws_json1_1ListTagsCommand, serializeAws_json1_1ListTagsCommand } from "../protocols/Aws_json1_1"; import { getSerdePlugin } from "@aws-sdk/middleware-serde"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; diff --git a/clients/client-sagemaker/commands/ListTrainingJobsCommand.ts b/clients/client-sagemaker/commands/ListTrainingJobsCommand.ts index e5dfc67c1507..45ebf993a2cb 100644 --- a/clients/client-sagemaker/commands/ListTrainingJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListTrainingJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListTrainingJobsRequest, ListTrainingJobsResponse } from "../models/models_1"; +import { ListTrainingJobsRequest, ListTrainingJobsResponse } 
from "../models/models_2"; import { deserializeAws_json1_1ListTrainingJobsCommand, serializeAws_json1_1ListTrainingJobsCommand, diff --git a/clients/client-sagemaker/commands/ListTrainingJobsForHyperParameterTuningJobCommand.ts b/clients/client-sagemaker/commands/ListTrainingJobsForHyperParameterTuningJobCommand.ts index 3b8fd10007e2..574b17c27f58 100644 --- a/clients/client-sagemaker/commands/ListTrainingJobsForHyperParameterTuningJobCommand.ts +++ b/clients/client-sagemaker/commands/ListTrainingJobsForHyperParameterTuningJobCommand.ts @@ -2,7 +2,7 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } import { ListTrainingJobsForHyperParameterTuningJobRequest, ListTrainingJobsForHyperParameterTuningJobResponse, -} from "../models/models_1"; +} from "../models/models_2"; import { deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommand, serializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommand, diff --git a/clients/client-sagemaker/commands/ListTransformJobsCommand.ts b/clients/client-sagemaker/commands/ListTransformJobsCommand.ts index 9c78927a4e63..4cdd58e7adfe 100644 --- a/clients/client-sagemaker/commands/ListTransformJobsCommand.ts +++ b/clients/client-sagemaker/commands/ListTransformJobsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListTransformJobsRequest, ListTransformJobsResponse } from "../models/models_1"; +import { ListTransformJobsRequest, ListTransformJobsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListTransformJobsCommand, serializeAws_json1_1ListTransformJobsCommand, diff --git a/clients/client-sagemaker/commands/ListTrialComponentsCommand.ts b/clients/client-sagemaker/commands/ListTrialComponentsCommand.ts index d17d7ab2e570..0534ad95059d 100644 --- a/clients/client-sagemaker/commands/ListTrialComponentsCommand.ts +++ 
b/clients/client-sagemaker/commands/ListTrialComponentsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListTrialComponentsRequest, ListTrialComponentsResponse } from "../models/models_1"; +import { ListTrialComponentsRequest, ListTrialComponentsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListTrialComponentsCommand, serializeAws_json1_1ListTrialComponentsCommand, diff --git a/clients/client-sagemaker/commands/ListTrialsCommand.ts b/clients/client-sagemaker/commands/ListTrialsCommand.ts index bf490174efbf..25a349326577 100644 --- a/clients/client-sagemaker/commands/ListTrialsCommand.ts +++ b/clients/client-sagemaker/commands/ListTrialsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListTrialsRequest, ListTrialsResponse } from "../models/models_1"; +import { ListTrialsRequest, ListTrialsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListTrialsCommand, serializeAws_json1_1ListTrialsCommand, diff --git a/clients/client-sagemaker/commands/ListUserProfilesCommand.ts b/clients/client-sagemaker/commands/ListUserProfilesCommand.ts index 94e8c349133d..47bf2aaceaba 100644 --- a/clients/client-sagemaker/commands/ListUserProfilesCommand.ts +++ b/clients/client-sagemaker/commands/ListUserProfilesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListUserProfilesRequest, ListUserProfilesResponse } from "../models/models_1"; +import { ListUserProfilesRequest, ListUserProfilesResponse } from "../models/models_2"; import { deserializeAws_json1_1ListUserProfilesCommand, serializeAws_json1_1ListUserProfilesCommand, diff --git a/clients/client-sagemaker/commands/ListWorkforcesCommand.ts b/clients/client-sagemaker/commands/ListWorkforcesCommand.ts index 
8620b50a54c7..ccc347cedf46 100644 --- a/clients/client-sagemaker/commands/ListWorkforcesCommand.ts +++ b/clients/client-sagemaker/commands/ListWorkforcesCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListWorkforcesRequest, ListWorkforcesResponse } from "../models/models_1"; +import { ListWorkforcesRequest, ListWorkforcesResponse } from "../models/models_2"; import { deserializeAws_json1_1ListWorkforcesCommand, serializeAws_json1_1ListWorkforcesCommand, diff --git a/clients/client-sagemaker/commands/ListWorkteamsCommand.ts b/clients/client-sagemaker/commands/ListWorkteamsCommand.ts index 2741a900c7ef..77e96c287b28 100644 --- a/clients/client-sagemaker/commands/ListWorkteamsCommand.ts +++ b/clients/client-sagemaker/commands/ListWorkteamsCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { ListWorkteamsRequest, ListWorkteamsResponse } from "../models/models_1"; +import { ListWorkteamsRequest, ListWorkteamsResponse } from "../models/models_2"; import { deserializeAws_json1_1ListWorkteamsCommand, serializeAws_json1_1ListWorkteamsCommand, diff --git a/clients/client-sagemaker/commands/PutModelPackageGroupPolicyCommand.ts b/clients/client-sagemaker/commands/PutModelPackageGroupPolicyCommand.ts new file mode 100644 index 000000000000..a4c568d3ba4b --- /dev/null +++ b/clients/client-sagemaker/commands/PutModelPackageGroupPolicyCommand.ts @@ -0,0 +1,93 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { PutModelPackageGroupPolicyInput, PutModelPackageGroupPolicyOutput } from "../models/models_2"; +import { + deserializeAws_json1_1PutModelPackageGroupPolicyCommand, + serializeAws_json1_1PutModelPackageGroupPolicyCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import 
{ HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type PutModelPackageGroupPolicyCommandInput = PutModelPackageGroupPolicyInput; +export type PutModelPackageGroupPolicyCommandOutput = PutModelPackageGroupPolicyOutput & __MetadataBearer; + +/** + *

    Adds a resource policy to control access to a model group. For information about + * resource policies, see Identity-based + * policies and resource-based policies in the AWS Identity and Access Management User Guide.

    + */ +export class PutModelPackageGroupPolicyCommand extends $Command< + PutModelPackageGroupPolicyCommandInput, + PutModelPackageGroupPolicyCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: PutModelPackageGroupPolicyCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "PutModelPackageGroupPolicyCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: PutModelPackageGroupPolicyInput.filterSensitiveLog, + outputFilterSensitiveLog: PutModelPackageGroupPolicyOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: PutModelPackageGroupPolicyCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1PutModelPackageGroupPolicyCommand(input, context); + } + + private deserialize( + output: __HttpResponse, + context: __SerdeContext + ): Promise { + return deserializeAws_json1_1PutModelPackageGroupPolicyCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git 
a/clients/client-sagemaker/commands/RenderUiTemplateCommand.ts b/clients/client-sagemaker/commands/RenderUiTemplateCommand.ts index 4b4de5975bdd..0f01daa88f88 100644 --- a/clients/client-sagemaker/commands/RenderUiTemplateCommand.ts +++ b/clients/client-sagemaker/commands/RenderUiTemplateCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { RenderUiTemplateRequest, RenderUiTemplateResponse } from "../models/models_1"; +import { RenderUiTemplateRequest, RenderUiTemplateResponse } from "../models/models_2"; import { deserializeAws_json1_1RenderUiTemplateCommand, serializeAws_json1_1RenderUiTemplateCommand, diff --git a/clients/client-sagemaker/commands/SearchCommand.ts b/clients/client-sagemaker/commands/SearchCommand.ts index 971be9d9b0ab..dc92e79b6f75 100644 --- a/clients/client-sagemaker/commands/SearchCommand.ts +++ b/clients/client-sagemaker/commands/SearchCommand.ts @@ -1,6 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { SearchResponse } from "../models/models_1"; -import { SearchRequest } from "../models/models_2"; +import { SearchRequest, SearchResponse } from "../models/models_2"; import { deserializeAws_json1_1SearchCommand, serializeAws_json1_1SearchCommand } from "../protocols/Aws_json1_1"; import { getSerdePlugin } from "@aws-sdk/middleware-serde"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; diff --git a/clients/client-sagemaker/commands/StartMonitoringScheduleCommand.ts b/clients/client-sagemaker/commands/StartMonitoringScheduleCommand.ts index 935b4dbca576..416ae6bd17a2 100644 --- a/clients/client-sagemaker/commands/StartMonitoringScheduleCommand.ts +++ b/clients/client-sagemaker/commands/StartMonitoringScheduleCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from 
"../SageMakerClient"; -import { StartMonitoringScheduleRequest } from "../models/models_1"; +import { StartMonitoringScheduleRequest } from "../models/models_2"; import { deserializeAws_json1_1StartMonitoringScheduleCommand, serializeAws_json1_1StartMonitoringScheduleCommand, diff --git a/clients/client-sagemaker/commands/StartNotebookInstanceCommand.ts b/clients/client-sagemaker/commands/StartNotebookInstanceCommand.ts index 3b78ec4cda3c..2b7fc2481905 100644 --- a/clients/client-sagemaker/commands/StartNotebookInstanceCommand.ts +++ b/clients/client-sagemaker/commands/StartNotebookInstanceCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StartNotebookInstanceInput } from "../models/models_1"; +import { StartNotebookInstanceInput } from "../models/models_2"; import { deserializeAws_json1_1StartNotebookInstanceCommand, serializeAws_json1_1StartNotebookInstanceCommand, diff --git a/clients/client-sagemaker/commands/StartPipelineExecutionCommand.ts b/clients/client-sagemaker/commands/StartPipelineExecutionCommand.ts new file mode 100644 index 000000000000..83471b04c59d --- /dev/null +++ b/clients/client-sagemaker/commands/StartPipelineExecutionCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { StartPipelineExecutionRequest, StartPipelineExecutionResponse } from "../models/models_2"; +import { + deserializeAws_json1_1StartPipelineExecutionCommand, + serializeAws_json1_1StartPipelineExecutionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as 
__HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type StartPipelineExecutionCommandInput = StartPipelineExecutionRequest; +export type StartPipelineExecutionCommandOutput = StartPipelineExecutionResponse & __MetadataBearer; + +/** + *

    Starts a pipeline execution.

    + */ +export class StartPipelineExecutionCommand extends $Command< + StartPipelineExecutionCommandInput, + StartPipelineExecutionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: StartPipelineExecutionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "StartPipelineExecutionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: StartPipelineExecutionRequest.filterSensitiveLog, + outputFilterSensitiveLog: StartPipelineExecutionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: StartPipelineExecutionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1StartPipelineExecutionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1StartPipelineExecutionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/StopAutoMLJobCommand.ts 
b/clients/client-sagemaker/commands/StopAutoMLJobCommand.ts index 186337bb737f..42f865fda8db 100644 --- a/clients/client-sagemaker/commands/StopAutoMLJobCommand.ts +++ b/clients/client-sagemaker/commands/StopAutoMLJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopAutoMLJobRequest } from "../models/models_1"; +import { StopAutoMLJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopAutoMLJobCommand, serializeAws_json1_1StopAutoMLJobCommand, diff --git a/clients/client-sagemaker/commands/StopCompilationJobCommand.ts b/clients/client-sagemaker/commands/StopCompilationJobCommand.ts index 7c6ce80ac644..fd0247912eb8 100644 --- a/clients/client-sagemaker/commands/StopCompilationJobCommand.ts +++ b/clients/client-sagemaker/commands/StopCompilationJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopCompilationJobRequest } from "../models/models_1"; +import { StopCompilationJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopCompilationJobCommand, serializeAws_json1_1StopCompilationJobCommand, diff --git a/clients/client-sagemaker/commands/StopHyperParameterTuningJobCommand.ts b/clients/client-sagemaker/commands/StopHyperParameterTuningJobCommand.ts index b9378b32120a..a89fabc72d19 100644 --- a/clients/client-sagemaker/commands/StopHyperParameterTuningJobCommand.ts +++ b/clients/client-sagemaker/commands/StopHyperParameterTuningJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopHyperParameterTuningJobRequest } from "../models/models_1"; +import { StopHyperParameterTuningJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopHyperParameterTuningJobCommand, serializeAws_json1_1StopHyperParameterTuningJobCommand, diff 
--git a/clients/client-sagemaker/commands/StopLabelingJobCommand.ts b/clients/client-sagemaker/commands/StopLabelingJobCommand.ts index c57383fd380c..ca3f24202284 100644 --- a/clients/client-sagemaker/commands/StopLabelingJobCommand.ts +++ b/clients/client-sagemaker/commands/StopLabelingJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopLabelingJobRequest } from "../models/models_1"; +import { StopLabelingJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopLabelingJobCommand, serializeAws_json1_1StopLabelingJobCommand, diff --git a/clients/client-sagemaker/commands/StopMonitoringScheduleCommand.ts b/clients/client-sagemaker/commands/StopMonitoringScheduleCommand.ts index c058f902d493..393c06007d5d 100644 --- a/clients/client-sagemaker/commands/StopMonitoringScheduleCommand.ts +++ b/clients/client-sagemaker/commands/StopMonitoringScheduleCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopMonitoringScheduleRequest } from "../models/models_1"; +import { StopMonitoringScheduleRequest } from "../models/models_2"; import { deserializeAws_json1_1StopMonitoringScheduleCommand, serializeAws_json1_1StopMonitoringScheduleCommand, diff --git a/clients/client-sagemaker/commands/StopNotebookInstanceCommand.ts b/clients/client-sagemaker/commands/StopNotebookInstanceCommand.ts index 39fa9b171921..77b30487ccac 100644 --- a/clients/client-sagemaker/commands/StopNotebookInstanceCommand.ts +++ b/clients/client-sagemaker/commands/StopNotebookInstanceCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopNotebookInstanceInput } from "../models/models_1"; +import { StopNotebookInstanceInput } from "../models/models_2"; import { deserializeAws_json1_1StopNotebookInstanceCommand, 
serializeAws_json1_1StopNotebookInstanceCommand, diff --git a/clients/client-sagemaker/commands/StopPipelineExecutionCommand.ts b/clients/client-sagemaker/commands/StopPipelineExecutionCommand.ts new file mode 100644 index 000000000000..ce07ff503800 --- /dev/null +++ b/clients/client-sagemaker/commands/StopPipelineExecutionCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { StopPipelineExecutionRequest, StopPipelineExecutionResponse } from "../models/models_2"; +import { + deserializeAws_json1_1StopPipelineExecutionCommand, + serializeAws_json1_1StopPipelineExecutionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type StopPipelineExecutionCommandInput = StopPipelineExecutionRequest; +export type StopPipelineExecutionCommandOutput = StopPipelineExecutionResponse & __MetadataBearer; + +/** + *

    Stops a pipeline execution.

    + */ +export class StopPipelineExecutionCommand extends $Command< + StopPipelineExecutionCommandInput, + StopPipelineExecutionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: StopPipelineExecutionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "StopPipelineExecutionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: StopPipelineExecutionRequest.filterSensitiveLog, + outputFilterSensitiveLog: StopPipelineExecutionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: StopPipelineExecutionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1StopPipelineExecutionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1StopPipelineExecutionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/StopProcessingJobCommand.ts 
b/clients/client-sagemaker/commands/StopProcessingJobCommand.ts index 4955ad679857..8d5a405de600 100644 --- a/clients/client-sagemaker/commands/StopProcessingJobCommand.ts +++ b/clients/client-sagemaker/commands/StopProcessingJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopProcessingJobRequest } from "../models/models_1"; +import { StopProcessingJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopProcessingJobCommand, serializeAws_json1_1StopProcessingJobCommand, diff --git a/clients/client-sagemaker/commands/StopTrainingJobCommand.ts b/clients/client-sagemaker/commands/StopTrainingJobCommand.ts index 81c91abe6b80..20e225307abd 100644 --- a/clients/client-sagemaker/commands/StopTrainingJobCommand.ts +++ b/clients/client-sagemaker/commands/StopTrainingJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopTrainingJobRequest } from "../models/models_1"; +import { StopTrainingJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopTrainingJobCommand, serializeAws_json1_1StopTrainingJobCommand, diff --git a/clients/client-sagemaker/commands/StopTransformJobCommand.ts b/clients/client-sagemaker/commands/StopTransformJobCommand.ts index 1f2e3d7f51bd..14c453bc02a1 100644 --- a/clients/client-sagemaker/commands/StopTransformJobCommand.ts +++ b/clients/client-sagemaker/commands/StopTransformJobCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { StopTransformJobRequest } from "../models/models_1"; +import { StopTransformJobRequest } from "../models/models_2"; import { deserializeAws_json1_1StopTransformJobCommand, serializeAws_json1_1StopTransformJobCommand, diff --git a/clients/client-sagemaker/commands/UpdateActionCommand.ts 
b/clients/client-sagemaker/commands/UpdateActionCommand.ts new file mode 100644 index 000000000000..489e5f81fef7 --- /dev/null +++ b/clients/client-sagemaker/commands/UpdateActionCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { UpdateActionRequest, UpdateActionResponse } from "../models/models_2"; +import { + deserializeAws_json1_1UpdateActionCommand, + serializeAws_json1_1UpdateActionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateActionCommandInput = UpdateActionRequest; +export type UpdateActionCommandOutput = UpdateActionResponse & __MetadataBearer; + +/** + *

    Updates an action.

    + */ +export class UpdateActionCommand extends $Command< + UpdateActionCommandInput, + UpdateActionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateActionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "UpdateActionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateActionRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateActionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateActionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UpdateActionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UpdateActionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/UpdateAppImageConfigCommand.ts b/clients/client-sagemaker/commands/UpdateAppImageConfigCommand.ts index 0ff758ba9112..5ff0c004cccf 100644 --- 
a/clients/client-sagemaker/commands/UpdateAppImageConfigCommand.ts +++ b/clients/client-sagemaker/commands/UpdateAppImageConfigCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { UpdateAppImageConfigRequest, UpdateAppImageConfigResponse } from "../models/models_1"; +import { UpdateAppImageConfigRequest, UpdateAppImageConfigResponse } from "../models/models_2"; import { deserializeAws_json1_1UpdateAppImageConfigCommand, serializeAws_json1_1UpdateAppImageConfigCommand, diff --git a/clients/client-sagemaker/commands/UpdateArtifactCommand.ts b/clients/client-sagemaker/commands/UpdateArtifactCommand.ts new file mode 100644 index 000000000000..0764bf4999b8 --- /dev/null +++ b/clients/client-sagemaker/commands/UpdateArtifactCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { UpdateArtifactRequest, UpdateArtifactResponse } from "../models/models_2"; +import { + deserializeAws_json1_1UpdateArtifactCommand, + serializeAws_json1_1UpdateArtifactCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateArtifactCommandInput = UpdateArtifactRequest; +export type UpdateArtifactCommandOutput = UpdateArtifactResponse & __MetadataBearer; + +/** + *

    Updates an artifact.

    + */ +export class UpdateArtifactCommand extends $Command< + UpdateArtifactCommandInput, + UpdateArtifactCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateArtifactCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "UpdateArtifactCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateArtifactRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateArtifactResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateArtifactCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UpdateArtifactCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UpdateArtifactCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/UpdateCodeRepositoryCommand.ts b/clients/client-sagemaker/commands/UpdateCodeRepositoryCommand.ts index 
7c4909a1f755..e25990748980 100644 --- a/clients/client-sagemaker/commands/UpdateCodeRepositoryCommand.ts +++ b/clients/client-sagemaker/commands/UpdateCodeRepositoryCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { UpdateCodeRepositoryInput, UpdateCodeRepositoryOutput } from "../models/models_1"; +import { UpdateCodeRepositoryInput, UpdateCodeRepositoryOutput } from "../models/models_2"; import { deserializeAws_json1_1UpdateCodeRepositoryCommand, serializeAws_json1_1UpdateCodeRepositoryCommand, diff --git a/clients/client-sagemaker/commands/UpdateContextCommand.ts b/clients/client-sagemaker/commands/UpdateContextCommand.ts new file mode 100644 index 000000000000..ca83df4929b5 --- /dev/null +++ b/clients/client-sagemaker/commands/UpdateContextCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { UpdateContextRequest, UpdateContextResponse } from "../models/models_2"; +import { + deserializeAws_json1_1UpdateContextCommand, + serializeAws_json1_1UpdateContextCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateContextCommandInput = UpdateContextRequest; +export type UpdateContextCommandOutput = UpdateContextResponse & __MetadataBearer; + +/** + *

    Updates a context.

    + */ +export class UpdateContextCommand extends $Command< + UpdateContextCommandInput, + UpdateContextCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateContextCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "UpdateContextCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateContextRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdateContextResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateContextCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UpdateContextCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UpdateContextCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/UpdateDomainCommand.ts b/clients/client-sagemaker/commands/UpdateDomainCommand.ts index 1148560b972c..2638c26a7f08 100644 --- 
a/clients/client-sagemaker/commands/UpdateDomainCommand.ts +++ b/clients/client-sagemaker/commands/UpdateDomainCommand.ts @@ -1,5 +1,5 @@ import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; -import { UpdateDomainRequest, UpdateDomainResponse } from "../models/models_1"; +import { UpdateDomainRequest, UpdateDomainResponse } from "../models/models_2"; import { deserializeAws_json1_1UpdateDomainCommand, serializeAws_json1_1UpdateDomainCommand, diff --git a/clients/client-sagemaker/commands/UpdateModelPackageCommand.ts b/clients/client-sagemaker/commands/UpdateModelPackageCommand.ts new file mode 100644 index 000000000000..d01ba0dc0382 --- /dev/null +++ b/clients/client-sagemaker/commands/UpdateModelPackageCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { UpdateModelPackageInput, UpdateModelPackageOutput } from "../models/models_2"; +import { + deserializeAws_json1_1UpdateModelPackageCommand, + serializeAws_json1_1UpdateModelPackageCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdateModelPackageCommandInput = UpdateModelPackageInput; +export type UpdateModelPackageCommandOutput = UpdateModelPackageOutput & __MetadataBearer; + +/** + *

    Updates a versioned model.

    + */ +export class UpdateModelPackageCommand extends $Command< + UpdateModelPackageCommandInput, + UpdateModelPackageCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdateModelPackageCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "UpdateModelPackageCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdateModelPackageInput.filterSensitiveLog, + outputFilterSensitiveLog: UpdateModelPackageOutput.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdateModelPackageCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UpdateModelPackageCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UpdateModelPackageCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/UpdatePipelineCommand.ts b/clients/client-sagemaker/commands/UpdatePipelineCommand.ts 
new file mode 100644 index 000000000000..af885cc33f52 --- /dev/null +++ b/clients/client-sagemaker/commands/UpdatePipelineCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { UpdatePipelineRequest, UpdatePipelineResponse } from "../models/models_2"; +import { + deserializeAws_json1_1UpdatePipelineCommand, + serializeAws_json1_1UpdatePipelineCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdatePipelineCommandInput = UpdatePipelineRequest; +export type UpdatePipelineCommandOutput = UpdatePipelineResponse & __MetadataBearer; + +/** + *

    Updates a pipeline.

    + */ +export class UpdatePipelineCommand extends $Command< + UpdatePipelineCommandInput, + UpdatePipelineCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdatePipelineCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "UpdatePipelineCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdatePipelineRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdatePipelineResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdatePipelineCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UpdatePipelineCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UpdatePipelineCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/commands/UpdatePipelineExecutionCommand.ts b/clients/client-sagemaker/commands/UpdatePipelineExecutionCommand.ts new file mode 
100644 index 000000000000..5948517cb2a5 --- /dev/null +++ b/clients/client-sagemaker/commands/UpdatePipelineExecutionCommand.ts @@ -0,0 +1,88 @@ +import { SageMakerClientResolvedConfig, ServiceInputTypes, ServiceOutputTypes } from "../SageMakerClient"; +import { UpdatePipelineExecutionRequest, UpdatePipelineExecutionResponse } from "../models/models_2"; +import { + deserializeAws_json1_1UpdatePipelineExecutionCommand, + serializeAws_json1_1UpdatePipelineExecutionCommand, +} from "../protocols/Aws_json1_1"; +import { getSerdePlugin } from "@aws-sdk/middleware-serde"; +import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; +import { Command as $Command } from "@aws-sdk/smithy-client"; +import { + FinalizeHandlerArguments, + Handler, + HandlerExecutionContext, + MiddlewareStack, + HttpHandlerOptions as __HttpHandlerOptions, + MetadataBearer as __MetadataBearer, + SerdeContext as __SerdeContext, +} from "@aws-sdk/types"; + +export type UpdatePipelineExecutionCommandInput = UpdatePipelineExecutionRequest; +export type UpdatePipelineExecutionCommandOutput = UpdatePipelineExecutionResponse & __MetadataBearer; + +/** + *

    Updates a pipeline execution.

    + */ +export class UpdatePipelineExecutionCommand extends $Command< + UpdatePipelineExecutionCommandInput, + UpdatePipelineExecutionCommandOutput, + SageMakerClientResolvedConfig +> { + // Start section: command_properties + // End section: command_properties + + constructor(readonly input: UpdatePipelineExecutionCommandInput) { + // Start section: command_constructor + super(); + // End section: command_constructor + } + + /** + * @internal + */ + resolveMiddleware( + clientStack: MiddlewareStack, + configuration: SageMakerClientResolvedConfig, + options?: __HttpHandlerOptions + ): Handler { + this.middlewareStack.use(getSerdePlugin(configuration, this.serialize, this.deserialize)); + + const stack = clientStack.concat(this.middlewareStack); + + const { logger } = configuration; + const clientName = "SageMakerClient"; + const commandName = "UpdatePipelineExecutionCommand"; + const handlerExecutionContext: HandlerExecutionContext = { + logger, + clientName, + commandName, + inputFilterSensitiveLog: UpdatePipelineExecutionRequest.filterSensitiveLog, + outputFilterSensitiveLog: UpdatePipelineExecutionResponse.filterSensitiveLog, + }; + + if (typeof logger.info === "function") { + logger.info({ + clientName, + commandName, + }); + } + + const { requestHandler } = configuration; + return stack.resolve( + (request: FinalizeHandlerArguments) => + requestHandler.handle(request.request as __HttpRequest, options || {}), + handlerExecutionContext + ); + } + + private serialize(input: UpdatePipelineExecutionCommandInput, context: __SerdeContext): Promise<__HttpRequest> { + return serializeAws_json1_1UpdatePipelineExecutionCommand(input, context); + } + + private deserialize(output: __HttpResponse, context: __SerdeContext): Promise { + return deserializeAws_json1_1UpdatePipelineExecutionCommand(output, context); + } + + // Start section: command_body_extra + // End section: command_body_extra +} diff --git a/clients/client-sagemaker/index.ts 
b/clients/client-sagemaker/index.ts index 1d8677aac1d5..6a436516fa36 100644 --- a/clients/client-sagemaker/index.ts +++ b/clients/client-sagemaker/index.ts @@ -1,17 +1,22 @@ export * from "./SageMakerClient"; export * from "./SageMaker"; +export * from "./commands/AddAssociationCommand"; export * from "./commands/AddTagsCommand"; export * from "./commands/AssociateTrialComponentCommand"; +export * from "./commands/CreateActionCommand"; export * from "./commands/CreateAlgorithmCommand"; export * from "./commands/CreateAppCommand"; export * from "./commands/CreateAppImageConfigCommand"; +export * from "./commands/CreateArtifactCommand"; export * from "./commands/CreateAutoMLJobCommand"; export * from "./commands/CreateCodeRepositoryCommand"; export * from "./commands/CreateCompilationJobCommand"; +export * from "./commands/CreateContextCommand"; export * from "./commands/CreateDomainCommand"; export * from "./commands/CreateEndpointCommand"; export * from "./commands/CreateEndpointConfigCommand"; export * from "./commands/CreateExperimentCommand"; +export * from "./commands/CreateFeatureGroupCommand"; export * from "./commands/CreateFlowDefinitionCommand"; export * from "./commands/CreateHumanTaskUiCommand"; export * from "./commands/CreateHyperParameterTuningJobCommand"; @@ -20,12 +25,15 @@ export * from "./commands/CreateImageVersionCommand"; export * from "./commands/CreateLabelingJobCommand"; export * from "./commands/CreateModelCommand"; export * from "./commands/CreateModelPackageCommand"; +export * from "./commands/CreateModelPackageGroupCommand"; export * from "./commands/CreateMonitoringScheduleCommand"; export * from "./commands/CreateNotebookInstanceCommand"; export * from "./commands/CreateNotebookInstanceLifecycleConfigCommand"; +export * from "./commands/CreatePipelineCommand"; export * from "./commands/CreatePresignedDomainUrlCommand"; export * from "./commands/CreatePresignedNotebookInstanceUrlCommand"; export * from 
"./commands/CreateProcessingJobCommand"; +export * from "./commands/CreateProjectCommand"; export * from "./commands/CreateTrainingJobCommand"; export * from "./commands/CreateTransformJobCommand"; export * from "./commands/CreateTrialCommand"; @@ -33,39 +41,52 @@ export * from "./commands/CreateTrialComponentCommand"; export * from "./commands/CreateUserProfileCommand"; export * from "./commands/CreateWorkforceCommand"; export * from "./commands/CreateWorkteamCommand"; +export * from "./commands/DeleteActionCommand"; export * from "./commands/DeleteAlgorithmCommand"; export * from "./commands/DeleteAppCommand"; export * from "./commands/DeleteAppImageConfigCommand"; +export * from "./commands/DeleteArtifactCommand"; +export * from "./commands/DeleteAssociationCommand"; export * from "./commands/DeleteCodeRepositoryCommand"; +export * from "./commands/DeleteContextCommand"; export * from "./commands/DeleteDomainCommand"; export * from "./commands/DeleteEndpointCommand"; export * from "./commands/DeleteEndpointConfigCommand"; export * from "./commands/DeleteExperimentCommand"; +export * from "./commands/DeleteFeatureGroupCommand"; export * from "./commands/DeleteFlowDefinitionCommand"; export * from "./commands/DeleteHumanTaskUiCommand"; export * from "./commands/DeleteImageCommand"; export * from "./commands/DeleteImageVersionCommand"; export * from "./commands/DeleteModelCommand"; export * from "./commands/DeleteModelPackageCommand"; +export * from "./commands/DeleteModelPackageGroupCommand"; +export * from "./commands/DeleteModelPackageGroupPolicyCommand"; export * from "./commands/DeleteMonitoringScheduleCommand"; export * from "./commands/DeleteNotebookInstanceCommand"; export * from "./commands/DeleteNotebookInstanceLifecycleConfigCommand"; +export * from "./commands/DeletePipelineCommand"; +export * from "./commands/DeleteProjectCommand"; export * from "./commands/DeleteTagsCommand"; export * from "./commands/DeleteTrialCommand"; export * from 
"./commands/DeleteTrialComponentCommand"; export * from "./commands/DeleteUserProfileCommand"; export * from "./commands/DeleteWorkforceCommand"; export * from "./commands/DeleteWorkteamCommand"; +export * from "./commands/DescribeActionCommand"; export * from "./commands/DescribeAlgorithmCommand"; export * from "./commands/DescribeAppCommand"; export * from "./commands/DescribeAppImageConfigCommand"; +export * from "./commands/DescribeArtifactCommand"; export * from "./commands/DescribeAutoMLJobCommand"; export * from "./commands/DescribeCodeRepositoryCommand"; export * from "./commands/DescribeCompilationJobCommand"; +export * from "./commands/DescribeContextCommand"; export * from "./commands/DescribeDomainCommand"; export * from "./commands/DescribeEndpointCommand"; export * from "./commands/DescribeEndpointConfigCommand"; export * from "./commands/DescribeExperimentCommand"; +export * from "./commands/DescribeFeatureGroupCommand"; export * from "./commands/DescribeFlowDefinitionCommand"; export * from "./commands/DescribeHumanTaskUiCommand"; export * from "./commands/DescribeHyperParameterTuningJobCommand"; @@ -74,10 +95,15 @@ export * from "./commands/DescribeImageVersionCommand"; export * from "./commands/DescribeLabelingJobCommand"; export * from "./commands/DescribeModelCommand"; export * from "./commands/DescribeModelPackageCommand"; +export * from "./commands/DescribeModelPackageGroupCommand"; export * from "./commands/DescribeMonitoringScheduleCommand"; export * from "./commands/DescribeNotebookInstanceCommand"; export * from "./commands/DescribeNotebookInstanceLifecycleConfigCommand"; +export * from "./commands/DescribePipelineCommand"; +export * from "./commands/DescribePipelineDefinitionForExecutionCommand"; +export * from "./commands/DescribePipelineExecutionCommand"; export * from "./commands/DescribeProcessingJobCommand"; +export * from "./commands/DescribeProjectCommand"; export * from "./commands/DescribeSubscribedWorkteamCommand"; export * from 
"./commands/DescribeTrainingJobCommand"; export * from "./commands/DescribeTransformJobCommand"; @@ -86,13 +112,24 @@ export * from "./commands/DescribeTrialComponentCommand"; export * from "./commands/DescribeUserProfileCommand"; export * from "./commands/DescribeWorkforceCommand"; export * from "./commands/DescribeWorkteamCommand"; +export * from "./commands/DisableSagemakerServicecatalogPortfolioCommand"; export * from "./commands/DisassociateTrialComponentCommand"; +export * from "./commands/EnableSagemakerServicecatalogPortfolioCommand"; +export * from "./commands/GetModelPackageGroupPolicyCommand"; +export * from "./commands/GetSagemakerServicecatalogPortfolioStatusCommand"; export * from "./commands/GetSearchSuggestionsCommand"; +export * from "./commands/ListActionsCommand"; +export * from "./pagination/ListActionsPaginator"; export * from "./commands/ListAlgorithmsCommand"; export * from "./pagination/ListAlgorithmsPaginator"; export * from "./commands/ListAppImageConfigsCommand"; +export * from "./pagination/ListAppImageConfigsPaginator"; export * from "./commands/ListAppsCommand"; export * from "./pagination/ListAppsPaginator"; +export * from "./commands/ListArtifactsCommand"; +export * from "./pagination/ListArtifactsPaginator"; +export * from "./commands/ListAssociationsCommand"; +export * from "./pagination/ListAssociationsPaginator"; export * from "./commands/ListAutoMLJobsCommand"; export * from "./pagination/ListAutoMLJobsPaginator"; export * from "./commands/ListCandidatesForAutoMLJobCommand"; @@ -101,6 +138,8 @@ export * from "./commands/ListCodeRepositoriesCommand"; export * from "./pagination/ListCodeRepositoriesPaginator"; export * from "./commands/ListCompilationJobsCommand"; export * from "./pagination/ListCompilationJobsPaginator"; +export * from "./commands/ListContextsCommand"; +export * from "./pagination/ListContextsPaginator"; export * from "./commands/ListDomainsCommand"; export * from "./pagination/ListDomainsPaginator"; export * 
from "./commands/ListEndpointConfigsCommand"; @@ -109,6 +148,7 @@ export * from "./commands/ListEndpointsCommand"; export * from "./pagination/ListEndpointsPaginator"; export * from "./commands/ListExperimentsCommand"; export * from "./pagination/ListExperimentsPaginator"; +export * from "./commands/ListFeatureGroupsCommand"; export * from "./commands/ListFlowDefinitionsCommand"; export * from "./pagination/ListFlowDefinitionsPaginator"; export * from "./commands/ListHumanTaskUisCommand"; @@ -123,6 +163,8 @@ export * from "./commands/ListLabelingJobsCommand"; export * from "./pagination/ListLabelingJobsPaginator"; export * from "./commands/ListLabelingJobsForWorkteamCommand"; export * from "./pagination/ListLabelingJobsForWorkteamPaginator"; +export * from "./commands/ListModelPackageGroupsCommand"; +export * from "./pagination/ListModelPackageGroupsPaginator"; export * from "./commands/ListModelPackagesCommand"; export * from "./pagination/ListModelPackagesPaginator"; export * from "./commands/ListModelsCommand"; @@ -135,8 +177,18 @@ export * from "./commands/ListNotebookInstanceLifecycleConfigsCommand"; export * from "./pagination/ListNotebookInstanceLifecycleConfigsPaginator"; export * from "./commands/ListNotebookInstancesCommand"; export * from "./pagination/ListNotebookInstancesPaginator"; +export * from "./commands/ListPipelineExecutionsCommand"; +export * from "./pagination/ListPipelineExecutionsPaginator"; +export * from "./commands/ListPipelineExecutionStepsCommand"; +export * from "./pagination/ListPipelineExecutionStepsPaginator"; +export * from "./commands/ListPipelineParametersForExecutionCommand"; +export * from "./pagination/ListPipelineParametersForExecutionPaginator"; +export * from "./commands/ListPipelinesCommand"; +export * from "./pagination/ListPipelinesPaginator"; export * from "./commands/ListProcessingJobsCommand"; export * from "./pagination/ListProcessingJobsPaginator"; +export * from "./commands/ListProjectsCommand"; +export * from 
"./pagination/ListProjectsPaginator"; export * from "./commands/ListSubscribedWorkteamsCommand"; export * from "./pagination/ListSubscribedWorkteamsPaginator"; export * from "./commands/ListTagsCommand"; @@ -157,30 +209,39 @@ export * from "./commands/ListWorkforcesCommand"; export * from "./pagination/ListWorkforcesPaginator"; export * from "./commands/ListWorkteamsCommand"; export * from "./pagination/ListWorkteamsPaginator"; +export * from "./commands/PutModelPackageGroupPolicyCommand"; export * from "./commands/RenderUiTemplateCommand"; export * from "./commands/SearchCommand"; export * from "./pagination/SearchPaginator"; export * from "./commands/StartMonitoringScheduleCommand"; export * from "./commands/StartNotebookInstanceCommand"; +export * from "./commands/StartPipelineExecutionCommand"; export * from "./commands/StopAutoMLJobCommand"; export * from "./commands/StopCompilationJobCommand"; export * from "./commands/StopHyperParameterTuningJobCommand"; export * from "./commands/StopLabelingJobCommand"; export * from "./commands/StopMonitoringScheduleCommand"; export * from "./commands/StopNotebookInstanceCommand"; +export * from "./commands/StopPipelineExecutionCommand"; export * from "./commands/StopProcessingJobCommand"; export * from "./commands/StopTrainingJobCommand"; export * from "./commands/StopTransformJobCommand"; +export * from "./commands/UpdateActionCommand"; export * from "./commands/UpdateAppImageConfigCommand"; +export * from "./commands/UpdateArtifactCommand"; export * from "./commands/UpdateCodeRepositoryCommand"; +export * from "./commands/UpdateContextCommand"; export * from "./commands/UpdateDomainCommand"; export * from "./commands/UpdateEndpointCommand"; export * from "./commands/UpdateEndpointWeightsAndCapacitiesCommand"; export * from "./commands/UpdateExperimentCommand"; export * from "./commands/UpdateImageCommand"; +export * from "./commands/UpdateModelPackageCommand"; export * from "./commands/UpdateMonitoringScheduleCommand"; 
export * from "./commands/UpdateNotebookInstanceCommand"; export * from "./commands/UpdateNotebookInstanceLifecycleConfigCommand"; +export * from "./commands/UpdatePipelineCommand"; +export * from "./commands/UpdatePipelineExecutionCommand"; export * from "./commands/UpdateTrialCommand"; export * from "./commands/UpdateTrialComponentCommand"; export * from "./commands/UpdateUserProfileCommand"; diff --git a/clients/client-sagemaker/models/models_0.ts b/clients/client-sagemaker/models/models_0.ts index 95fe5caa4d8a..3b55a2640048 100644 --- a/clients/client-sagemaker/models/models_0.ts +++ b/clients/client-sagemaker/models/models_0.ts @@ -5,6 +5,188 @@ import { } from "@aws-sdk/smithy-client"; import { MetadataBearer as $MetadataBearer } from "@aws-sdk/types"; +/** + *

    A structure describing the source of an action.

    + */ +export interface ActionSource { + /** + *

    The URI of the source.

    + */ + SourceUri: string | undefined; + + /** + *

    The type of the source.

    + */ + SourceType?: string; + + /** + *

    The ID of the source.

    + */ + SourceId?: string; +} + +export namespace ActionSource { + export const filterSensitiveLog = (obj: ActionSource): any => ({ + ...obj, + }); +} + +export enum ActionStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", + UNKNOWN = "Unknown", +} + +/** + *

    Lists the properties of an action. An action represents an action + * or activity. Some examples are a workflow step and a model deployment. Generally, an + * action involves at least one input artifact or output artifact.

    + */ +export interface ActionSummary { + /** + *

    The Amazon Resource Name (ARN) of the action.

    + */ + ActionArn?: string; + + /** + *

    The name of the action.

    + */ + ActionName?: string; + + /** + *

    The source of the action.

    + */ + Source?: ActionSource; + + /** + *

    The type of the action.

    + */ + ActionType?: string; + + /** + *

    The status of the action.

    + */ + Status?: ActionStatus | string; + + /** + *

    When the action was created.

    + */ + CreationTime?: Date; + + /** + *

    When the action was last modified.

    + */ + LastModifiedTime?: Date; +} + +export namespace ActionSummary { + export const filterSensitiveLog = (obj: ActionSummary): any => ({ + ...obj, + }); +} + +export enum AssociationEdgeType { + ASSOCIATED_WITH = "AssociatedWith", + CONTRIBUTED_TO = "ContributedTo", + DERIVED_FROM = "DerivedFrom", + PRODUCED = "Produced", +} + +export interface AddAssociationRequest { + /** + *

    The ARN of the source.

    + */ + SourceArn: string | undefined; + + /** + *

    The Amazon Resource Name (ARN) of the destination.

    + */ + DestinationArn: string | undefined; + + /** + *

    The type of association. The following are suggested uses for each type. Amazon SageMaker + * places no restrictions on their use.

    + *
      + *
    • + *

      ContributedTo - The source contributed to the destination or had a part in + * enabling the destination. For example, the training data contributed to the training + * job.

      + *
    • + *
    • + *

      AssociatedWith - The source is connected to the destination. For example, an + * approval workflow is associated with a model deployment.

      + *
    • + *
    • + *

      DerivedFrom - The destination is a modification of the source. For example, a digest + * output of a channel input for a processing job is derived from the original inputs.

      + *
    • + *
    • + *

      Produced - The source generated the destination. For example, a training job + * produced a model artifact.

      + *
    • + *
    + */ + AssociationType?: AssociationEdgeType | string; +} + +export namespace AddAssociationRequest { + export const filterSensitiveLog = (obj: AddAssociationRequest): any => ({ + ...obj, + }); +} + +export interface AddAssociationResponse { + /** + *

    The ARN of the source.

    + */ + SourceArn?: string; + + /** + *

    The Amazon Resource Name (ARN) of the destination.

    + */ + DestinationArn?: string; +} + +export namespace AddAssociationResponse { + export const filterSensitiveLog = (obj: AddAssociationResponse): any => ({ + ...obj, + }); +} + +/** + *

    You have exceeded an Amazon SageMaker resource limit. For example, you might have too many + * training jobs created.

    + */ +export interface ResourceLimitExceeded extends __SmithyException, $MetadataBearer { + name: "ResourceLimitExceeded"; + $fault: "client"; + Message?: string; +} + +export namespace ResourceLimitExceeded { + export const filterSensitiveLog = (obj: ResourceLimitExceeded): any => ({ + ...obj, + }); +} + +/** + *

    Resource being access is not found.

    + */ +export interface ResourceNotFound extends __SmithyException, $MetadataBearer { + name: "ResourceNotFound"; + $fault: "client"; + Message?: string; +} + +export namespace ResourceNotFound { + export const filterSensitiveLog = (obj: ResourceNotFound): any => ({ + ...obj, + }); +} + /** *

    Describes a tag.

    */ @@ -33,9 +215,10 @@ export interface AddTagsInput { ResourceArn: string | undefined; /** - *

    An array of Tag objects. Each tag is a key-value pair. Only the - * key parameter is required. If you don't specify a value, Amazon SageMaker sets the - * value to an empty string.

    + *

    An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

    */ Tags: Tag[] | undefined; } @@ -59,6 +242,22 @@ export namespace AddTagsOutput { }); } +/** + *

    This API is not supported.

    + */ +export interface Alarm { + /** + *

    + */ + AlarmName?: string; +} + +export namespace Alarm { + export const filterSensitiveLog = (obj: Alarm): any => ({ + ...obj, + }); +} + export enum AlgorithmSortBy { CREATION_TIME = "CreationTime", NAME = "Name", @@ -2635,6 +2834,97 @@ export namespace AppSpecification { }); } +export enum ArtifactSourceIdType { + CUSTOM = "Custom", + MD5_HASH = "MD5Hash", + S3_ETAG = "S3ETag", + S3_VERSION = "S3Version", +} + +/** + *

    The ID and ID type of an artifact source.

    + */ +export interface ArtifactSourceType { + /** + *

    The type of ID.

    + */ + SourceIdType: ArtifactSourceIdType | string | undefined; + + /** + *

    The ID.

    + */ + Value: string | undefined; +} + +export namespace ArtifactSourceType { + export const filterSensitiveLog = (obj: ArtifactSourceType): any => ({ + ...obj, + }); +} + +/** + *

    A structure describing the source of an artifact.

    + */ +export interface ArtifactSource { + /** + *

    The URI of the source.

    + */ + SourceUri: string | undefined; + + /** + *

    A list of source types.

    + */ + SourceTypes?: ArtifactSourceType[]; +} + +export namespace ArtifactSource { + export const filterSensitiveLog = (obj: ArtifactSource): any => ({ + ...obj, + }); +} + +/** + *

    Lists a summary of the properties of an artifact. An artifact represents a URI + * addressable object or data. Some examples are a dataset and a model.

    + */ +export interface ArtifactSummary { + /** + *

    The Amazon Resource Name (ARN) of the artifact.

    + */ + ArtifactArn?: string; + + /** + *

    The name of the artifact.

    + */ + ArtifactName?: string; + + /** + *

    The source of the artifact.

    + */ + Source?: ArtifactSource; + + /** + *

    The type of the artifact.

    + */ + ArtifactType?: string; + + /** + *

    When the artifact was created.

    + */ + CreationTime?: Date; + + /** + *

    When the artifact was last modified.

    + */ + LastModifiedTime?: Date; +} + +export namespace ArtifactSummary { + export const filterSensitiveLog = (obj: ArtifactSummary): any => ({ + ...obj, + }); +} + export interface AssociateTrialComponentRequest { /** *

    The name of the component to be associated with the trial.

    @@ -2672,71 +2962,192 @@ export namespace AssociateTrialComponentResponse { } /** - *

    You have exceeded an Amazon SageMaker resource limit. For example, you might have too many - * training jobs created.

    + *

    Information about the user who created or modified an experiment, trial, or trial + * component.

    */ -export interface ResourceLimitExceeded extends __SmithyException, $MetadataBearer { - name: "ResourceLimitExceeded"; - $fault: "client"; - Message?: string; +export interface UserContext { + /** + *

    The Amazon Resource Name (ARN) of the user's profile.

    + */ + UserProfileArn?: string; + + /** + *

    The name of the user's profile.

    + */ + UserProfileName?: string; + + /** + *

    The domain associated with the user.

    + */ + DomainId?: string; } -export namespace ResourceLimitExceeded { - export const filterSensitiveLog = (obj: ResourceLimitExceeded): any => ({ +export namespace UserContext { + export const filterSensitiveLog = (obj: UserContext): any => ({ ...obj, }); } /** - *

    Resource being access is not found.

    + *

    Lists a summary of the properties of an association. An association is an entity that + * links other lineage or experiment entities. An example would be an association between a + * training job and a model.

    */ -export interface ResourceNotFound extends __SmithyException, $MetadataBearer { - name: "ResourceNotFound"; - $fault: "client"; - Message?: string; -} +export interface AssociationSummary { + /** + *

    The ARN of the source.

    + */ + SourceArn?: string; -export namespace ResourceNotFound { - export const filterSensitiveLog = (obj: ResourceNotFound): any => ({ - ...obj, - }); -} + /** + *

    The Amazon Resource Name (ARN) of the destination.

    + */ + DestinationArn?: string; -export enum AuthMode { - IAM = "IAM", - SSO = "SSO", -} + /** + *

    The source type.

    + */ + SourceType?: string; -export enum CandidateStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", -} + /** + *

    The destination type.

    + */ + DestinationType?: string; -export enum CandidateStepType { - PROCESSING = "AWS::SageMaker::ProcessingJob", - TRAINING = "AWS::SageMaker::TrainingJob", - TRANSFORM = "AWS::SageMaker::TransformJob", -} + /** + *

    The type of the association.

    + */ + AssociationType?: AssociationEdgeType | string; -/** - *

    Information about the steps for a Candidate, and what step it is working on.

    - */ -export interface AutoMLCandidateStep { /** - *

    Whether the Candidate is at the transform, training, or processing step.

    + *

    The name of the source.

    */ - CandidateStepType: CandidateStepType | string | undefined; + SourceName?: string; /** - *

    The ARN for the Candidate's step.

    + *

    The name of the destination.

    */ - CandidateStepArn: string | undefined; + DestinationName?: string; /** - *

    The name for the Candidate's step.

    + *

    When the association was created.

    + */ + CreationTime?: Date; + + /** + *

    Information about the user who created or modified an experiment, trial, or trial + * component.

    + */ + CreatedBy?: UserContext; +} + +export namespace AssociationSummary { + export const filterSensitiveLog = (obj: AssociationSummary): any => ({ + ...obj, + }); +} + +export enum AthenaResultCompressionType { + GZIP = "GZIP", + SNAPPY = "SNAPPY", + ZLIB = "ZLIB", +} + +export enum AthenaResultFormat { + AVRO = "AVRO", + JSON = "JSON", + ORC = "ORC", + PARQUET = "PARQUET", + TEXTFILE = "TEXTFILE", +} + +/** + *

    Configuration for Athena Dataset Definition input.

    + */ +export interface AthenaDatasetDefinition { + /** + *

    The name of the data catalog used in Athena query execution.

    + */ + Catalog: string | undefined; + + /** + *

    The name of the database used in the Athena query execution.

    + */ + Database: string | undefined; + + /** + *

    The SQL query statements, to be executed.

    + */ + QueryString: string | undefined; + + /** + *

    The name of the workgroup in which the Athena query is being started.

    + */ + WorkGroup?: string; + + /** + *

    The location in Amazon S3 where Athena query results are stored.

    + */ + OutputS3Uri: string | undefined; + + /** + *

    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data generated from + * an Athena query execution.

    + */ + KmsKeyId?: string; + + /** + *

    The data storage format for Athena query results.

    + */ + OutputFormat: AthenaResultFormat | string | undefined; + + /** + *

    The compression used for Athena query results.

    + */ + OutputCompression?: AthenaResultCompressionType | string; +} + +export namespace AthenaDatasetDefinition { + export const filterSensitiveLog = (obj: AthenaDatasetDefinition): any => ({ + ...obj, + }); +} + +export enum AuthMode { + IAM = "IAM", + SSO = "SSO", +} + +export enum CandidateStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", +} + +export enum CandidateStepType { + PROCESSING = "AWS::SageMaker::ProcessingJob", + TRAINING = "AWS::SageMaker::TrainingJob", + TRANSFORM = "AWS::SageMaker::TransformJob", +} + +/** + *

    Information about the steps for a Candidate, and what step it is working on.

    + */ +export interface AutoMLCandidateStep { + /** + *

    Whether the Candidate is at the transform, training, or processing step.

    + */ + CandidateStepType: CandidateStepType | string | undefined; + + /** + *

    The ARN for the Candidate's step.

    + */ + CandidateStepArn: string | undefined; + + /** + *

    The name for the Candidate's step.

    */ CandidateStepName: string | undefined; } @@ -3282,16 +3693,173 @@ export enum AutoMLSortOrder { DESCENDING = "Descending", } +/** + *

    Currently, the AutoRollbackConfig API is not supported.

    + */ +export interface AutoRollbackConfig { + /** + *

    + */ + Alarms?: Alarm[]; +} + +export namespace AutoRollbackConfig { + export const filterSensitiveLog = (obj: AutoRollbackConfig): any => ({ + ...obj, + }); +} + export enum AwsManagedHumanLoopRequestSource { REKOGNITION_DETECT_MODERATION_LABELS_IMAGE_V3 = "AWS/Rekognition/DetectModerationLabels/Image/V3", TEXTRACT_ANALYZE_DOCUMENT_FORMS_V1 = "AWS/Textract/AnalyzeDocument/Forms/V1", } +/** + *

    + */ +export interface MetricsSource { + /** + *

    + */ + ContentType: string | undefined; + + /** + *

    + */ + ContentDigest?: string; + + /** + *

    + */ + S3Uri: string | undefined; +} + +export namespace MetricsSource { + export const filterSensitiveLog = (obj: MetricsSource): any => ({ + ...obj, + }); +} + +/** + *

    Contains bias metrics for a model.

    + */ +export interface Bias { + /** + *

    The bias report for a model.

    + */ + Report?: MetricsSource; +} + +export namespace Bias { + export const filterSensitiveLog = (obj: Bias): any => ({ + ...obj, + }); +} + +export enum CapacitySizeType { + CAPACITY_PERCENT = "CAPACITY_PERCENT", + INSTANCE_COUNT = "INSTANCE_COUNT", +} + +/** + *

    Currently, the CapacitySize API is not supported.

    + */ +export interface CapacitySize { + /** + *

    This API is not supported.

    + */ + Type: CapacitySizeType | string | undefined; + + /** + *

    + */ + Value: number | undefined; +} + +export namespace CapacitySize { + export const filterSensitiveLog = (obj: CapacitySize): any => ({ + ...obj, + }); +} + +export enum TrafficRoutingConfigType { + ALL_AT_ONCE = "ALL_AT_ONCE", + CANARY = "CANARY", +} + +/** + *

    Currently, the TrafficRoutingConfig API is not supported.

    + */ +export interface TrafficRoutingConfig { + /** + *

    + */ + Type: TrafficRoutingConfigType | string | undefined; + + /** + *

    + */ + WaitIntervalInSeconds: number | undefined; + + /** + *

    + */ + CanarySize?: CapacitySize; +} + +export namespace TrafficRoutingConfig { + export const filterSensitiveLog = (obj: TrafficRoutingConfig): any => ({ + ...obj, + }); +} + +/** + *

    Currently, the BlueGreenUpdatePolicy API is not supported.

    + */ +export interface BlueGreenUpdatePolicy { + /** + *

    + */ + TrafficRoutingConfiguration: TrafficRoutingConfig | undefined; + + /** + *

    + */ + TerminationWaitInSeconds?: number; + + /** + *

    + */ + MaximumExecutionTimeoutInSeconds?: number; +} + +export namespace BlueGreenUpdatePolicy { + export const filterSensitiveLog = (obj: BlueGreenUpdatePolicy): any => ({ + ...obj, + }); +} + export enum BooleanOperator { AND = "And", OR = "Or", } +/** + *

    Details on the cache hit of a pipeline execution step.

    + */ +export interface CacheHitResult { + /** + *

    The Amazon Resource Name (ARN) of the pipeline execution.

    + */ + SourcePipelineExecutionArn?: string; +} + +export namespace CacheHitResult { + export const filterSensitiveLog = (obj: CacheHitResult): any => ({ + ...obj, + }); +} + export enum CandidateSortBy { CreationTime = "CreationTime", FinalObjectiveMetricValue = "FinalObjectiveMetricValue", @@ -3631,6 +4199,7 @@ export enum TargetDevice { COREML = "coreml", DEEPLENS = "deeplens", IMX8QM = "imx8qm", + JACINTO_TDA4VM = "jacinto_tda4vm", JETSON_NANO = "jetson_nano", JETSON_TX1 = "jetson_tx1", JETSON_TX2 = "jetson_tx2", @@ -3744,6 +4313,27 @@ export namespace CompilationJobSummary { }); } +export enum ConditionOutcome { + FALSE = "False", + TRUE = "True", +} + +/** + *

    Metadata for a Condition step.

    + */ +export interface ConditionStepMetadata { + /** + *

    The outcome of the Condition step evaluation.

    + */ + Outcome?: ConditionOutcome | string; +} + +export namespace ConditionStepMetadata { + export const filterSensitiveLog = (obj: ConditionStepMetadata): any => ({ + ...obj, + }); +} + /** *

    There was a conflict when you attempted to modify an experiment, trial, or trial * component.

    @@ -3896,6 +4486,74 @@ export enum ContentClassifier { FREE_OF_PERSONALLY_IDENTIFIABLE_INFORMATION = "FreeOfPersonallyIdentifiableInformation", } +/** + *

    A structure describing the source of a context.

    + */ +export interface ContextSource { + /** + *

    The URI of the source.

    + */ + SourceUri: string | undefined; + + /** + *

    The type of the source.

    + */ + SourceType?: string; + + /** + *

    The ID of the source.

    + */ + SourceId?: string; +} + +export namespace ContextSource { + export const filterSensitiveLog = (obj: ContextSource): any => ({ + ...obj, + }); +} + +/** + *

    Lists a summary of the properties of a context. A context provides a logical grouping + * of other entities.

    + */ +export interface ContextSummary { + /** + *

    The Amazon Resource Name (ARN) of the context.

    + */ + ContextArn?: string; + + /** + *

    The name of the context.

    + */ + ContextName?: string; + + /** + *

    The source of the context.

    + */ + Source?: ContextSource; + + /** + *

    The type of the context.

    + */ + ContextType?: string; + + /** + *

    When the context was created.

    + */ + CreationTime?: Date; + + /** + *

    When the context was last modified.

    + */ + LastModifiedTime?: Date; +} + +export namespace ContextSummary { + export const filterSensitiveLog = (obj: ContextSummary): any => ({ + ...obj, + }); +} + export enum HyperParameterScalingType { AUTO = "Auto", LINEAR = "Linear", @@ -3987,21 +4645,113 @@ export namespace ContinuousParameterRangeSpecification { } /** - *

    Describes the Docker container for the model package.

    + *

    Metadata properties of the tracking entity, trial, or trial component.

    */ -export interface ModelPackageContainerDefinition { +export interface MetadataProperties { /** - *

    The DNS host name for the Docker container.

    + *

    The commit ID.

    */ - ContainerHostname?: string; + CommitId?: string; /** - *

    The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

    - *

    If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, - * the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both - * registry/repository[:tag] and registry/repository[@digest] - * image path formats. For more information, see Using Your Own Algorithms with Amazon - * SageMaker.

    + *

    The repository.

    + */ + Repository?: string; + + /** + *

    The entity this entity was generated by.

    + */ + GeneratedBy?: string; + + /** + *

    The project ID.

    + */ + ProjectId?: string; +} + +export namespace MetadataProperties { + export const filterSensitiveLog = (obj: MetadataProperties): any => ({ + ...obj, + }); +} + +export interface CreateActionRequest { + /** + *

    The name of the action. Must be unique to your account in an AWS Region.

    + */ + ActionName: string | undefined; + + /** + *

    The source type, ID, and URI.

    + */ + Source: ActionSource | undefined; + + /** + *

    The action type.

    + */ + ActionType: string | undefined; + + /** + *

    The description of the action.

    + */ + Description?: string; + + /** + *

    The status of the action.

    + */ + Status?: ActionStatus | string; + + /** + *

    A list of properties to add to the action.

    + */ + Properties?: { [key: string]: string }; + + /** + *

    Metadata properties of the tracking entity, trial, or trial component.

    + */ + MetadataProperties?: MetadataProperties; + + /** + *

    A list of tags to apply to the action.

    + */ + Tags?: Tag[]; +} + +export namespace CreateActionRequest { + export const filterSensitiveLog = (obj: CreateActionRequest): any => ({ + ...obj, + }); +} + +export interface CreateActionResponse { + /** + *

    The Amazon Resource Name (ARN) of the action.

    + */ + ActionArn?: string; +} + +export namespace CreateActionResponse { + export const filterSensitiveLog = (obj: CreateActionResponse): any => ({ + ...obj, + }); +} + +/** + *

    Describes the Docker container for the model package.

    + */ +export interface ModelPackageContainerDefinition { + /** + *

    The DNS host name for the Docker container.

    + */ + ContainerHostname?: string; + + /** + *

    The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

    + *

    If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, + * the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both + * registry/repository[:tag] and registry/repository[@digest] + * image path formats. For more information, see Using Your Own Algorithms with Amazon + * SageMaker.

    */ Image: string | undefined; @@ -4115,13 +4865,15 @@ export interface InferenceSpecification { /** *

    A list of the instance types on which a transformation job can be run or on which an * endpoint can be deployed.

    + *

    This parameter is required for unversioned models, and optional for versioned models.

    */ - SupportedTransformInstanceTypes: (TransformInstanceType | string)[] | undefined; + SupportedTransformInstanceTypes?: (TransformInstanceType | string)[]; /** *

    A list of the instance types that are used to generate inferences in real-time.

    + *

    This parameter is required for unversioned models, and optional for versioned models.

    */ - SupportedRealtimeInferenceInstanceTypes: (ProductionVariantInstanceType | string)[] | undefined; + SupportedRealtimeInferenceInstanceTypes?: (ProductionVariantInstanceType | string)[]; /** *

    The supported MIME types for the input data.

    @@ -4415,6 +5167,14 @@ export interface CreateAlgorithmInput { *

    Whether to certify the algorithm so that it can be listed in AWS Marketplace.

    */ CertifyForMarketplace?: boolean; + + /** + *

    An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

    + */ + Tags?: Tag[]; } export namespace CreateAlgorithmInput { @@ -4566,6 +5326,57 @@ export namespace CreateAppImageConfigResponse { }); } +export interface CreateArtifactRequest { + /** + *

    The name of the artifact. Must be unique to your account in an AWS Region.

    + */ + ArtifactName?: string; + + /** + *

    The ID, ID type, and URI of the source.

    + */ + Source: ArtifactSource | undefined; + + /** + *

    The artifact type.

    + */ + ArtifactType: string | undefined; + + /** + *

    A list of properties to add to the artifact.

    + */ + Properties?: { [key: string]: string }; + + /** + *

    Metadata properties of the tracking entity, trial, or trial component.

    + */ + MetadataProperties?: MetadataProperties; + + /** + *

    A list of tags to apply to the artifact.

    + */ + Tags?: Tag[]; +} + +export namespace CreateArtifactRequest { + export const filterSensitiveLog = (obj: CreateArtifactRequest): any => ({ + ...obj, + }); +} + +export interface CreateArtifactResponse { + /** + *

    The Amazon Resource Name (ARN) of the artifact.

    + */ + ArtifactArn?: string; +} + +export namespace CreateArtifactResponse { + export const filterSensitiveLog = (obj: CreateArtifactResponse): any => ({ + ...obj, + }); +} + export enum ProblemType { BINARY_CLASSIFICATION = "BinaryClassification", MULTICLASS_CLASSIFICATION = "MulticlassClassification", @@ -4656,6 +5467,14 @@ export interface CreateCodeRepositoryInput { * located, the default branch, and credentials to use to access the repository.

    */ GitConfig: GitConfig | undefined; + + /** + *

    An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

    + */ + Tags?: Tag[]; } export namespace CreateCodeRepositoryInput { @@ -4683,6 +5502,7 @@ export enum Framework { MXNET = "MXNET", ONNX = "ONNX", PYTORCH = "PYTORCH", + SKLEARN = "SKLEARN", TENSORFLOW = "TENSORFLOW", TFLITE = "TFLITE", XGBOOST = "XGBOOST", @@ -5280,6 +6100,33 @@ export interface OutputConfig { * */ CompilerOptions?: string; + + /** + *

    The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume + * after the compilation job. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account.

    + *

    The KmsKeyId can be any of the following formats:

    + *
      + *
    • + *

      Key ID: 1234abcd-12ab-34cd-56ef-1234567890ab + *

      + *
    • + *
    • + *

      Key ARN: + * arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab + *

      + *
    • + *
    • + *

      Alias name: alias/ExampleAlias + *

      + *
    • + *
    • + *

      Alias name ARN: + * arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias + *

      + *
    • + *
    + */ + KmsKeyId?: string; } export namespace OutputConfig { @@ -5341,9 +6188,10 @@ export interface CreateCompilationJobRequest { StoppingCondition: StoppingCondition | undefined; /** - *

    An array of key-value pairs that you want to use to organize and track your AWS - * resource costs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management - * User Guide.

    + *

    An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

    */ Tags?: Tag[]; } @@ -5375,6 +6223,57 @@ export namespace CreateCompilationJobResponse { }); } +export interface CreateContextRequest { + /** + *

    The name of the context. Must be unique to your account in an AWS Region.

    + */ + ContextName: string | undefined; + + /** + *

    The source type, ID, and URI.

    + */ + Source: ContextSource | undefined; + + /** + *

    The context type.

    + */ + ContextType: string | undefined; + + /** + *

    The description of the context.

    + */ + Description?: string; + + /** + *

    A list of properties to add to the context.

    + */ + Properties?: { [key: string]: string }; + + /** + *

    A list of tags to apply to the context.

    + */ + Tags?: Tag[]; +} + +export namespace CreateContextRequest { + export const filterSensitiveLog = (obj: CreateContextRequest): any => ({ + ...obj, + }); +} + +export interface CreateContextResponse { + /** + *

    The Amazon Resource Name (ARN) of the context.

    + */ + ContextArn?: string; +} + +export namespace CreateContextResponse { + export const filterSensitiveLog = (obj: CreateContextResponse): any => ({ + ...obj, + }); +} + /** *

    The JupyterServer app settings.

    */ @@ -5635,11 +6534,10 @@ export interface CreateEndpointInput { EndpointConfigName: string | undefined; /** - *

    An array of key-value pairs. For more information, see Using - * Cost Allocation Tagsin the AWS Billing and Cost Management User - * Guide. - * - *

    + *

    An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

    */ Tags?: Tag[]; } @@ -5783,9 +6681,10 @@ export interface CreateEndpointConfigInput { DataCaptureConfig?: DataCaptureConfig; /** - *

    A list of key-value pairs. For more information, see Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide.

    + *

    An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

    */ Tags?: Tag[]; @@ -5899,93 +6798,478 @@ export namespace CreateExperimentResponse { }); } +export enum FeatureType { + FRACTIONAL = "Fractional", + INTEGRAL = "Integral", + STRING = "String", +} + /** - *

    Defines under what conditions SageMaker creates a human loop. Used within . See for the required - * format of activation conditions.

    + *

    A list of features. You must include FeatureName and + * FeatureType. Valid feature FeatureTypes are + * Integral, Fractional and String.

    */ -export interface HumanLoopActivationConditionsConfig { +export interface FeatureDefinition { /** - *

    JSON expressing use-case specific conditions declaratively. If any condition is matched, atomic tasks are created against the configured work team. - * The set of conditions is different for Rekognition and Textract. For more information about how to structure the JSON, see - * JSON Schema for Human Loop Activation Conditions in Amazon Augmented AI - * in the Amazon SageMaker Developer Guide.

    + *

    The name of a feature. The type must be a string. FeatureName cannot be any + * of the following: is_deleted, write_time, + * api_invocation_time.

    */ - HumanLoopActivationConditions: __LazyJsonString | string | undefined; + FeatureName?: string; + + /** + *

    The value type of a feature. Valid values are Integral, Fractional, or String.

    + */ + FeatureType?: FeatureType | string; } -export namespace HumanLoopActivationConditionsConfig { - export const filterSensitiveLog = (obj: HumanLoopActivationConditionsConfig): any => ({ +export namespace FeatureDefinition { + export const filterSensitiveLog = (obj: FeatureDefinition): any => ({ ...obj, }); } /** - *

    Provides information about how and under what conditions SageMaker creates a human loop. If HumanLoopActivationConfig is not given, then all requests go to humans.

    + *

    The meta data of the Glue table which serves as data catalog for the + * OfflineStore.

    */ -export interface HumanLoopActivationConfig { +export interface DataCatalogConfig { /** - *

    Container structure for defining under what conditions SageMaker creates a human loop.

    + *

    The name of the Glue table.

    */ - HumanLoopActivationConditionsConfig: HumanLoopActivationConditionsConfig | undefined; + TableName: string | undefined; + + /** + *

    The name of the Glue table catalog.

    + */ + Catalog: string | undefined; + + /** + *

    The name of the Glue table database.

    + */ + Database: string | undefined; } -export namespace HumanLoopActivationConfig { - export const filterSensitiveLog = (obj: HumanLoopActivationConfig): any => ({ +export namespace DataCatalogConfig { + export const filterSensitiveLog = (obj: DataCatalogConfig): any => ({ ...obj, }); } /** - *

    Represents an amount of money in United States dollars/

    + *

    The Amazon Simple Storage Service (Amazon S3) location and security configuration for OfflineStore.

    */ -export interface USD { +export interface S3StorageConfig { /** - *

    The whole number of dollars in the amount.

    + *

    The S3 URI, or location in Amazon S3, of OfflineStore.

    + *

    S3 URIs have a format similar to the following: s3://example-bucket/prefix/.

    */ - Dollars?: number; + S3Uri: string | undefined; /** - *

    The fractional portion, in cents, of the amount.

    + *

    The AWS Key Management Service (KMS) key ID of the key used to encrypt any objects + * written into the OfflineStore S3 location.

    + *

    The IAM roleARN that is passed as a parameter to + * CreateFeatureGroup must have below permissions to the + * KmsKeyId:

    + *
      + *
    • + *

      + * "kms:GenerateDataKey" + *

      + *
    • + *
    */ - Cents?: number; + KmsKeyId?: string; +} +export namespace S3StorageConfig { + export const filterSensitiveLog = (obj: S3StorageConfig): any => ({ + ...obj, + }); +} + +/** + *

    The configuration of an OfflineStore.

    + *

    Provide an OfflineStoreConfig in a request to + * CreateFeatureGroup to create an OfflineStore.

    + *

    To encrypt an OfflineStore using at rest data encryption, specify AWS Key + * Management Service (KMS) key ID, or KMSKeyId, in + * S3StorageConfig.

    + */ +export interface OfflineStoreConfig { /** - *

    Fractions of a cent, in tenths.

    + *

    The Amazon Simple Storage Service (Amazon S3) location of OfflineStore.

    */ - TenthFractionsOfACent?: number; + S3StorageConfig: S3StorageConfig | undefined; + + /** + *

    Set to True to disable the automatic creation of an AWS Glue table when + * configuring an OfflineStore.

    + */ + DisableGlueTableCreation?: boolean; + + /** + *

    The meta data of the Glue table that is autogenerated when an OfflineStore + * is created.

    + */ + DataCatalogConfig?: DataCatalogConfig; } -export namespace USD { - export const filterSensitiveLog = (obj: USD): any => ({ +export namespace OfflineStoreConfig { + export const filterSensitiveLog = (obj: OfflineStoreConfig): any => ({ ...obj, }); } /** - *

    Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed.

    - *

    Use one of the following prices for bounding box tasks. Prices are in US dollars and - * should be based on the complexity of the task; the longer it takes in your initial - * testing, the more you should offer.

    - *
      - *
    • - *

      0.036

      - *
    • - *
    • - *

      0.048

      - *
    • - *
    • - *

      0.060

      - *
    • - *
    • - *

      0.072

      - *
    • - *
    • - *

      0.120

      - *
    • - *
    • - *

      0.240

      - *
    • - *
    • + *

      The security configuration for OnlineStore.

      + */ +export interface OnlineStoreSecurityConfig { + /** + *

      The ID of the AWS Key Management Service (AWS KMS) key that SageMaker Feature Store uses + * to encrypt the Amazon S3 objects at rest using Amazon S3 server-side encryption.

      + *

      The caller (either IAM user or IAM role) of CreateFeatureGroup must have + * below permissions to the OnlineStore + * KmsKeyId:

      + *
        + *
      • + *

        + * "kms:Encrypt" + *

        + *
      • + *
      • + *

        + * "kms:Decrypt" + *

        + *
      • + *
      • + *

        + * "kms:DescribeKey" + *

        + *
      • + *
      • + *

        + * "kms:CreateGrant" + *

        + *
      • + *
      • + *

        + * "kms:RetireGrant" + *

        + *
      • + *
      • + *

        + * "kms:ReEncryptFrom" + *

        + *
      • + *
      • + *

        + * "kms:ReEncryptTo" + *

        + *
      • + *
      • + *

        + * "kms:GenerateDataKey" + *

        + *
      • + *
      • + *

        + * "kms:ListAliases" + *

        + *
      • + *
      • + *

        + * "kms:ListGrants" + *

        + *
      • + *
      • + *

        + * "kms:RevokeGrant" + *

        + *
      • + *
      + *

      The caller (either IAM user or IAM role) to all DataPlane operations + * (PutRecord, GetRecord, DeleteRecord) must have + * the following permissions to the KmsKeyId:

      + *
        + *
      • + *

        + * "kms:Decrypt" + *

        + *
      • + *
      + */ + KmsKeyId?: string; +} + +export namespace OnlineStoreSecurityConfig { + export const filterSensitiveLog = (obj: OnlineStoreSecurityConfig): any => ({ + ...obj, + }); +} + +/** + *

      Use this to specify the AWS Key Management Service (KMS) Key ID, or + * KMSKeyId, for at rest data encryption. You can turn + * OnlineStore on or off by specifying the EnableOnlineStore flag + * in OnlineStoreConfig; the default value is False.

      + */ +export interface OnlineStoreConfig { + /** + *

      Use to specify KMS Key ID (KMSKeyId) for at-rest encryption of your + * OnlineStore.

      + */ + SecurityConfig?: OnlineStoreSecurityConfig; + + /** + *

      Turn OnlineStore off by specifying False + * for the EnableOnlineStore flag. Turn OnlineStore + * on by specifying True + * for the EnableOnlineStore flag.

      + *

      The default value is False.

      + */ + EnableOnlineStore?: boolean; +} + +export namespace OnlineStoreConfig { + export const filterSensitiveLog = (obj: OnlineStoreConfig): any => ({ + ...obj, + }); +} + +export interface CreateFeatureGroupRequest { + /** + *

      The name of the FeatureGroup. The name must be unique within an AWS Region + * in an AWS account. The name:

      + *
        + *
      • + *

        Must start and end with an alphanumeric character.

        + *
      • + *
      • + *

        Can only contain alphanumeric characters and hyphens. Spaces are not allowed. + *

        + *
      • + *
      + */ + FeatureGroupName: string | undefined; + + /** + *

      The name of the Feature whose value uniquely identifies a + * Record defined in the FeatureStore. Only the latest record per + * identifier value will be stored in the OnlineStore. + * RecordIdentifierFeatureName must be one of feature definitions' + * names.

      + *

      You use the RecordIdentifierFeatureName to access data in a + * FeatureStore.

      + *

      This name:

      + *
        + *
      • + *

        Must start and end with an alphanumeric character.

        + *
      • + *
      • + *

        Can only contain alphanumeric characters, hyphens, underscores. Spaces are not + * allowed.

        + *
      • + *
      + */ + RecordIdentifierFeatureName: string | undefined; + + /** + *

      The name of the feature that stores the EventTime of a Record + * in a FeatureGroup.

      + *

      An EventTime is a point in time when a new event occurs that corresponds to + * the creation or update of a Record in a FeatureGroup. All + * Records in the FeatureGroup must have a corresponding + * EventTime.

      + *

      An EventTime can be a String or Fractional.

      + *
        + *
      • + *

        + * Fractional: EventTime feature values must be a Unix + * timestamp in seconds.

        + *
      • + *
      • + *

        + * String: EventTime feature values must be an ISO-8601 + * string in the format. The following formats are supported + * yyyy-MM-dd'T'HH:mm:ssZ and yyyy-MM-dd'T'HH:mm:ss.SSSZ + * where yyyy, MM, and dd represent the year, + * month, and day respectively and HH, mm, ss, + * and if applicable, SSS represent the hour, minute, second and + * milliseconds respectively. 'T' and Z are constants.

        + *
      • + *
      + */ + EventTimeFeatureName: string | undefined; + + /** + *

      A list of Feature names and types. Name and Type + * are compulsory per Feature.

      + *

      Valid feature FeatureTypes are Integral, + * Fractional and String.

      + *

      + * FeatureNames cannot be any of the following: is_deleted, + * write_time, api_invocation_time + *

      + *

      You can create up to 2,500 FeatureDefinitions per + * FeatureGroup.

      + */ + FeatureDefinitions: FeatureDefinition[] | undefined; + + /** + *

      You can turn the OnlineStore on or off by specifying True for + * the EnableOnlineStore flag in OnlineStoreConfig; the default + * value is False.

      + *

      You can also include an AWS KMS key ID (KMSKeyId) for at-rest encryption of + * the OnlineStore.

      + */ + OnlineStoreConfig?: OnlineStoreConfig; + + /** + *

      Use this to configure an OfflineFeatureStore. This parameter allows you to + * specify:

      + *
        + *
      • + *

        The Amazon Simple Storage Service (Amazon S3) location of an + * OfflineStore.

        + *
      • + *
      • + *

        A configuration for an AWS Glue or AWS Hive data catalogue.

        + *
      • + *
      • + *

        A KMS encryption key to encrypt the Amazon S3 location used for + * OfflineStore.

        + *
      • + *
      + *

      To learn more about this parameter, see OfflineStoreConfig.

      + */ + OfflineStoreConfig?: OfflineStoreConfig; + + /** + *

      The Amazon Resource Name (ARN) of the IAM execution role used to persist data into the + * OfflineStore if an OfflineStoreConfig is provided.

      + */ + RoleArn?: string; + + /** + *

      A free-form description of a FeatureGroup.

      + */ + Description?: string; + + /** + *

      Tags used to identify Features in each FeatureGroup.

      + */ + Tags?: Tag[]; +} + +export namespace CreateFeatureGroupRequest { + export const filterSensitiveLog = (obj: CreateFeatureGroupRequest): any => ({ + ...obj, + }); +} + +export interface CreateFeatureGroupResponse { + /** + *

      The Amazon Resource Name (ARN) of the FeatureGroup. This is a unique + * identifier for the feature group.

      + */ + FeatureGroupArn: string | undefined; +} + +export namespace CreateFeatureGroupResponse { + export const filterSensitiveLog = (obj: CreateFeatureGroupResponse): any => ({ + ...obj, + }); +} + +/** + *

      Defines under what conditions SageMaker creates a human loop. Used within . See for the required + * format of activation conditions.

      + */ +export interface HumanLoopActivationConditionsConfig { + /** + *

      JSON expressing use-case specific conditions declaratively. If any condition is matched, atomic tasks are created against the configured work team. + * The set of conditions is different for Rekognition and Textract. For more information about how to structure the JSON, see + * JSON Schema for Human Loop Activation Conditions in Amazon Augmented AI + * in the Amazon SageMaker Developer Guide.

      + */ + HumanLoopActivationConditions: __LazyJsonString | string | undefined; +} + +export namespace HumanLoopActivationConditionsConfig { + export const filterSensitiveLog = (obj: HumanLoopActivationConditionsConfig): any => ({ + ...obj, + }); +} + +/** + *

      Provides information about how and under what conditions SageMaker creates a human loop. If HumanLoopActivationConfig is not given, then all requests go to humans.

      + */ +export interface HumanLoopActivationConfig { + /** + *

      Container structure for defining under what conditions SageMaker creates a human loop.

      + */ + HumanLoopActivationConditionsConfig: HumanLoopActivationConditionsConfig | undefined; +} + +export namespace HumanLoopActivationConfig { + export const filterSensitiveLog = (obj: HumanLoopActivationConfig): any => ({ + ...obj, + }); +} + +/** + *

      Represents an amount of money in United States dollars.

      + */ +export interface USD { + /** + *

      The whole number of dollars in the amount.

      + */ + Dollars?: number; + + /** + *

      The fractional portion, in cents, of the amount.

      + */ + Cents?: number; + + /** + *

      Fractions of a cent, in tenths.

      + */ + TenthFractionsOfACent?: number; +} + +export namespace USD { + export const filterSensitiveLog = (obj: USD): any => ({ + ...obj, + }); +} + +/** + *

      Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed.

      + *

      Use one of the following prices for bounding box tasks. Prices are in US dollars and + * should be based on the complexity of the task; the longer it takes in your initial + * testing, the more you should offer.

      + *
        + *
      • + *

        0.036

        + *
      • + *
      • + *

        0.048

        + *
      • + *
      • + *

        0.060

        + *
      • + *
      • + *

        0.072

        + *
      • + *
      • + *

        0.120

        + *
      • + *
      • + *

        0.240

        + *
      • + *
      • *

        0.360

        *
      • *
      • @@ -7350,9 +8634,9 @@ export interface CreateHyperParameterTuningJobRequest { /** *

        An array of key-value pairs. You can use tags to categorize your AWS resources in - * different ways, for example, by purpose, owner, or environment. For more information, - * see AWS - * Tagging Strategies.

        + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

        *

        Tags that you specify for the tuning job are also added to all training jobs that the * tuning job launches.

        */ @@ -9591,10 +10875,10 @@ export interface CreateModelInput { ExecutionRoleArn: string | undefined; /** - *

        An array of key-value pairs. For more information, see Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide. - *

        + *

        An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

        */ Tags?: Tag[]; @@ -9633,27 +10917,122 @@ export namespace CreateModelOutput { }); } +export enum ModelApprovalStatus { + APPROVED = "Approved", + PENDING_MANUAL_APPROVAL = "PendingManualApproval", + REJECTED = "Rejected", +} + /** - *

        Specifies an algorithm that was used to create the model package. The algorithm must - * be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you - * are subscribed to.

        + *

        Contains explainability metrics for a model.

        */ -export interface SourceAlgorithm { +export interface Explainability { /** - *

        The Amazon S3 path where the model artifacts, which result from model training, are stored. - * This path must point to a single gzip compressed tar archive - * (.tar.gz suffix).

        - * - *

        The model artifacts must be in an S3 bucket that is in the same region as the - * algorithm.

        - *
        + *

        The explainability report for a model.

        */ - ModelDataUrl?: string; + Report?: MetricsSource; +} - /** - *

        The name of an algorithm that was used to create the model package. The algorithm must - * be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you - * are subscribed to.

        +export namespace Explainability { + export const filterSensitiveLog = (obj: Explainability): any => ({ + ...obj, + }); +} + +/** + *

        Data quality constraints and statistics for a model.

        + */ +export interface ModelDataQuality { + /** + *

        Data quality statistics for a model.

        + */ + Statistics?: MetricsSource; + + /** + *

        Data quality constraints for a model.

        + */ + Constraints?: MetricsSource; +} + +export namespace ModelDataQuality { + export const filterSensitiveLog = (obj: ModelDataQuality): any => ({ + ...obj, + }); +} + +/** + *

        Model quality statistics and constraints.

        + */ +export interface ModelQuality { + /** + *

        Model quality statistics.

        + */ + Statistics?: MetricsSource; + + /** + *

        Model quality constraints.

        + */ + Constraints?: MetricsSource; +} + +export namespace ModelQuality { + export const filterSensitiveLog = (obj: ModelQuality): any => ({ + ...obj, + }); +} + +/** + *

        Contains metrics captured from a model.

        + */ +export interface ModelMetrics { + /** + *

        Metrics that measure the quality of a model.

        + */ + ModelQuality?: ModelQuality; + + /** + *

        Metrics that measure the quality of the input data for a model.

        + */ + ModelDataQuality?: ModelDataQuality; + + /** + *

        Metrics that measure bias in a model.

        + */ + Bias?: Bias; + + /** + *

        Metrics that help explain a model.

        + */ + Explainability?: Explainability; +} + +export namespace ModelMetrics { + export const filterSensitiveLog = (obj: ModelMetrics): any => ({ + ...obj, + }); +} + +/** + *

        Specifies an algorithm that was used to create the model package. The algorithm must + * be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you + * are subscribed to.

        + */ +export interface SourceAlgorithm { + /** + *

        The Amazon S3 path where the model artifacts, which result from model training, are stored. + * This path must point to a single gzip compressed tar archive + * (.tar.gz suffix).

        + * + *

        The model artifacts must be in an S3 bucket that is in the same region as the + * algorithm.

        + *
        + */ + ModelDataUrl?: string; + + /** + *

        The name of an algorithm that was used to create the model package. The algorithm must + * be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you + * are subscribed to.

        */ AlgorithmName: string | undefined; } @@ -9731,9 +11110,18 @@ export interface CreateModelPackageInput { /** *

        The name of the model package. The name must have 1 to 63 characters. Valid characters * are a-z, A-Z, 0-9, and - (hyphen).

        + *

        This parameter is required for unversioned models. It is not applicable to versioned + * models.

        */ ModelPackageName?: string; + /** + *

        The name of the model group that this model version belongs to.

        + *

        This parameter is required for versioned models, and does not apply to unversioned + * models.

        + */ + ModelPackageGroupName?: string; + /** *

        A description of the model package.

        */ @@ -9772,8 +11160,40 @@ export interface CreateModelPackageInput { /** *

        Whether to certify the model package for listing on AWS Marketplace.

        + *

        This parameter is optional for unversioned models, and does not apply to versioned + * models.

        */ CertifyForMarketplace?: boolean; + + /** + *

        A list of key value pairs associated with the model. For more information, see Tagging AWS + * resources in the AWS General Reference Guide.

        + */ + Tags?: Tag[]; + + /** + *

        Whether the model is approved for deployment.

        + *

        This parameter is optional for versioned models, and does not apply to unversioned + * models.

        + *

        For versioned models, the value of this parameter must be set to Approved + * to deploy the model.

        + */ + ModelApprovalStatus?: ModelApprovalStatus | string; + + /** + *

        Metadata properties of the tracking entity, trial, or trial component.

        + */ + MetadataProperties?: MetadataProperties; + + /** + *

        A structure that contains model metrics reports.

        + */ + ModelMetrics?: ModelMetrics; + + /** + *

        A unique token that guarantees that the call to this API is idempotent.

        + */ + ClientToken?: string; } export namespace CreateModelPackageInput { @@ -9795,6 +11215,44 @@ export namespace CreateModelPackageOutput { }); } +export interface CreateModelPackageGroupInput { + /** + *

        The name of the model group.

        + */ + ModelPackageGroupName: string | undefined; + + /** + *

        A description for the model group.

        + */ + ModelPackageGroupDescription?: string; + + /** + *

        A list of key value pairs associated with the model group. For more information, see + * Tagging AWS + * resources in the AWS General Reference Guide.

        + */ + Tags?: Tag[]; +} + +export namespace CreateModelPackageGroupInput { + export const filterSensitiveLog = (obj: CreateModelPackageGroupInput): any => ({ + ...obj, + }); +} + +export interface CreateModelPackageGroupOutput { + /** + *

        The Amazon Resource Name (ARN) of the model group.

        + */ + ModelPackageGroupArn: string | undefined; +} + +export namespace CreateModelPackageGroupOutput { + export const filterSensitiveLog = (obj: CreateModelPackageGroupOutput): any => ({ + ...obj, + }); +} + /** *

        The constraints resource for a monitoring job.

        */ @@ -10317,7 +11775,7 @@ export interface MonitoringScheduleConfig { /** *

        Defines the monitoring job.

        */ - MonitoringJobDefinition: MonitoringJobDefinition | undefined; + MonitoringJobDefinition?: MonitoringJobDefinition; } export namespace MonitoringScheduleConfig { @@ -10470,8 +11928,10 @@ export interface CreateNotebookInstanceInput { KmsKeyId?: string; /** - *

        A list of tags to associate with the notebook instance. You can add tags later by - * using the CreateTags API.

        + *

        An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

        */ Tags?: Tag[]; @@ -10624,1688 +12084,82 @@ export namespace CreateNotebookInstanceLifecycleConfigOutput { }); } -export interface CreatePresignedDomainUrlRequest { - /** - *

        The domain ID.

        - */ - DomainId: string | undefined; - - /** - *

        The name of the UserProfile to sign-in as.

        - */ - UserProfileName: string | undefined; - - /** - *

        The session expiration duration in seconds.

        - */ - SessionExpirationDurationInSeconds?: number; -} - -export namespace CreatePresignedDomainUrlRequest { - export const filterSensitiveLog = (obj: CreatePresignedDomainUrlRequest): any => ({ - ...obj, - }); -} - -export interface CreatePresignedDomainUrlResponse { - /** - *

        The presigned URL.

        - */ - AuthorizedUrl?: string; -} - -export namespace CreatePresignedDomainUrlResponse { - export const filterSensitiveLog = (obj: CreatePresignedDomainUrlResponse): any => ({ - ...obj, - }); -} - -export interface CreatePresignedNotebookInstanceUrlInput { - /** - *

        The name of the notebook instance.

        - */ - NotebookInstanceName: string | undefined; - - /** - *

        The duration of the session, in seconds. The default is 12 hours.

        - */ - SessionExpirationDurationInSeconds?: number; -} - -export namespace CreatePresignedNotebookInstanceUrlInput { - export const filterSensitiveLog = (obj: CreatePresignedNotebookInstanceUrlInput): any => ({ - ...obj, - }); -} - -export interface CreatePresignedNotebookInstanceUrlOutput { - /** - *

        A JSON object that contains the URL string.

        - */ - AuthorizedUrl?: string; -} - -export namespace CreatePresignedNotebookInstanceUrlOutput { - export const filterSensitiveLog = (obj: CreatePresignedNotebookInstanceUrlOutput): any => ({ - ...obj, - }); -} - -/** - *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

        - * - */ -export interface ExperimentConfig { - /** - *

        The name of an existing experiment to associate the trial component with.

        - */ - ExperimentName?: string; - - /** - *

        The name of an existing trial to associate the trial component with. If not specified, a - * new trial is created.

        - */ - TrialName?: string; - - /** - *

        The display name for the trial component. If this key isn't specified, the display name is - * the trial component name.

        - */ - TrialComponentDisplayName?: string; -} - -export namespace ExperimentConfig { - export const filterSensitiveLog = (obj: ExperimentConfig): any => ({ - ...obj, - }); -} - -export enum ProcessingS3CompressionType { - GZIP = "Gzip", - NONE = "None", -} - -export enum ProcessingS3DataType { - MANIFEST_FILE = "ManifestFile", - S3_PREFIX = "S3Prefix", -} - -/** - *

        Information about where and how you want to obtain the inputs for an processing - * job.

        - */ -export interface ProcessingS3Input { - /** - *

        The URI for the Amazon S3 storage where you want Amazon SageMaker to download the artifacts needed - * to run a processing job.

        - */ - S3Uri: string | undefined; - +export interface CreatePipelineRequest { /** - *

        The local path to the Amazon S3 bucket where you want Amazon SageMaker to download the inputs to - * run a processing job. LocalPath is an absolute path to the input - * data.

        + *

        The name of the pipeline.

        */ - LocalPath: string | undefined; + PipelineName: string | undefined; /** - *

        Whether you use an S3Prefix or a ManifestFile for - * the data type. If you choose S3Prefix, S3Uri identifies a key - * name prefix. Amazon SageMaker uses all objects with the specified key name prefix for the processing - * job. If you choose ManifestFile, S3Uri identifies an object - * that is a manifest file containing a list of object keys that you want Amazon SageMaker to use for - * the processing job.

        + *

        The display name of the pipeline.

        */ - S3DataType: ProcessingS3DataType | string | undefined; + PipelineDisplayName?: string; /** - *

        Whether to use File or Pipe input mode. In - * File mode, Amazon SageMaker copies the data from the input source onto the local - * Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm. - * This is the most commonly used input mode. In Pipe mode, Amazon SageMaker streams input - * data from the source directly to your algorithm without using the EBS volume.

        + *

        The JSON pipeline definition of the pipeline.

        */ - S3InputMode: ProcessingS3InputMode | string | undefined; + PipelineDefinition: string | undefined; /** - *

        Whether the data stored in Amazon S3 is FullyReplicated or - * ShardedByS3Key.

        + *

        A description of the pipeline.

        */ - S3DataDistributionType?: ProcessingS3DataDistributionType | string; + PipelineDescription?: string; /** - *

        Whether to use Gzip compression for Amazon S3 storage.

        + *

        A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * operation. An idempotent operation completes no more than one time.

        */ - S3CompressionType?: ProcessingS3CompressionType | string; -} - -export namespace ProcessingS3Input { - export const filterSensitiveLog = (obj: ProcessingS3Input): any => ({ - ...obj, - }); -} + ClientRequestToken?: string; -/** - *

        The inputs for a processing job.

        - */ -export interface ProcessingInput { /** - *

        The name of the inputs for the processing job.

        + *

        The Amazon Resource Name (ARN) of the role used by the pipeline to access and create resources.

        */ - InputName: string | undefined; + RoleArn: string | undefined; /** - *

        The S3 inputs for the processing job.

        + *

        A list of tags to apply to the created pipeline.

        */ - S3Input: ProcessingS3Input | undefined; + Tags?: Tag[]; } -export namespace ProcessingInput { - export const filterSensitiveLog = (obj: ProcessingInput): any => ({ +export namespace CreatePipelineRequest { + export const filterSensitiveLog = (obj: CreatePipelineRequest): any => ({ ...obj, }); } -/** - *

        Information about where and how you want to store the results of an - * processing job.

        - */ -export interface ProcessingS3Output { - /** - *

        A URI that identifies the Amazon S3 bucket where you want Amazon SageMaker to save the results of - * a processing job.

        - */ - S3Uri: string | undefined; - - /** - *

        The local path to the Amazon S3 bucket where you want Amazon SageMaker to save the results of an - * processing job. LocalPath is an absolute path to the input data.

        - */ - LocalPath: string | undefined; - +export interface CreatePipelineResponse { /** - *

        Whether to upload the results of the processing job continuously or after the job - * completes.

        + *

        The Amazon Resource Name (ARN) of the created pipeline.

        */ - S3UploadMode: ProcessingS3UploadMode | string | undefined; + PipelineArn?: string; } -export namespace ProcessingS3Output { - export const filterSensitiveLog = (obj: ProcessingS3Output): any => ({ +export namespace CreatePipelineResponse { + export const filterSensitiveLog = (obj: CreatePipelineResponse): any => ({ ...obj, }); } -/** - *

        Describes the results of a processing job.

        - */ -export interface ProcessingOutput { - /** - *

        The name for the processing job output.

        - */ - OutputName: string | undefined; - +export interface CreatePresignedDomainUrlRequest { /** - *

        Configuration for processing job outputs in Amazon S3.

        + *

        The domain ID.

        */ - S3Output: ProcessingS3Output | undefined; -} - -export namespace ProcessingOutput { - export const filterSensitiveLog = (obj: ProcessingOutput): any => ({ - ...obj, - }); -} + DomainId: string | undefined; -/** - *

        The output configuration for the processing job.

        - */ -export interface ProcessingOutputConfig { /** - *

        Output configuration information for a processing job.

        + *

        The name of the UserProfile to sign-in as.

        */ - Outputs: ProcessingOutput[] | undefined; + UserProfileName: string | undefined; /** - *

        The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the processing - * job output. KmsKeyId can be an ID of a KMS key, ARN of a KMS key, alias of - * a KMS key, or alias of a KMS key. The KmsKeyId is applied to all - * outputs.

        + *

        The session expiration duration in seconds.

        */ - KmsKeyId?: string; -} - -export namespace ProcessingOutputConfig { - export const filterSensitiveLog = (obj: ProcessingOutputConfig): any => ({ - ...obj, - }); + SessionExpirationDurationInSeconds?: number; } -/** - *

        Configuration for the cluster used to run a processing job.

        - */ -export interface ProcessingClusterConfig { - /** - *

        The number of ML compute instances to use in the processing job. For distributed - * processing jobs, specify a value greater than 1. The default value is 1.

        - */ - InstanceCount: number | undefined; - - /** - *

        The ML compute instance type for the processing job.

        - */ - InstanceType: ProcessingInstanceType | string | undefined; - - /** - *

        The size of the ML storage volume in gigabytes that you want to provision. You must - * specify sufficient ML storage for your scenario.

        - */ - VolumeSizeInGB: number | undefined; - - /** - *

        The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the - * storage volume attached to the ML compute instance(s) that run the processing job. - *

        - */ - VolumeKmsKeyId?: string; -} - -export namespace ProcessingClusterConfig { - export const filterSensitiveLog = (obj: ProcessingClusterConfig): any => ({ - ...obj, - }); -} - -/** - *

        Identifies the resources, ML compute instances, and ML storage volumes to deploy for a - * processing job. In distributed training, you specify more than one instance.

        - */ -export interface ProcessingResources { - /** - *

        The configuration for the resources in a cluster used to run the processing - * job.

        - */ - ClusterConfig: ProcessingClusterConfig | undefined; -} - -export namespace ProcessingResources { - export const filterSensitiveLog = (obj: ProcessingResources): any => ({ - ...obj, - }); -} - -/** - *

        Specifies a time limit for how long the processing job is allowed to run.

        - */ -export interface ProcessingStoppingCondition { - /** - *

        Specifies the maximum runtime in seconds.

        - */ - MaxRuntimeInSeconds: number | undefined; -} - -export namespace ProcessingStoppingCondition { - export const filterSensitiveLog = (obj: ProcessingStoppingCondition): any => ({ - ...obj, - }); -} - -export interface CreateProcessingJobRequest { - /** - *

        For each input, data is downloaded from S3 into the processing container before the - * processing job begins running if "S3InputMode" is set to File.

        - */ - ProcessingInputs?: ProcessingInput[]; - - /** - *

        Output configuration for the processing job.

        - */ - ProcessingOutputConfig?: ProcessingOutputConfig; - - /** - *

        The name of the processing job. The name must be unique within an AWS Region in the - * AWS account.

        - */ - ProcessingJobName: string | undefined; - - /** - *

        Identifies the resources, ML compute instances, and ML storage volumes to deploy for a - * processing job. In distributed training, you specify more than one instance.

        - */ - ProcessingResources: ProcessingResources | undefined; - - /** - *

        The time limit for how long the processing job is allowed to run.

        - */ - StoppingCondition?: ProcessingStoppingCondition; - - /** - *

        Configures the processing job to run a specified Docker container image.

        - */ - AppSpecification: AppSpecification | undefined; - - /** - *

        Sets the environment variables in the Docker container.

        - */ - Environment?: { [key: string]: string }; - - /** - *

        Networking options for a processing job.

        - */ - NetworkConfig?: NetworkConfig; - - /** - *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on - * your behalf.

        - */ - RoleArn: string | undefined; - - /** - *

        (Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management - * User Guide.

        - */ - Tags?: Tag[]; - - /** - *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

        - * - */ - ExperimentConfig?: ExperimentConfig; -} - -export namespace CreateProcessingJobRequest { - export const filterSensitiveLog = (obj: CreateProcessingJobRequest): any => ({ - ...obj, - }); -} - -export interface CreateProcessingJobResponse { - /** - *

        The Amazon Resource Name (ARN) of the processing job.

        - */ - ProcessingJobArn: string | undefined; -} - -export namespace CreateProcessingJobResponse { - export const filterSensitiveLog = (obj: CreateProcessingJobResponse): any => ({ - ...obj, - }); -} - -/** - *

        Configuration information for the debug hook parameters, collection configuration, and - * storage paths.

        - */ -export interface DebugHookConfig { - /** - *

        Path to local storage location for tensors. Defaults to - * /opt/ml/output/tensors/.

        - */ - LocalPath?: string; - - /** - *

        Path to Amazon S3 storage location for tensors.

        - */ - S3OutputPath: string | undefined; - - /** - *

        Configuration information for the debug hook parameters.

        - */ - HookParameters?: { [key: string]: string }; - - /** - *

        Configuration information for tensor collections.

        - */ - CollectionConfigurations?: CollectionConfiguration[]; -} - -export namespace DebugHookConfig { - export const filterSensitiveLog = (obj: DebugHookConfig): any => ({ - ...obj, - }); -} - -/** - *

        Configuration information for debugging rules.

        - */ -export interface DebugRuleConfiguration { - /** - *

        The name of the rule configuration. It must be unique relative to other rule - * configuration names.

        - */ - RuleConfigurationName: string | undefined; - - /** - *

        Path to local storage location for output of rules. Defaults to - * /opt/ml/processing/output/rule/.

        - */ - LocalPath?: string; - - /** - *

        Path to Amazon S3 storage location for rules.

        - */ - S3OutputPath?: string; - - /** - *

        The Amazon Elastic Container (ECR) Image for the managed rule evaluation.

        - */ - RuleEvaluatorImage: string | undefined; - - /** - *

        The instance type to deploy for a training job.

        - */ - InstanceType?: ProcessingInstanceType | string; - - /** - *

        The size, in GB, of the ML storage volume attached to the processing instance.

        - */ - VolumeSizeInGB?: number; - - /** - *

        Runtime configuration for rule container.

        - */ - RuleParameters?: { [key: string]: string }; -} - -export namespace DebugRuleConfiguration { - export const filterSensitiveLog = (obj: DebugRuleConfiguration): any => ({ - ...obj, - }); -} - -/** - *

        Configuration of storage locations for TensorBoard output.

        - */ -export interface TensorBoardOutputConfig { - /** - *

        Path to local storage location for tensorBoard output. Defaults to - * /opt/ml/output/tensorboard.

        - */ - LocalPath?: string; - - /** - *

        Path to Amazon S3 storage location for TensorBoard output.

        - */ - S3OutputPath: string | undefined; -} - -export namespace TensorBoardOutputConfig { - export const filterSensitiveLog = (obj: TensorBoardOutputConfig): any => ({ - ...obj, - }); -} - -export interface CreateTrainingJobRequest { - /** - *

        The name of the training job. The name must be unique within an AWS Region in an - * AWS account.

        - */ - TrainingJobName: string | undefined; - - /** - *

        Algorithm-specific parameters that influence the quality of the model. You set - * hyperparameters before you start the learning process. For a list of hyperparameters for - * each training algorithm provided by Amazon SageMaker, see Algorithms.

        - *

        You can specify a maximum of 100 hyperparameters. Each hyperparameter is a - * key-value pair. Each key and value is limited to 256 characters, as specified by the - * Length Constraint.

        - */ - HyperParameters?: { [key: string]: string }; - - /** - *

        The registry path of the Docker image that contains the training algorithm and - * algorithm-specific metadata, including the input mode. For more information about - * algorithms provided by Amazon SageMaker, see Algorithms. For information about - * providing your own algorithms, see Using Your Own Algorithms with Amazon - * SageMaker.

        - */ - AlgorithmSpecification: AlgorithmSpecification | undefined; - - /** - *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform - * tasks on your behalf.

        - *

        During model training, Amazon SageMaker needs your permission to read input data from an S3 - * bucket, download a Docker image that contains training code, write model artifacts to an - * S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant - * permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker - * Roles.

        - * - *

        To be able to pass this role to Amazon SageMaker, the caller of this API must have the - * iam:PassRole permission.

        - *
        - */ - RoleArn: string | undefined; - - /** - *

        An array of Channel objects. Each channel is a named input source. - * InputDataConfig - * - * describes the input data and its location.

        - *

        Algorithms can accept input data from one or more channels. For example, an - * algorithm might have two channels of input data, training_data and - * validation_data. The configuration for each channel provides the S3, - * EFS, or FSx location where the input data is stored. It also provides information about - * the stored data: the MIME type, compression method, and whether the data is wrapped in - * RecordIO format.

        - *

        Depending on the input mode that the algorithm supports, Amazon SageMaker either copies input - * data files from an S3 bucket to a local directory in the Docker container, or makes it - * available as input streams. For example, if you specify an EFS location, input data - * files will be made available as input streams. They do not need to be - * downloaded.

        - */ - InputDataConfig?: Channel[]; - - /** - *

        Specifies the path to the S3 location where you want to store model artifacts. Amazon SageMaker - * creates subfolders for the artifacts.

        - */ - OutputDataConfig: OutputDataConfig | undefined; - - /** - *

        The resources, including the ML compute instances and ML storage volumes, to use - * for model training.

        - *

        ML storage volumes store model artifacts and incremental states. Training - * algorithms might also use ML storage volumes for scratch space. If you want Amazon SageMaker to use - * the ML storage volume to store the training data, choose File as the - * TrainingInputMode in the algorithm specification. For distributed - * training algorithms, specify an instance count greater than 1.

        - */ - ResourceConfig: ResourceConfig | undefined; - - /** - *

        A VpcConfig object that specifies the VPC that you want your - * training job to connect to. Control access to and from your training container by - * configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon - * Virtual Private Cloud.

        - */ - VpcConfig?: VpcConfig; - - /** - *

        Specifies a limit to how long a model training job can run. When the job reaches the - * time limit, Amazon SageMaker ends the training job. Use this API to cap model training costs.

        - *

        To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays - * job termination for 120 seconds. Algorithms can use this 120-second window to save the - * model artifacts, so the results of training are not lost.

        - */ - StoppingCondition: StoppingCondition | undefined; - - /** - *

        An array of key-value pairs. For more information, see Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide. - * - *

        - */ - Tags?: Tag[]; - - /** - *

        Isolates the training container. No inbound or outbound network calls can be made, - * except for calls between peers within a training cluster for distributed training. If - * you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker - * downloads and uploads customer data and model artifacts through the specified VPC, but - * the training container does not have network access.

        - */ - EnableNetworkIsolation?: boolean; - - /** - *

        To encrypt all communications between ML compute instances in distributed training, - * choose True. Encryption provides greater security for distributed training, - * but training might take longer. How long it takes depends on the amount of communication - * between compute instances, especially if you use a deep learning algorithm in - * distributed training. For more information, see Protect Communications Between ML - * Compute Instances in a Distributed Training Job.

        - */ - EnableInterContainerTrafficEncryption?: boolean; - - /** - *

        To train models using managed spot training, choose True. Managed spot - * training provides a fully managed and scalable infrastructure for training machine - * learning models. this option is useful when training jobs can be interrupted and when - * there is flexibility when the training job is run.

        - *

        The complete and intermediate results of jobs are stored in an Amazon S3 bucket, and can be - * used as a starting point to train models incrementally. Amazon SageMaker provides metrics and - * logs in CloudWatch. They can be used to see when managed spot training jobs are running, - * interrupted, resumed, or completed.

        - */ - EnableManagedSpotTraining?: boolean; - - /** - *

        Contains information about the output location for managed spot training checkpoint - * data.

        - */ - CheckpointConfig?: CheckpointConfig; - - /** - *

        Configuration information for the debug hook parameters, collection configuration, and - * storage paths.

        - */ - DebugHookConfig?: DebugHookConfig; - - /** - *

        Configuration information for debugging rules.

        - */ - DebugRuleConfigurations?: DebugRuleConfiguration[]; - - /** - *

        Configuration of storage locations for TensorBoard output.

        - */ - TensorBoardOutputConfig?: TensorBoardOutputConfig; - - /** - *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

        - * - */ - ExperimentConfig?: ExperimentConfig; -} - -export namespace CreateTrainingJobRequest { - export const filterSensitiveLog = (obj: CreateTrainingJobRequest): any => ({ - ...obj, - }); -} - -export interface CreateTrainingJobResponse { - /** - *

        The Amazon Resource Name (ARN) of the training job.

        - */ - TrainingJobArn: string | undefined; -} - -export namespace CreateTrainingJobResponse { - export const filterSensitiveLog = (obj: CreateTrainingJobResponse): any => ({ - ...obj, - }); -} - -export enum JoinSource { - INPUT = "Input", - NONE = "None", -} - -/** - *

        The data structure used to specify the data to be used for inference in a batch - * transform job and to associate the data that is relevant to the prediction results in - * the output. The input filter provided allows you to exclude input data that is not - * needed for inference in a batch transform job. The output filter provided allows you to - * include input data relevant to interpreting the predictions in the output from the job. - * For more information, see Associate Prediction - * Results with their Corresponding Input Records.

        - */ -export interface DataProcessing { - /** - *

        A JSONPath expression used to select a portion of the input data to pass to - * the algorithm. Use the InputFilter parameter to exclude fields, such as an - * ID column, from the input. If you want Amazon SageMaker to pass the entire input dataset to the - * algorithm, accept the default value $.

        - *

        Examples: "$", "$[1:]", "$.features" - *

        - */ - InputFilter?: string; - - /** - *

        A JSONPath expression used to select a portion of the joined dataset to save - * in the output file for a batch transform job. If you want Amazon SageMaker to store the entire input - * dataset in the output file, leave the default value, $. If you specify - * indexes that aren't within the dimension size of the joined dataset, you get an - * error.

        - *

        Examples: "$", "$[0,5:]", - * "$['id','SageMakerOutput']" - *

        - */ - OutputFilter?: string; - - /** - *

        Specifies the source of the data to join with the transformed data. The valid values - * are None and Input. The default value is None, - * which specifies not to join the input with the transformed data. If you want the batch - * transform job to join the original input data with the transformed data, set - * JoinSource to Input.

        - * - *

        For JSON or JSONLines objects, such as a JSON array, Amazon SageMaker adds the transformed data to - * the input JSON object in an attribute called SageMakerOutput. The joined - * result for JSON must be a key-value pair object. If the input is not a key-value pair - * object, Amazon SageMaker creates a new JSON file. In the new JSON file, and the input data is stored - * under the SageMakerInput key and the results are stored in - * SageMakerOutput.

        - *

        For CSV files, Amazon SageMaker combines the transformed data with the input data at the end of - * the input data and stores it in the output file. The joined data has the joined input - * data followed by the transformed data and the output is a CSV file.

        - */ - JoinSource?: JoinSource | string; -} - -export namespace DataProcessing { - export const filterSensitiveLog = (obj: DataProcessing): any => ({ - ...obj, - }); -} - -/** - *

        Configures the timeout and maximum number of retries for processing a transform job - * invocation.

        - */ -export interface ModelClientConfig { - /** - *

        The timeout value in seconds for an invocation request.

        - */ - InvocationsTimeoutInSeconds?: number; - - /** - *

        The maximum number of retries when invocation requests are failing.

        - */ - InvocationsMaxRetries?: number; -} - -export namespace ModelClientConfig { - export const filterSensitiveLog = (obj: ModelClientConfig): any => ({ - ...obj, - }); -} - -export interface CreateTransformJobRequest { - /** - *

        The name of the transform job. The name must be unique within an AWS Region in an - * AWS account.

        - */ - TransformJobName: string | undefined; - - /** - *

        The name of the model that you want to use for the transform job. - * ModelName must be the name of an existing Amazon SageMaker model within an AWS - * Region in an AWS account.

        - */ - ModelName: string | undefined; - - /** - *

        The maximum number of parallel requests that can be sent to each instance in a - * transform job. If MaxConcurrentTransforms is set to 0 or left - * unset, Amazon SageMaker checks the optional execution-parameters to determine the settings for your - * chosen algorithm. If the execution-parameters endpoint is not enabled, the default value - * is 1. For more information on execution-parameters, see How Containers Serve Requests. For built-in algorithms, you don't need to - * set a value for MaxConcurrentTransforms.

        - */ - MaxConcurrentTransforms?: number; - - /** - *

        Configures the timeout and maximum number of retries for processing a transform job - * invocation.

        - */ - ModelClientConfig?: ModelClientConfig; - - /** - *

        The maximum allowed size of the payload, in MB. A payload is the - * data portion of a record (without metadata). The value in MaxPayloadInMB - * must be greater than, or equal to, the size of a single record. To estimate the size of - * a record in MB, divide the size of your dataset by the number of records. To ensure that - * the records fit within the maximum payload size, we recommend using a slightly larger - * value. The default value is 6 MB. - *

        - *

        For cases where the payload might be arbitrarily large and is transmitted using HTTP - * chunked encoding, set the value to 0. - * This - * feature works only in supported algorithms. Currently, Amazon SageMaker built-in - * algorithms do not support HTTP chunked encoding.

        - */ - MaxPayloadInMB?: number; - - /** - *

        Specifies the number of records to include in a mini-batch for an HTTP inference - * request. A record - * is a single unit of input data that - * inference can be made on. For example, a single line in a CSV file is a record.

        - *

        To enable the batch strategy, you must set the SplitType property to - * Line, RecordIO, or TFRecord.

        - *

        To use only one record when making an HTTP invocation request to a container, set - * BatchStrategy to SingleRecord and SplitType - * to Line.

        - *

        To fit as many records in a mini-batch as can fit within the - * MaxPayloadInMB limit, set BatchStrategy to - * MultiRecord and SplitType to Line.

        - */ - BatchStrategy?: BatchStrategy | string; - - /** - *

        The environment variables to set in the Docker container. We support up to 16 key and - * values entries in the map.

        - */ - Environment?: { [key: string]: string }; - - /** - *

        Describes the input source and - * the - * way the transform job consumes it.

        - */ - TransformInput: TransformInput | undefined; - - /** - *

        Describes the results of the transform job.

        - */ - TransformOutput: TransformOutput | undefined; - - /** - *

        Describes the resources, including - * ML - * instance types and ML instance count, to use for the transform - * job.

        - */ - TransformResources: TransformResources | undefined; - - /** - *

        The data structure used to specify the data to be used for inference in a batch - * transform job and to associate the data that is relevant to the prediction results in - * the output. The input filter provided allows you to exclude input data that is not - * needed for inference in a batch transform job. The output filter provided allows you to - * include input data relevant to interpreting the predictions in the output from the job. - * For more information, see Associate Prediction - * Results with their Corresponding Input Records.

        - */ - DataProcessing?: DataProcessing; - - /** - *

        (Optional) - * An - * array of key-value pairs. For more information, see Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide.

        - */ - Tags?: Tag[]; - - /** - *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

        - * - */ - ExperimentConfig?: ExperimentConfig; -} - -export namespace CreateTransformJobRequest { - export const filterSensitiveLog = (obj: CreateTransformJobRequest): any => ({ - ...obj, - }); -} - -export interface CreateTransformJobResponse { - /** - *

        The Amazon Resource Name (ARN) of the transform job.

        - */ - TransformJobArn: string | undefined; -} - -export namespace CreateTransformJobResponse { - export const filterSensitiveLog = (obj: CreateTransformJobResponse): any => ({ - ...obj, - }); -} - -export interface CreateTrialRequest { - /** - *

        The name of the trial. The name must be unique in your AWS account and is not - * case-sensitive.

        - */ - TrialName: string | undefined; - - /** - *

        The name of the trial as displayed. The name doesn't need to be unique. If - * DisplayName isn't specified, TrialName is displayed.

        - */ - DisplayName?: string; - - /** - *

        The name of the experiment to associate the trial with.

        - */ - ExperimentName: string | undefined; - - /** - *

        A list of tags to associate with the trial. You can use Search API to - * search on the tags.

        - */ - Tags?: Tag[]; -} - -export namespace CreateTrialRequest { - export const filterSensitiveLog = (obj: CreateTrialRequest): any => ({ - ...obj, - }); -} - -export interface CreateTrialResponse { - /** - *

        The Amazon Resource Name (ARN) of the trial.

        - */ - TrialArn?: string; -} - -export namespace CreateTrialResponse { - export const filterSensitiveLog = (obj: CreateTrialResponse): any => ({ - ...obj, - }); -} - -/** - *

        Represents an input or output artifact of a trial component. You specify - * TrialComponentArtifact as part of the InputArtifacts and - * OutputArtifacts parameters in the CreateTrialComponent - * request.

        - *

        Examples of input artifacts are datasets, algorithms, hyperparameters, source code, and - * instance types. Examples of output artifacts are metrics, snapshots, logs, and images.

        - */ -export interface TrialComponentArtifact { - /** - *

        The media type of the artifact, which indicates the type of data in the artifact file. The - * media type consists of a type and a subtype - * concatenated with a slash (/) character, for example, text/csv, image/jpeg, and s3/uri. The - * type specifies the category of the media. The subtype specifies the kind of data.

        - */ - MediaType?: string; - - /** - *

        The location of the artifact.

        - */ - Value: string | undefined; -} - -export namespace TrialComponentArtifact { - export const filterSensitiveLog = (obj: TrialComponentArtifact): any => ({ - ...obj, - }); -} - -/** - *

        The value of a hyperparameter. Only one of NumberValue or - * StringValue can be specified.

        - *

        This object is specified in the CreateTrialComponent request.

        - */ -export interface TrialComponentParameterValue { - /** - *

        The string value of a categorical hyperparameter. If you specify a value for this - * parameter, you can't specify the NumberValue parameter.

        - */ - StringValue?: string; - - /** - *

        The numeric value of a numeric hyperparameter. If you specify a value for this parameter, - * you can't specify the StringValue parameter.

        - */ - NumberValue?: number; -} - -export namespace TrialComponentParameterValue { - export const filterSensitiveLog = (obj: TrialComponentParameterValue): any => ({ - ...obj, - }); -} - -export enum TrialComponentPrimaryStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", -} - -/** - *

        The status of the trial component.

        - */ -export interface TrialComponentStatus { - /** - *

        The status of the trial component.

        - */ - PrimaryStatus?: TrialComponentPrimaryStatus | string; - - /** - *

        If the component failed, a message describing why.

        - */ - Message?: string; -} - -export namespace TrialComponentStatus { - export const filterSensitiveLog = (obj: TrialComponentStatus): any => ({ - ...obj, - }); -} - -export interface CreateTrialComponentRequest { - /** - *

        The name of the component. The name must be unique in your AWS account and is not - * case-sensitive.

        - */ - TrialComponentName: string | undefined; - - /** - *

        The name of the component as displayed. The name doesn't need to be unique. If - * DisplayName isn't specified, TrialComponentName is - * displayed.

        - */ - DisplayName?: string; - - /** - *

        The status of the component. States include:

        - *
          - *
        • - *

          InProgress

          - *
        • - *
        • - *

          Completed

          - *
        • - *
        • - *

          Failed

          - *
        • - *
        - */ - Status?: TrialComponentStatus; - - /** - *

        When the component started.

        - */ - StartTime?: Date; - - /** - *

        When the component ended.

        - */ - EndTime?: Date; - - /** - *

        The hyperparameters for the component.

        - */ - Parameters?: { [key: string]: TrialComponentParameterValue }; - - /** - *

        The input artifacts for the component. Examples of input artifacts are datasets, - * algorithms, hyperparameters, source code, and instance types.

        - */ - InputArtifacts?: { [key: string]: TrialComponentArtifact }; - - /** - *

        The output artifacts for the component. Examples of output artifacts are metrics, - * snapshots, logs, and images.

        - */ - OutputArtifacts?: { [key: string]: TrialComponentArtifact }; - - /** - *

        A list of tags to associate with the component. You can use Search API - * to search on the tags.

        - */ - Tags?: Tag[]; -} - -export namespace CreateTrialComponentRequest { - export const filterSensitiveLog = (obj: CreateTrialComponentRequest): any => ({ - ...obj, - }); -} - -export interface CreateTrialComponentResponse { - /** - *

        The Amazon Resource Name (ARN) of the trial component.

        - */ - TrialComponentArn?: string; -} - -export namespace CreateTrialComponentResponse { - export const filterSensitiveLog = (obj: CreateTrialComponentResponse): any => ({ - ...obj, - }); -} - -export interface CreateUserProfileRequest { - /** - *

        The ID of the associated Domain.

        - */ - DomainId: string | undefined; - - /** - *

        A name for the UserProfile.

        - */ - UserProfileName: string | undefined; - - /** - *

        A specifier for the type of value specified in SingleSignOnUserValue. Currently, the only supported value is "UserName". - * If the Domain's AuthMode is SSO, this field is required. If the Domain's AuthMode is not SSO, this field cannot be specified. - *

        - */ - SingleSignOnUserIdentifier?: string; - - /** - *

        The username of the associated AWS Single Sign-On User for this UserProfile. If the Domain's AuthMode is SSO, this field is - * required, and must match a valid username of a user in your directory. If the Domain's AuthMode is not SSO, this field cannot be specified. - *

        - */ - SingleSignOnUserValue?: string; - - /** - *

        Each tag consists of a key and an optional value. - * Tag keys must be unique per resource.

        - */ - Tags?: Tag[]; - - /** - *

        A collection of settings.

        - */ - UserSettings?: UserSettings; -} - -export namespace CreateUserProfileRequest { - export const filterSensitiveLog = (obj: CreateUserProfileRequest): any => ({ - ...obj, - }); -} - -export interface CreateUserProfileResponse { - /** - *

        The user profile Amazon Resource Name (ARN).

        - */ - UserProfileArn?: string; -} - -export namespace CreateUserProfileResponse { - export const filterSensitiveLog = (obj: CreateUserProfileResponse): any => ({ - ...obj, - }); -} - -/** - *

        Use this parameter to configure your OIDC Identity Provider (IdP).

        - */ -export interface OidcConfig { - /** - *

        The OIDC IdP client ID used to configure your private workforce.

        - */ - ClientId: string | undefined; - - /** - *

        The OIDC IdP client secret used to configure your private workforce.

        - */ - ClientSecret: string | undefined; - - /** - *

        The OIDC IdP issuer used to configure your private workforce.

        - */ - Issuer: string | undefined; - - /** - *

        The OIDC IdP authorization endpoint used to configure your private workforce.

        - */ - AuthorizationEndpoint: string | undefined; - - /** - *

        The OIDC IdP token endpoint used to configure your private workforce.

        - */ - TokenEndpoint: string | undefined; - - /** - *

        The OIDC IdP user information endpoint used to configure your private workforce.

        - */ - UserInfoEndpoint: string | undefined; - - /** - *

        The OIDC IdP logout endpoint used to configure your private workforce.

        - */ - LogoutEndpoint: string | undefined; - - /** - *

        The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

        - */ - JwksUri: string | undefined; -} - -export namespace OidcConfig { - export const filterSensitiveLog = (obj: OidcConfig): any => ({ - ...obj, - ...(obj.ClientSecret && { ClientSecret: SENSITIVE_STRING }), - }); -} - -/** - *

        A list of IP address ranges (CIDRs). Used to create an allow - * list of IP addresses for a private workforce. Workers will only be able to login to their worker portal from an - * IP address within this range. By default, a workforce isn't restricted to specific IP addresses.

        - */ -export interface SourceIpConfig { - /** - *

        A list of one to ten Classless Inter-Domain Routing (CIDR) values.

        - *

        Maximum: Ten CIDR values

        - * - *

        The following Length Constraints apply to individual CIDR values in - * the CIDR value list.

        - *
        - */ - Cidrs: string[] | undefined; -} - -export namespace SourceIpConfig { - export const filterSensitiveLog = (obj: SourceIpConfig): any => ({ - ...obj, - }); -} - -export interface CreateWorkforceRequest { - /** - *

        Use this parameter to configure an Amazon Cognito private workforce. - * A single Cognito workforce is created using and corresponds to a single - * - * Amazon Cognito user pool.

        - * - *

        Do not use OidcConfig if you specify values for - * CognitoConfig.

        - */ - CognitoConfig?: CognitoConfig; - - /** - *

        Use this parameter to configure a private workforce using your own OIDC Identity Provider.

        - *

        Do not use CognitoConfig if you specify values for - * OidcConfig.

        - */ - OidcConfig?: OidcConfig; - - /** - *

        A list of IP address ranges (CIDRs). Used to create an allow - * list of IP addresses for a private workforce. Workers will only be able to login to their worker portal from an - * IP address within this range. By default, a workforce isn't restricted to specific IP addresses.

        - */ - SourceIpConfig?: SourceIpConfig; - - /** - *

        The name of the private workforce.

        - */ - WorkforceName: string | undefined; - - /** - *

        An array of key-value pairs that contain metadata to help you categorize and - * organize our workforce. Each tag consists of a key and a value, - * both of which you define.

        - */ - Tags?: Tag[]; -} - -export namespace CreateWorkforceRequest { - export const filterSensitiveLog = (obj: CreateWorkforceRequest): any => ({ - ...obj, - ...(obj.OidcConfig && { OidcConfig: OidcConfig.filterSensitiveLog(obj.OidcConfig) }), - }); -} - -export interface CreateWorkforceResponse { - /** - *

        The Amazon Resource Name (ARN) of the workforce.

        - */ - WorkforceArn: string | undefined; -} - -export namespace CreateWorkforceResponse { - export const filterSensitiveLog = (obj: CreateWorkforceResponse): any => ({ - ...obj, - }); -} - -/** - *

        A list of user groups that exist in your OIDC Identity Provider (IdP). - * One to ten groups can be used to create a single private work team. - * When you add a user group to the list of Groups, you can add that user group to one or more - * private work teams. If you add a user group to a private work team, all workers in that user group - * are added to the work team.

        - */ -export interface OidcMemberDefinition { - /** - *

        A list of comma seperated strings that identifies - * user groups in your OIDC IdP. Each user group is - * made up of a group of private workers.

        - */ - Groups: string[] | undefined; -} - -export namespace OidcMemberDefinition { - export const filterSensitiveLog = (obj: OidcMemberDefinition): any => ({ - ...obj, - }); -} - -/** - *

        Defines an Amazon Cognito or your own OIDC IdP user group that is part of a work team.

        - */ -export interface MemberDefinition { - /** - *

        The Amazon Cognito user group that is part of the work team.

        - */ - CognitoMemberDefinition?: CognitoMemberDefinition; - - /** - *

        A list user groups that exist in your OIDC Identity Provider (IdP). - * One to ten groups can be used to create a single private work team. - * When you add a user group to the list of Groups, you can add that user group to one or more - * private work teams. If you add a user group to a private work team, all workers in that user group - * are added to the work team.

        - */ - OidcMemberDefinition?: OidcMemberDefinition; -} - -export namespace MemberDefinition { - export const filterSensitiveLog = (obj: MemberDefinition): any => ({ - ...obj, - }); -} - -/** - *

        Configures SNS notifications of available or expiring work items for work - * teams.

        - */ -export interface NotificationConfiguration { - /** - *

        The ARN for the SNS topic to which notifications should be published.

        - */ - NotificationTopicArn?: string; -} - -export namespace NotificationConfiguration { - export const filterSensitiveLog = (obj: NotificationConfiguration): any => ({ - ...obj, - }); -} - -export interface CreateWorkteamRequest { - /** - *

        The name of the work team. Use this name to identify the work team.

        - */ - WorkteamName: string | undefined; - - /** - *

        The name of the workforce.

        - */ - WorkforceName?: string; - - /** - *

        A list of MemberDefinition objects that contains objects that identify - * the workers that make up the work team.

        - *

        Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). For - * private workforces created using Amazon Cognito use CognitoMemberDefinition. For - * workforces created using your own OIDC identity provider (IdP) use - * OidcMemberDefinition. Do not provide input for both of these parameters - * in a single request.

        - *

        For workforces created using Amazon Cognito, private work teams correspond to Amazon Cognito - * user groups within the user pool used to create a workforce. All of the - * CognitoMemberDefinition objects that make up the member definition must - * have the same ClientId and UserPool values. To add a Amazon - * Cognito user group to an existing worker pool, see Adding groups to a User - * Pool. For more information about user pools, see Amazon Cognito User - * Pools.

        - *

        For workforces created using your own OIDC IdP, specify the user groups that you want to - * include in your private work team in OidcMemberDefinition by listing those groups - * in Groups.

        - */ - MemberDefinitions: MemberDefinition[] | undefined; - - /** - *

        A description of the work team.

        - */ - Description: string | undefined; - - /** - *

        Configures notification of workers regarding available or expiring work items.

        - */ - NotificationConfiguration?: NotificationConfiguration; - - /** - *

        An array of key-value pairs.

        - *

        For more information, see Resource - * Tag and Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide.

        - */ - Tags?: Tag[]; -} - -export namespace CreateWorkteamRequest { - export const filterSensitiveLog = (obj: CreateWorkteamRequest): any => ({ - ...obj, - }); -} - -export interface CreateWorkteamResponse { - /** - *

        The Amazon Resource Name (ARN) of the work team. You can use this ARN to identify the - * work team.

        - */ - WorkteamArn?: string; -} - -export namespace CreateWorkteamResponse { - export const filterSensitiveLog = (obj: CreateWorkteamResponse): any => ({ - ...obj, - }); -} - -/** - *

        - */ -export interface DataCaptureConfigSummary { - /** - *

        - */ - EnableCapture: boolean | undefined; - - /** - *

        - */ - CaptureStatus: CaptureStatus | string | undefined; - - /** - *

        - */ - CurrentSamplingPercentage: number | undefined; - - /** - *

        - */ - DestinationS3Uri: string | undefined; - - /** - *

        - */ - KmsKeyId: string | undefined; -} - -export namespace DataCaptureConfigSummary { - export const filterSensitiveLog = (obj: DataCaptureConfigSummary): any => ({ - ...obj, - }); -} - -export enum RuleEvaluationStatus { - ERROR = "Error", - IN_PROGRESS = "InProgress", - ISSUES_FOUND = "IssuesFound", - NO_ISSUES_FOUND = "NoIssuesFound", - STOPPED = "Stopped", - STOPPING = "Stopping", -} - -/** - *

        Information about the status of the rule evaluation.

        - */ -export interface DebugRuleEvaluationStatus { - /** - *

        The name of the rule configuration

        - */ - RuleConfigurationName?: string; - - /** - *

        The Amazon Resource Name (ARN) of the rule evaluation job.

        - */ - RuleEvaluationJobArn?: string; - - /** - *

        Status of the rule evaluation.

        - */ - RuleEvaluationStatus?: RuleEvaluationStatus | string; - - /** - *

        Details from the rule evaluation.

        - */ - StatusDetails?: string; - - /** - *

        Timestamp when the rule evaluation status was last modified.

        - */ - LastModifiedTime?: Date; -} - -export namespace DebugRuleEvaluationStatus { - export const filterSensitiveLog = (obj: DebugRuleEvaluationStatus): any => ({ - ...obj, - }); -} - -export interface DeleteAlgorithmInput { - /** - *

        The name of the algorithm to delete.

        - */ - AlgorithmName: string | undefined; -} - -export namespace DeleteAlgorithmInput { - export const filterSensitiveLog = (obj: DeleteAlgorithmInput): any => ({ - ...obj, - }); -} - -export interface DeleteAppRequest { - /** - *

        The domain ID.

        - */ - DomainId: string | undefined; - - /** - *

        The user profile name.

        - */ - UserProfileName: string | undefined; - - /** - *

        The type of app.

        - */ - AppType: AppType | string | undefined; - - /** - *

        The name of the app.

        - */ - AppName: string | undefined; -} - -export namespace DeleteAppRequest { - export const filterSensitiveLog = (obj: DeleteAppRequest): any => ({ - ...obj, - }); -} - -export interface DeleteAppImageConfigRequest { - /** - *

        The name of the AppImageConfig to delete.

        - */ - AppImageConfigName: string | undefined; -} - -export namespace DeleteAppImageConfigRequest { - export const filterSensitiveLog = (obj: DeleteAppImageConfigRequest): any => ({ - ...obj, - }); -} - -export interface DeleteCodeRepositoryInput { - /** - *

        The name of the Git repository to delete.

        - */ - CodeRepositoryName: string | undefined; -} - -export namespace DeleteCodeRepositoryInput { - export const filterSensitiveLog = (obj: DeleteCodeRepositoryInput): any => ({ - ...obj, - }); -} - -export enum RetentionType { - Delete = "Delete", - Retain = "Retain", -} - -/** - *

        The retention policy for data stored on an Amazon Elastic File System (EFS) volume.

        - */ -export interface RetentionPolicy { - /** - *

        The default is Retain, which specifies to keep the data stored on the EFS volume.

        - *

        Specify Delete to delete the data stored on the EFS volume.

        - */ - HomeEfsFileSystem?: RetentionType | string; -} - -export namespace RetentionPolicy { - export const filterSensitiveLog = (obj: RetentionPolicy): any => ({ +export namespace CreatePresignedDomainUrlRequest { + export const filterSensitiveLog = (obj: CreatePresignedDomainUrlRequest): any => ({ ...obj, }); } diff --git a/clients/client-sagemaker/models/models_1.ts b/clients/client-sagemaker/models/models_1.ts index 404cabe67724..b4b9ec024344 100644 --- a/clients/client-sagemaker/models/models_1.ts +++ b/clients/client-sagemaker/models/models_1.ts @@ -1,4 +1,7 @@ import { + ActionSource, + ActionStatus, + ActionSummary, AlgorithmSortBy, AlgorithmSpecification, AlgorithmStatus, @@ -13,6 +16,10 @@ import { AppSpecification, AppStatus, AppType, + ArtifactSource, + ArtifactSummary, + AssociationEdgeType, + AthenaDatasetDefinition, AuthMode, AutoMLCandidate, AutoMLChannel, @@ -22,30 +29,22 @@ import { AutoMLJobObjective, AutoMLJobSecondaryStatus, AutoMLJobStatus, - AutoMLJobSummary, AutoMLOutputDataConfig, - AutoMLSortBy, - AutoMLSortOrder, + AutoRollbackConfig, BatchStrategy, - CandidateSortBy, - CandidateStatus, + BlueGreenUpdatePolicy, + CaptureStatus, Channel, CheckpointConfig, - CodeRepositorySortBy, - CodeRepositorySortOrder, - CodeRepositorySummary, CognitoConfig, + CognitoMemberDefinition, + CollectionConfiguration, CompilationJobStatus, - CompilationJobSummary, ContainerDefinition, + ContextSource, DataCaptureConfig, - DataCaptureConfigSummary, - DataProcessing, - DebugHookConfig, - DebugRuleConfiguration, - DebugRuleEvaluationStatus, DirectInternetAccess, - ExperimentConfig, + FeatureDefinition, FlowDefinitionOutputConfig, GitConfig, HumanLoopActivationConfig, @@ -64,10174 +63,9461 @@ import { LabelingJobInputConfig, LabelingJobOutputConfig, LabelingJobStoppingConditions, - MemberDefinition, - ModelClientConfig, + MetadataProperties, + ModelApprovalStatus, + ModelMetrics, ModelPackageValidationSpecification, MonitoringScheduleConfig, NetworkConfig, 
NotebookInstanceAcceleratorType, NotebookInstanceLifecycleHook, - NotificationConfiguration, ObjectiveStatus, + OfflineStoreConfig, + OnlineStoreConfig, OutputConfig, OutputDataConfig, ProblemType, - ProcessingInput, - ProcessingOutputConfig, - ProcessingResources, - ProcessingStoppingCondition, + ProcessingInstanceType, + ProcessingS3DataDistributionType, + ProcessingS3InputMode, + ProcessingS3UploadMode, ProductionVariant, ResourceConfig, ResourceLimits, ResourceSpec, - RetentionPolicy, RootAccess, SourceAlgorithmSpecification, - SourceIpConfig, StoppingCondition, Tag, - TensorBoardOutputConfig, TrainingSpecification, TransformInput, TransformOutput, TransformResources, - TrialComponentArtifact, - TrialComponentParameterValue, - TrialComponentStatus, - UiTemplate, + UserContext, UserSettings, VpcConfig, _InstanceType, } from "./models_0"; import { SENSITIVE_STRING } from "@aws-sdk/smithy-client"; -export interface DeleteDomainRequest { - /** - *

        The domain ID.

        - */ - DomainId: string | undefined; - +export interface CreatePresignedDomainUrlResponse { /** - *

        The retention policy for this domain, which specifies whether resources will be retained after the Domain is deleted. - * By default, all resources are retained (not automatically deleted). - *

        + *

        The presigned URL.

        */ - RetentionPolicy?: RetentionPolicy; + AuthorizedUrl?: string; } -export namespace DeleteDomainRequest { - export const filterSensitiveLog = (obj: DeleteDomainRequest): any => ({ +export namespace CreatePresignedDomainUrlResponse { + export const filterSensitiveLog = (obj: CreatePresignedDomainUrlResponse): any => ({ ...obj, }); } -export interface DeleteEndpointInput { +export interface CreatePresignedNotebookInstanceUrlInput { /** - *

        The name of the endpoint that you want to delete.

        + *

        The name of the notebook instance.

        */ - EndpointName: string | undefined; -} - -export namespace DeleteEndpointInput { - export const filterSensitiveLog = (obj: DeleteEndpointInput): any => ({ - ...obj, - }); -} + NotebookInstanceName: string | undefined; -export interface DeleteEndpointConfigInput { /** - *

        The name of the endpoint configuration that you want to delete.

        + *

        The duration of the session, in seconds. The default is 12 hours.

        */ - EndpointConfigName: string | undefined; + SessionExpirationDurationInSeconds?: number; } -export namespace DeleteEndpointConfigInput { - export const filterSensitiveLog = (obj: DeleteEndpointConfigInput): any => ({ +export namespace CreatePresignedNotebookInstanceUrlInput { + export const filterSensitiveLog = (obj: CreatePresignedNotebookInstanceUrlInput): any => ({ ...obj, }); } -export interface DeleteExperimentRequest { +export interface CreatePresignedNotebookInstanceUrlOutput { /** - *

        The name of the experiment to delete.

        + *

        A JSON object that contains the URL string.

        */ - ExperimentName: string | undefined; + AuthorizedUrl?: string; } -export namespace DeleteExperimentRequest { - export const filterSensitiveLog = (obj: DeleteExperimentRequest): any => ({ +export namespace CreatePresignedNotebookInstanceUrlOutput { + export const filterSensitiveLog = (obj: CreatePresignedNotebookInstanceUrlOutput): any => ({ ...obj, }); } -export interface DeleteExperimentResponse { +/** + *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

        + * + */ +export interface ExperimentConfig { /** - *

        The Amazon Resource Name (ARN) of the experiment that is being deleted.

        + *

        The name of an existing experiment to associate the trial component with.

        */ - ExperimentArn?: string; -} + ExperimentName?: string; -export namespace DeleteExperimentResponse { - export const filterSensitiveLog = (obj: DeleteExperimentResponse): any => ({ - ...obj, - }); -} + /** + *

        The name of an existing trial to associate the trial component with. If not specified, a + * new trial is created.

        + */ + TrialName?: string; -export interface DeleteFlowDefinitionRequest { /** - *

        The name of the flow definition you are deleting.

        + *

        The display name for the trial component. If this key isn't specified, the display name is + * the trial component name.

        */ - FlowDefinitionName: string | undefined; + TrialComponentDisplayName?: string; } -export namespace DeleteFlowDefinitionRequest { - export const filterSensitiveLog = (obj: DeleteFlowDefinitionRequest): any => ({ +export namespace ExperimentConfig { + export const filterSensitiveLog = (obj: ExperimentConfig): any => ({ ...obj, }); } -export interface DeleteFlowDefinitionResponse {} +export enum DataDistributionType { + FULLYREPLICATED = "FullyReplicated", + SHARDEDBYS3KEY = "ShardedByS3Key", +} -export namespace DeleteFlowDefinitionResponse { - export const filterSensitiveLog = (obj: DeleteFlowDefinitionResponse): any => ({ - ...obj, - }); +export enum InputMode { + FILE = "File", + PIPE = "Pipe", } -export interface DeleteHumanTaskUiRequest { - /** - *

        The name of the human task user interface (work task template) you want to delete.

        - */ - HumanTaskUiName: string | undefined; +export enum RedshiftResultCompressionType { + BZIP2 = "BZIP2", + GZIP = "GZIP", + NONE = "None", + SNAPPY = "SNAPPY", + ZSTD = "ZSTD", } -export namespace DeleteHumanTaskUiRequest { - export const filterSensitiveLog = (obj: DeleteHumanTaskUiRequest): any => ({ - ...obj, - }); +export enum RedshiftResultFormat { + CSV = "CSV", + PARQUET = "PARQUET", } -export interface DeleteHumanTaskUiResponse {} +/** + *

        Configuration for Redshift Dataset Definition input.

        + */ +export interface RedshiftDatasetDefinition { + /** + *

        The Redshift cluster Identifier.

        + */ + ClusterId: string | undefined; -export namespace DeleteHumanTaskUiResponse { - export const filterSensitiveLog = (obj: DeleteHumanTaskUiResponse): any => ({ - ...obj, - }); -} + /** + *

        The name of the Redshift database used in Redshift query execution.

        + */ + Database: string | undefined; -export interface DeleteImageRequest { /** - *

        The name of the image to delete.

        + *

        The database user name used in Redshift query execution.

        */ - ImageName: string | undefined; -} + DbUser: string | undefined; -export namespace DeleteImageRequest { - export const filterSensitiveLog = (obj: DeleteImageRequest): any => ({ - ...obj, - }); -} + /** + *

        The SQL query statements to be executed.

        + */ + QueryString: string | undefined; -export interface DeleteImageResponse {} + /** + *

        The IAM role attached to your Redshift cluster that Amazon SageMaker uses to generate datasets.

        + */ + ClusterRoleArn: string | undefined; -export namespace DeleteImageResponse { - export const filterSensitiveLog = (obj: DeleteImageResponse): any => ({ - ...obj, - }); -} + /** + *

        The location in Amazon S3 where the Redshift query results are stored.

        + */ + OutputS3Uri: string | undefined; -export interface DeleteImageVersionRequest { /** - *

        The name of the image.

        + *

        The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data from a + * Redshift execution.

        */ - ImageName: string | undefined; + KmsKeyId?: string; /** - *

        The version to delete.

        + *

        The data storage format for Redshift query results.

        */ - Version: number | undefined; + OutputFormat: RedshiftResultFormat | string | undefined; + + /** + *

        The compression used for Redshift query results.

        + */ + OutputCompression?: RedshiftResultCompressionType | string; } -export namespace DeleteImageVersionRequest { - export const filterSensitiveLog = (obj: DeleteImageVersionRequest): any => ({ +export namespace RedshiftDatasetDefinition { + export const filterSensitiveLog = (obj: RedshiftDatasetDefinition): any => ({ ...obj, }); } -export interface DeleteImageVersionResponse {} +/** + *

        Configuration for Dataset Definition inputs. The Dataset Definition input must specify + * exactly one of either AthenaDatasetDefinition or RedshiftDatasetDefinition + * types.

        + */ +export interface DatasetDefinition { + /** + *

        Configuration for Athena Dataset Definition input.

        + */ + AthenaDatasetDefinition?: AthenaDatasetDefinition; -export namespace DeleteImageVersionResponse { - export const filterSensitiveLog = (obj: DeleteImageVersionResponse): any => ({ - ...obj, - }); -} + /** + *

        Configuration for Redshift Dataset Definition input.

        + */ + RedshiftDatasetDefinition?: RedshiftDatasetDefinition; -export interface DeleteModelInput { /** - *

        The name of the model to delete.

        + *

        The local path where you want Amazon SageMaker to download the Dataset Definition inputs to run a + * processing job. LocalPath is an absolute path to the input data. This is a required + * parameter when AppManaged is False (default).

        */ - ModelName: string | undefined; -} + LocalPath?: string; -export namespace DeleteModelInput { - export const filterSensitiveLog = (obj: DeleteModelInput): any => ({ - ...obj, - }); -} + /** + *

        Whether the generated dataset is FullyReplicated or + * ShardedByS3Key (default).

        + */ + DataDistributionType?: DataDistributionType | string; -export interface DeleteModelPackageInput { /** - *

        The name of the model package. The name must have 1 to 63 characters. Valid characters - * are a-z, A-Z, 0-9, and - (hyphen).

        + *

        Whether to use File or Pipe input mode. In File (default) mode, + * Amazon SageMaker copies the data from the input source onto the local Amazon Elastic Block Store + * (Amazon EBS) volumes before starting your training algorithm. This is the most commonly used + * input mode. In Pipe mode, Amazon SageMaker streams input data from the source directly to your + * algorithm without using the EBS volume.

        */ - ModelPackageName: string | undefined; + InputMode?: InputMode | string; } -export namespace DeleteModelPackageInput { - export const filterSensitiveLog = (obj: DeleteModelPackageInput): any => ({ +export namespace DatasetDefinition { + export const filterSensitiveLog = (obj: DatasetDefinition): any => ({ ...obj, }); } -export interface DeleteMonitoringScheduleRequest { - /** - *

        The name of the monitoring schedule to delete.

        - */ - MonitoringScheduleName: string | undefined; +export enum ProcessingS3CompressionType { + GZIP = "Gzip", + NONE = "None", } -export namespace DeleteMonitoringScheduleRequest { - export const filterSensitiveLog = (obj: DeleteMonitoringScheduleRequest): any => ({ - ...obj, - }); +export enum ProcessingS3DataType { + MANIFEST_FILE = "ManifestFile", + S3_PREFIX = "S3Prefix", } -export interface DeleteNotebookInstanceInput { +/** + *

        Configuration for processing job inputs in Amazon S3.

        + */ +export interface ProcessingS3Input { /** - *

        The name of the Amazon SageMaker notebook instance to delete.

        + *

        The URI for the Amazon S3 storage where you want Amazon SageMaker to download the artifacts needed + * to run a processing job.

        */ - NotebookInstanceName: string | undefined; -} + S3Uri: string | undefined; -export namespace DeleteNotebookInstanceInput { - export const filterSensitiveLog = (obj: DeleteNotebookInstanceInput): any => ({ - ...obj, - }); -} + /** + *

        The local path to the Amazon S3 bucket where you want Amazon SageMaker to download the inputs to + * run a processing job. LocalPath is an absolute path to the input + * data. This is a required parameter when AppManaged is False + * (default).

        + */ + LocalPath?: string; -export interface DeleteNotebookInstanceLifecycleConfigInput { /** - *

        The name of the lifecycle configuration to delete.

        + *

        Whether you use an S3Prefix or a ManifestFile for + * the data type. If you choose S3Prefix, S3Uri identifies a key + * name prefix. Amazon SageMaker uses all objects with the specified key name prefix for the processing + * job. If you choose ManifestFile, S3Uri identifies an object + * that is a manifest file containing a list of object keys that you want Amazon SageMaker to use for + * the processing job.

        */ - NotebookInstanceLifecycleConfigName: string | undefined; -} + S3DataType: ProcessingS3DataType | string | undefined; -export namespace DeleteNotebookInstanceLifecycleConfigInput { - export const filterSensitiveLog = (obj: DeleteNotebookInstanceLifecycleConfigInput): any => ({ - ...obj, - }); -} + /** + *

        Whether to use File or Pipe input mode. In + * File mode, Amazon SageMaker copies the data from the input source onto the local + * Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm. + * This is the most commonly used input mode. In Pipe mode, Amazon SageMaker streams input + * data from the source directly to your algorithm without using the EBS volume.This is a + * required parameter when AppManaged is False (default).

        + */ + S3InputMode?: ProcessingS3InputMode | string; -export interface DeleteTagsInput { /** - *

        The Amazon Resource Name (ARN) of the resource whose tags you want to - * delete.

        + *

        Whether the data stored in Amazon S3 is FullyReplicated or + * ShardedByS3Key.

        */ - ResourceArn: string | undefined; + S3DataDistributionType?: ProcessingS3DataDistributionType | string; /** - *

        An array or one or more tag keys to delete.

        + *

        Whether to use Gzip compression for Amazon S3 storage.

        */ - TagKeys: string[] | undefined; + S3CompressionType?: ProcessingS3CompressionType | string; } -export namespace DeleteTagsInput { - export const filterSensitiveLog = (obj: DeleteTagsInput): any => ({ +export namespace ProcessingS3Input { + export const filterSensitiveLog = (obj: ProcessingS3Input): any => ({ ...obj, }); } -export interface DeleteTagsOutput {} +/** + *

        The inputs for a processing job. The processing input must specify exactly one of either + * S3Input or DatasetDefinition types.

        + */ +export interface ProcessingInput { + /** + *

        The name of the inputs for the processing job.

        + */ + InputName: string | undefined; -export namespace DeleteTagsOutput { - export const filterSensitiveLog = (obj: DeleteTagsOutput): any => ({ - ...obj, - }); -} + /** + *

        When True, input operations such as data download are managed natively by the + * processing job application. When False (default), input operations are managed by Amazon SageMaker.

        + */ + AppManaged?: boolean; -export interface DeleteTrialRequest { /** - *

        The name of the trial to delete.

        + *

        Configuration for processing job inputs in Amazon S3.

        */ - TrialName: string | undefined; + S3Input?: ProcessingS3Input; + + /** + *

        Configuration for a Dataset Definition input.

        + */ + DatasetDefinition?: DatasetDefinition; } -export namespace DeleteTrialRequest { - export const filterSensitiveLog = (obj: DeleteTrialRequest): any => ({ +export namespace ProcessingInput { + export const filterSensitiveLog = (obj: ProcessingInput): any => ({ ...obj, }); } -export interface DeleteTrialResponse { +/** + *

        Configuration for processing job outputs in Amazon SageMaker Feature Store.

        + */ +export interface ProcessingFeatureStoreOutput { /** - *

        The Amazon Resource Name (ARN) of the trial that is being deleted.

        + *

        The name of the Amazon SageMaker FeatureGroup to use as the destination for processing job output.

        */ - TrialArn?: string; + FeatureGroupName: string | undefined; } -export namespace DeleteTrialResponse { - export const filterSensitiveLog = (obj: DeleteTrialResponse): any => ({ +export namespace ProcessingFeatureStoreOutput { + export const filterSensitiveLog = (obj: ProcessingFeatureStoreOutput): any => ({ ...obj, }); } -export interface DeleteTrialComponentRequest { +/** + *

        Configuration for processing job outputs in Amazon S3.

        + */ +export interface ProcessingS3Output { /** - *

        The name of the component to delete.

        + *

        A URI that identifies the Amazon S3 bucket where you want Amazon SageMaker to save the results of + * a processing job.

        */ - TrialComponentName: string | undefined; -} + S3Uri: string | undefined; -export namespace DeleteTrialComponentRequest { - export const filterSensitiveLog = (obj: DeleteTrialComponentRequest): any => ({ - ...obj, - }); -} + /** + *

        The local path to the Amazon S3 bucket where you want Amazon SageMaker to save the results of an + * processing job. LocalPath is an absolute path to the input data.

        + */ + LocalPath: string | undefined; -export interface DeleteTrialComponentResponse { /** - *

        The Amazon Resource Name (ARN) of the component is being deleted.

        + *

        Whether to upload the results of the processing job continuously or after the job + * completes.

        */ - TrialComponentArn?: string; + S3UploadMode: ProcessingS3UploadMode | string | undefined; } -export namespace DeleteTrialComponentResponse { - export const filterSensitiveLog = (obj: DeleteTrialComponentResponse): any => ({ +export namespace ProcessingS3Output { + export const filterSensitiveLog = (obj: ProcessingS3Output): any => ({ ...obj, }); } -export interface DeleteUserProfileRequest { +/** + *

        Describes the results of a processing job. The processing output must specify exactly one of + * either S3Output or FeatureStoreOutput types.

        + */ +export interface ProcessingOutput { /** - *

        The domain ID.

        + *

        The name for the processing job output.

        */ - DomainId: string | undefined; + OutputName: string | undefined; /** - *

        The user profile name.

        + *

        Configuration for processing job outputs in Amazon S3.

        */ - UserProfileName: string | undefined; + S3Output?: ProcessingS3Output; + + /** + *

        Configuration for processing job outputs in Amazon SageMaker Feature Store. This processing output + * type is only supported when AppManaged is specified.

        + */ + FeatureStoreOutput?: ProcessingFeatureStoreOutput; + + /** + *

        When True, output operations such as data upload are managed natively by the + * processing job application. When False (default), output operations are managed by + * Amazon SageMaker.

        + */ + AppManaged?: boolean; } -export namespace DeleteUserProfileRequest { - export const filterSensitiveLog = (obj: DeleteUserProfileRequest): any => ({ +export namespace ProcessingOutput { + export const filterSensitiveLog = (obj: ProcessingOutput): any => ({ ...obj, }); } -export interface DeleteWorkforceRequest { +/** + *

        The output configuration for the processing job.

        + */ +export interface ProcessingOutputConfig { /** - *

        The name of the workforce.

        + *

        List of output configurations for the processing job.

        */ - WorkforceName: string | undefined; -} + Outputs: ProcessingOutput[] | undefined; -export namespace DeleteWorkforceRequest { - export const filterSensitiveLog = (obj: DeleteWorkforceRequest): any => ({ - ...obj, - }); + /** + *

        The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the processing + * job output. KmsKeyId can be an ID of a KMS key, ARN of a KMS key, alias of + * a KMS key, or alias of a KMS key. The KmsKeyId is applied to all + * outputs.

        + */ + KmsKeyId?: string; } -export interface DeleteWorkforceResponse {} - -export namespace DeleteWorkforceResponse { - export const filterSensitiveLog = (obj: DeleteWorkforceResponse): any => ({ +export namespace ProcessingOutputConfig { + export const filterSensitiveLog = (obj: ProcessingOutputConfig): any => ({ ...obj, }); } -export interface DeleteWorkteamRequest { +/** + *

        Configuration for the cluster used to run a processing job.

        + */ +export interface ProcessingClusterConfig { /** - *

        The name of the work team to delete.

        + *

        The number of ML compute instances to use in the processing job. For distributed + * processing jobs, specify a value greater than 1. The default value is 1.

        */ - WorkteamName: string | undefined; -} + InstanceCount: number | undefined; -export namespace DeleteWorkteamRequest { - export const filterSensitiveLog = (obj: DeleteWorkteamRequest): any => ({ - ...obj, - }); -} + /** + *

        The ML compute instance type for the processing job.

        + */ + InstanceType: ProcessingInstanceType | string | undefined; -export interface DeleteWorkteamResponse { /** - *

        Returns true if the work team was successfully deleted; otherwise, - * returns false.

        + *

        The size of the ML storage volume in gigabytes that you want to provision. You must + * specify sufficient ML storage for your scenario.

        */ - Success: boolean | undefined; + VolumeSizeInGB: number | undefined; + + /** + *

        The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the + * storage volume attached to the ML compute instance(s) that run the processing job. + *

        + */ + VolumeKmsKeyId?: string; } -export namespace DeleteWorkteamResponse { - export const filterSensitiveLog = (obj: DeleteWorkteamResponse): any => ({ +export namespace ProcessingClusterConfig { + export const filterSensitiveLog = (obj: ProcessingClusterConfig): any => ({ ...obj, }); } /** - *

        Gets the Amazon EC2 Container Registry path of the docker image of the model that is hosted in this ProductionVariant.

        - *

        If you used the registry/repository[:tag] form to specify the image path - * of the primary container when you created the model hosted in this - * ProductionVariant, the path resolves to a path of the form - * registry/repository[@digest]. A digest is a hash value that identifies - * a specific version of an image. For information about Amazon ECR paths, see Pulling an Image in the Amazon ECR User Guide.

        + *

        Identifies the resources, ML compute instances, and ML storage volumes to deploy for a + * processing job. In distributed training, you specify more than one instance.

        */ -export interface DeployedImage { - /** - *

        The image path you specified when you created the model.

        - */ - SpecifiedImage?: string; - - /** - *

        The specific digest path of the image hosted in this - * ProductionVariant.

        - */ - ResolvedImage?: string; - +export interface ProcessingResources { /** - *

        The date and time when the image path for the model resolved to the - * ResolvedImage - *

        + *

        The configuration for the resources in a cluster used to run the processing + * job.

        */ - ResolutionTime?: Date; + ClusterConfig: ProcessingClusterConfig | undefined; } -export namespace DeployedImage { - export const filterSensitiveLog = (obj: DeployedImage): any => ({ +export namespace ProcessingResources { + export const filterSensitiveLog = (obj: ProcessingResources): any => ({ ...obj, }); } -export interface DescribeAlgorithmInput { +/** + *

        Specifies a time limit for how long the processing job is allowed to run.

        + */ +export interface ProcessingStoppingCondition { /** - *

        The name of the algorithm to describe.

        + *

        Specifies the maximum runtime in seconds.

        */ - AlgorithmName: string | undefined; + MaxRuntimeInSeconds: number | undefined; } -export namespace DescribeAlgorithmInput { - export const filterSensitiveLog = (obj: DescribeAlgorithmInput): any => ({ +export namespace ProcessingStoppingCondition { + export const filterSensitiveLog = (obj: ProcessingStoppingCondition): any => ({ ...obj, }); } -export interface DescribeAlgorithmOutput { +export interface CreateProcessingJobRequest { /** - *

        The name of the algorithm being described.

        + *

        List of input configurations for the processing job.

        */ - AlgorithmName: string | undefined; + ProcessingInputs?: ProcessingInput[]; /** - *

        The Amazon Resource Name (ARN) of the algorithm.

        + *

        Output configuration for the processing job.

        */ - AlgorithmArn: string | undefined; + ProcessingOutputConfig?: ProcessingOutputConfig; /** - *

        A brief summary about the algorithm.

        + *

        The name of the processing job. The name must be unique within an AWS Region in the + * AWS account.

        */ - AlgorithmDescription?: string; + ProcessingJobName: string | undefined; /** - *

        A timestamp specifying when the algorithm was created.

        + *

        Identifies the resources, ML compute instances, and ML storage volumes to deploy for a + * processing job. In distributed training, you specify more than one instance.

        */ - CreationTime: Date | undefined; + ProcessingResources: ProcessingResources | undefined; /** - *

        Details about training jobs run by this algorithm.

        + *

        The time limit for how long the processing job is allowed to run.

        */ - TrainingSpecification: TrainingSpecification | undefined; + StoppingCondition?: ProcessingStoppingCondition; /** - *

        Details about inference jobs that the algorithm runs.

        + *

        Configures the processing job to run a specified Docker container image.

        */ - InferenceSpecification?: InferenceSpecification; + AppSpecification: AppSpecification | undefined; /** - *

        Details about configurations for one or more training jobs that Amazon SageMaker runs to test the - * algorithm.

        + *

        Sets the environment variables in the Docker container.

        */ - ValidationSpecification?: AlgorithmValidationSpecification; + Environment?: { [key: string]: string }; /** - *

        The current status of the algorithm.

        + *

        Networking options for a processing job.

        */ - AlgorithmStatus: AlgorithmStatus | string | undefined; + NetworkConfig?: NetworkConfig; /** - *

        Details about the current status of the algorithm.

        + *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on + * your behalf.

        */ - AlgorithmStatusDetails: AlgorithmStatusDetails | undefined; + RoleArn: string | undefined; /** - *

        The product identifier of the algorithm.

        + *

        (Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management + * User Guide.

        */ - ProductId?: string; + Tags?: Tag[]; /** - *

        Whether the algorithm is certified to be listed in AWS Marketplace.

        + *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

        + * */ - CertifyForMarketplace?: boolean; + ExperimentConfig?: ExperimentConfig; } -export namespace DescribeAlgorithmOutput { - export const filterSensitiveLog = (obj: DescribeAlgorithmOutput): any => ({ +export namespace CreateProcessingJobRequest { + export const filterSensitiveLog = (obj: CreateProcessingJobRequest): any => ({ ...obj, }); } -export interface DescribeAppRequest { +export interface CreateProcessingJobResponse { /** - *

        The domain ID.

        + *

        The Amazon Resource Name (ARN) of the processing job.

        */ - DomainId: string | undefined; + ProcessingJobArn: string | undefined; +} - /** - *

        The user profile name.

        - */ - UserProfileName: string | undefined; +export namespace CreateProcessingJobResponse { + export const filterSensitiveLog = (obj: CreateProcessingJobResponse): any => ({ + ...obj, + }); +} +/** + *

        A key value pair used when you provision a project as a service catalog product. For + * information, see What is AWS Service + * Catalog.

        + */ +export interface ProvisioningParameter { /** - *

        The type of app.

        + *

        The key that identifies a provisioning parameter.

        */ - AppType: AppType | string | undefined; + Key?: string; /** - *

        The name of the app.

        + *

        The value of the provisioning parameter.

        */ - AppName: string | undefined; + Value?: string; } -export namespace DescribeAppRequest { - export const filterSensitiveLog = (obj: DescribeAppRequest): any => ({ +export namespace ProvisioningParameter { + export const filterSensitiveLog = (obj: ProvisioningParameter): any => ({ ...obj, }); } -export interface DescribeAppResponse { +/** + *

        Details that you specify to provision a service catalog product. For information about + * service catalog, see What is AWS Service + * Catalog.

        + */ +export interface ServiceCatalogProvisioningDetails { /** - *

        The Amazon Resource Name (ARN) of the app.

        + *

        The ID of the product to provision.

        */ - AppArn?: string; + ProductId: string | undefined; /** - *

        The type of app.

        + *

        The ID of the provisioning artifact.

        */ - AppType?: AppType | string; + ProvisioningArtifactId: string | undefined; /** - *

        The name of the app.

        + *

        The path identifier of the product. This value is optional if the product has a default path, and required if the product has more than one path.

        */ - AppName?: string; + PathId?: string; /** - *

        The domain ID.

        + *

        A list of key value pairs that you specify when you provision a product.

        */ - DomainId?: string; + ProvisioningParameters?: ProvisioningParameter[]; +} - /** - *

        The user profile name.

        - */ - UserProfileName?: string; +export namespace ServiceCatalogProvisioningDetails { + export const filterSensitiveLog = (obj: ServiceCatalogProvisioningDetails): any => ({ + ...obj, + }); +} +export interface CreateProjectInput { /** - *

        The status.

        + *

        The name of the project.

        */ - Status?: AppStatus | string; + ProjectName: string | undefined; /** - *

        The timestamp of the last health check.

        + *

        A description for the project.

        */ - LastHealthCheckTimestamp?: Date; + ProjectDescription?: string; /** - *

        The timestamp of the last user's activity.

        + *

        The product ID and provisioning artifact ID to provision a service catalog. For + * information, see What is AWS Service + * Catalog.

        */ - LastUserActivityTimestamp?: Date; + ServiceCatalogProvisioningDetails: ServiceCatalogProvisioningDetails | undefined; /** - *

        The creation time.

        + *

        An array of key-value pairs that you want to use to organize and track your AWS + * resource costs. For more information, see Tagging AWS resources in the AWS General Reference Guide.

        */ - CreationTime?: Date; + Tags?: Tag[]; +} + +export namespace CreateProjectInput { + export const filterSensitiveLog = (obj: CreateProjectInput): any => ({ + ...obj, + }); +} +export interface CreateProjectOutput { /** - *

        The failure reason.

        + *

        The Amazon Resource Name (ARN) of the project.

        */ - FailureReason?: string; + ProjectArn: string | undefined; /** - *

        The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance.

        + *

        The ID of the new project.

        */ - ResourceSpec?: ResourceSpec; + ProjectId: string | undefined; } -export namespace DescribeAppResponse { - export const filterSensitiveLog = (obj: DescribeAppResponse): any => ({ +export namespace CreateProjectOutput { + export const filterSensitiveLog = (obj: CreateProjectOutput): any => ({ ...obj, }); } -export interface DescribeAppImageConfigRequest { +/** + *

        Configuration information for the debug hook parameters, collection configuration, and + * storage paths.

        + */ +export interface DebugHookConfig { /** - *

        The name of the AppImageConfig to describe.

        + *

        Path to local storage location for tensors. Defaults to + * /opt/ml/output/tensors/.

        */ - AppImageConfigName: string | undefined; + LocalPath?: string; + + /** + *

        Path to Amazon S3 storage location for tensors.

        + */ + S3OutputPath: string | undefined; + + /** + *

        Configuration information for the debug hook parameters.

        + */ + HookParameters?: { [key: string]: string }; + + /** + *

        Configuration information for tensor collections.

        + */ + CollectionConfigurations?: CollectionConfiguration[]; } -export namespace DescribeAppImageConfigRequest { - export const filterSensitiveLog = (obj: DescribeAppImageConfigRequest): any => ({ +export namespace DebugHookConfig { + export const filterSensitiveLog = (obj: DebugHookConfig): any => ({ ...obj, }); } -export interface DescribeAppImageConfigResponse { +/** + *

        Configuration information for debugging rules.

        + */ +export interface DebugRuleConfiguration { /** - *

        The Amazon Resource Name (ARN) of the AppImageConfig.

        + *

        The name of the rule configuration. It must be unique relative to other rule + * configuration names.

        */ - AppImageConfigArn?: string; + RuleConfigurationName: string | undefined; /** - *

        The name of the AppImageConfig.

        + *

        Path to local storage location for output of rules. Defaults to + * /opt/ml/processing/output/rule/.

        */ - AppImageConfigName?: string; + LocalPath?: string; /** - *

        When the AppImageConfig was created.

        + *

        Path to Amazon S3 storage location for rules.

        */ - CreationTime?: Date; + S3OutputPath?: string; /** - *

        When the AppImageConfig was last modified.

        + *

        The Amazon Elastic Container (ECR) Image for the managed rule evaluation.

        */ - LastModifiedTime?: Date; + RuleEvaluatorImage: string | undefined; /** - *

        The configuration of a KernelGateway app.

        + *

        The instance type to deploy for a training job.

        */ - KernelGatewayImageConfig?: KernelGatewayImageConfig; -} + InstanceType?: ProcessingInstanceType | string; -export namespace DescribeAppImageConfigResponse { - export const filterSensitiveLog = (obj: DescribeAppImageConfigResponse): any => ({ - ...obj, - }); -} + /** + *

        The size, in GB, of the ML storage volume attached to the processing instance.

        + */ + VolumeSizeInGB?: number; -export interface DescribeAutoMLJobRequest { /** - *

        Request information about a job using that job's unique name.

        + *

        Runtime configuration for rule container.

        */ - AutoMLJobName: string | undefined; + RuleParameters?: { [key: string]: string }; } -export namespace DescribeAutoMLJobRequest { - export const filterSensitiveLog = (obj: DescribeAutoMLJobRequest): any => ({ +export namespace DebugRuleConfiguration { + export const filterSensitiveLog = (obj: DebugRuleConfiguration): any => ({ ...obj, }); } /** - *

        The resolved attributes.

        + *

        Configuration of storage locations for TensorBoard output.

        */ -export interface ResolvedAttributes { - /** - *

        Specifies a metric to minimize or maximize as the objective of a job.

        - */ - AutoMLJobObjective?: AutoMLJobObjective; - +export interface TensorBoardOutputConfig { /** - *

        The problem type.

        + *

        Path to local storage location for tensorBoard output. Defaults to + * /opt/ml/output/tensorboard.

        */ - ProblemType?: ProblemType | string; + LocalPath?: string; /** - *

        How long a job is allowed to run, or how many candidates a job is allowed to - * generate.

        + *

        Path to Amazon S3 storage location for TensorBoard output.

        */ - CompletionCriteria?: AutoMLJobCompletionCriteria; + S3OutputPath: string | undefined; } -export namespace ResolvedAttributes { - export const filterSensitiveLog = (obj: ResolvedAttributes): any => ({ +export namespace TensorBoardOutputConfig { + export const filterSensitiveLog = (obj: TensorBoardOutputConfig): any => ({ ...obj, }); } -export interface DescribeAutoMLJobResponse { +export interface CreateTrainingJobRequest { /** - *

        Returns the name of a job.

        + *

        The name of the training job. The name must be unique within an AWS Region in an + * AWS account.

        */ - AutoMLJobName: string | undefined; + TrainingJobName: string | undefined; /** - *

        Returns the job's ARN.

        + *

        Algorithm-specific parameters that influence the quality of the model. You set + * hyperparameters before you start the learning process. For a list of hyperparameters for + * each training algorithm provided by Amazon SageMaker, see Algorithms.

        + *

        You can specify a maximum of 100 hyperparameters. Each hyperparameter is a + * key-value pair. Each key and value is limited to 256 characters, as specified by the + * Length Constraint.

        */ - AutoMLJobArn: string | undefined; + HyperParameters?: { [key: string]: string }; /** - *

        Returns the job's input data config.

        + *

        The registry path of the Docker image that contains the training algorithm and + * algorithm-specific metadata, including the input mode. For more information about + * algorithms provided by Amazon SageMaker, see Algorithms. For information about + * providing your own algorithms, see Using Your Own Algorithms with Amazon + * SageMaker.

        */ - InputDataConfig: AutoMLChannel[] | undefined; + AlgorithmSpecification: AlgorithmSpecification | undefined; /** - *

        Returns the job's output data config.

        + *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform + * tasks on your behalf.

        + *

        During model training, Amazon SageMaker needs your permission to read input data from an S3 + * bucket, download a Docker image that contains training code, write model artifacts to an + * S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant + * permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker + * Roles.

        + * + *

        To be able to pass this role to Amazon SageMaker, the caller of this API must have the + * iam:PassRole permission.

        + *
        */ - OutputDataConfig: AutoMLOutputDataConfig | undefined; + RoleArn: string | undefined; /** - *

        The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that has read permission to - * the input data location and write permission to the output data location in Amazon S3.

        + *

        An array of Channel objects. Each channel is a named input source. + * InputDataConfig + * + * describes the input data and its location.

        + *

        Algorithms can accept input data from one or more channels. For example, an + * algorithm might have two channels of input data, training_data and + * validation_data. The configuration for each channel provides the S3, + * EFS, or FSx location where the input data is stored. It also provides information about + * the stored data: the MIME type, compression method, and whether the data is wrapped in + * RecordIO format.

        + *

        Depending on the input mode that the algorithm supports, Amazon SageMaker either copies input + * data files from an S3 bucket to a local directory in the Docker container, or makes it + * available as input streams. For example, if you specify an EFS location, input data + * files will be made available as input streams. They do not need to be + * downloaded.

        */ - RoleArn: string | undefined; + InputDataConfig?: Channel[]; /** - *

        Returns the job's objective.

        + *

        Specifies the path to the S3 location where you want to store model artifacts. Amazon SageMaker + * creates subfolders for the artifacts.

        */ - AutoMLJobObjective?: AutoMLJobObjective; + OutputDataConfig: OutputDataConfig | undefined; /** - *

        Returns the job's problem type.

        + *

        The resources, including the ML compute instances and ML storage volumes, to use + * for model training.

        + *

        ML storage volumes store model artifacts and incremental states. Training + * algorithms might also use ML storage volumes for scratch space. If you want Amazon SageMaker to use + * the ML storage volume to store the training data, choose File as the + * TrainingInputMode in the algorithm specification. For distributed + * training algorithms, specify an instance count greater than 1.

        */ - ProblemType?: ProblemType | string; + ResourceConfig: ResourceConfig | undefined; /** - *

        Returns the job's config.

        + *

        A VpcConfig object that specifies the VPC that you want your + * training job to connect to. Control access to and from your training container by + * configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon + * Virtual Private Cloud.

        */ - AutoMLJobConfig?: AutoMLJobConfig; + VpcConfig?: VpcConfig; /** - *

        Returns the job's creation time.

        + *

        Specifies a limit to how long a model training job can run. When the job reaches the + * time limit, Amazon SageMaker ends the training job. Use this API to cap model training costs.

        + *

        To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays + * job termination for 120 seconds. Algorithms can use this 120-second window to save the + * model artifacts, so the results of training are not lost.

        */ - CreationTime: Date | undefined; + StoppingCondition: StoppingCondition | undefined; /** - *

        Returns the job's end time.

        + *

        An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

        */ - EndTime?: Date; + Tags?: Tag[]; /** - *

        Returns the job's last modified time.

        + *

        Isolates the training container. No inbound or outbound network calls can be made, + * except for calls between peers within a training cluster for distributed training. If + * you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker + * downloads and uploads customer data and model artifacts through the specified VPC, but + * the training container does not have network access.

        */ - LastModifiedTime: Date | undefined; + EnableNetworkIsolation?: boolean; /** - *

        Returns the job's FailureReason.

        + *

        To encrypt all communications between ML compute instances in distributed training, + * choose True. Encryption provides greater security for distributed training, + * but training might take longer. How long it takes depends on the amount of communication + * between compute instances, especially if you use a deep learning algorithm in + * distributed training. For more information, see Protect Communications Between ML + * Compute Instances in a Distributed Training Job.

        */ - FailureReason?: string; + EnableInterContainerTrafficEncryption?: boolean; /** - *

        Returns the job's BestCandidate.

        + *

        To train models using managed spot training, choose True. Managed spot + * training provides a fully managed and scalable infrastructure for training machine + * learning models. This option is useful when training jobs can be interrupted and when + * there is flexibility when the training job is run.

        + *

        The complete and intermediate results of jobs are stored in an Amazon S3 bucket, and can be + * used as a starting point to train models incrementally. Amazon SageMaker provides metrics and + * logs in CloudWatch. They can be used to see when managed spot training jobs are running, + * interrupted, resumed, or completed.

        */ - BestCandidate?: AutoMLCandidate; + EnableManagedSpotTraining?: boolean; /** - *

        Returns the job's AutoMLJobStatus.

        + *

        Contains information about the output location for managed spot training checkpoint + * data.

        */ - AutoMLJobStatus: AutoMLJobStatus | string | undefined; + CheckpointConfig?: CheckpointConfig; /** - *

        Returns the job's AutoMLJobSecondaryStatus.

        + *

        Configuration information for the debug hook parameters, collection configuration, and + * storage paths.

        */ - AutoMLJobSecondaryStatus: AutoMLJobSecondaryStatus | string | undefined; + DebugHookConfig?: DebugHookConfig; /** - *

        Returns the job's output from GenerateCandidateDefinitionsOnly.

        + *

        Configuration information for debugging rules.

        */ - GenerateCandidateDefinitionsOnly?: boolean; + DebugRuleConfigurations?: DebugRuleConfiguration[]; /** - *

        Returns information on the job's artifacts found in AutoMLJobArtifacts.

        + *

        Configuration of storage locations for TensorBoard output.

        */ - AutoMLJobArtifacts?: AutoMLJobArtifacts; + TensorBoardOutputConfig?: TensorBoardOutputConfig; /** - *

        This contains ProblemType, AutoMLJobObjective and CompletionCriteria. They're - * auto-inferred values, if not provided by you. If you do provide them, then they'll be the - * same as provided.

        + *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

        + * */ - ResolvedAttributes?: ResolvedAttributes; + ExperimentConfig?: ExperimentConfig; } -export namespace DescribeAutoMLJobResponse { - export const filterSensitiveLog = (obj: DescribeAutoMLJobResponse): any => ({ +export namespace CreateTrainingJobRequest { + export const filterSensitiveLog = (obj: CreateTrainingJobRequest): any => ({ ...obj, }); } -export interface DescribeCodeRepositoryInput { +export interface CreateTrainingJobResponse { /** - *

        The name of the Git repository to describe.

        + *

        The Amazon Resource Name (ARN) of the training job.

        */ - CodeRepositoryName: string | undefined; + TrainingJobArn: string | undefined; } -export namespace DescribeCodeRepositoryInput { - export const filterSensitiveLog = (obj: DescribeCodeRepositoryInput): any => ({ +export namespace CreateTrainingJobResponse { + export const filterSensitiveLog = (obj: CreateTrainingJobResponse): any => ({ ...obj, }); } -export interface DescribeCodeRepositoryOutput { - /** - *

        The name of the Git repository.

        - */ - CodeRepositoryName: string | undefined; - - /** - *

        The Amazon Resource Name (ARN) of the Git repository.

        - */ - CodeRepositoryArn: string | undefined; +export enum JoinSource { + INPUT = "Input", + NONE = "None", +} +/** + *

        The data structure used to specify the data to be used for inference in a batch + * transform job and to associate the data that is relevant to the prediction results in + * the output. The input filter provided allows you to exclude input data that is not + * needed for inference in a batch transform job. The output filter provided allows you to + * include input data relevant to interpreting the predictions in the output from the job. + * For more information, see Associate Prediction + * Results with their Corresponding Input Records.

        + */ +export interface DataProcessing { /** - *

        The date and time that the repository was created.

        + *

        A JSONPath expression used to select a portion of the input data to pass to + * the algorithm. Use the InputFilter parameter to exclude fields, such as an + * ID column, from the input. If you want Amazon SageMaker to pass the entire input dataset to the + * algorithm, accept the default value $.

        + *

        Examples: "$", "$[1:]", "$.features" + *

        */ - CreationTime: Date | undefined; + InputFilter?: string; /** - *

        The date and time that the repository was last changed.

        + *

        A JSONPath expression used to select a portion of the joined dataset to save + * in the output file for a batch transform job. If you want Amazon SageMaker to store the entire input + * dataset in the output file, leave the default value, $. If you specify + * indexes that aren't within the dimension size of the joined dataset, you get an + * error.

        + *

        Examples: "$", "$[0,5:]", + * "$['id','SageMakerOutput']" + *

        */ - LastModifiedTime: Date | undefined; + OutputFilter?: string; /** - *

        Configuration details about the repository, including the URL where the repository is - * located, the default branch, and the Amazon Resource Name (ARN) of the AWS Secrets - * Manager secret that contains the credentials used to access the repository.

        + *

        Specifies the source of the data to join with the transformed data. The valid values + * are None and Input. The default value is None, + * which specifies not to join the input with the transformed data. If you want the batch + * transform job to join the original input data with the transformed data, set + * JoinSource to Input.

        + * + *

        For JSON or JSONLines objects, such as a JSON array, Amazon SageMaker adds the transformed data to + * the input JSON object in an attribute called SageMakerOutput. The joined + * result for JSON must be a key-value pair object. If the input is not a key-value pair + * object, Amazon SageMaker creates a new JSON file. In the new JSON file, and the input data is stored + * under the SageMakerInput key and the results are stored in + * SageMakerOutput.

        + *

        For CSV files, Amazon SageMaker combines the transformed data with the input data at the end of + * the input data and stores it in the output file. The joined data has the joined input + * data followed by the transformed data and the output is a CSV file.

        */ - GitConfig?: GitConfig; + JoinSource?: JoinSource | string; } -export namespace DescribeCodeRepositoryOutput { - export const filterSensitiveLog = (obj: DescribeCodeRepositoryOutput): any => ({ +export namespace DataProcessing { + export const filterSensitiveLog = (obj: DataProcessing): any => ({ ...obj, }); } -export interface DescribeCompilationJobRequest { +/** + *

        Configures the timeout and maximum number of retries for processing a transform job + * invocation.

        + */ +export interface ModelClientConfig { /** - *

        The name of the model compilation job that you want information about.

        + *

        The timeout value in seconds for an invocation request.

        */ - CompilationJobName: string | undefined; -} - -export namespace DescribeCompilationJobRequest { - export const filterSensitiveLog = (obj: DescribeCompilationJobRequest): any => ({ - ...obj, - }); -} + InvocationsTimeoutInSeconds?: number; -/** - *

        Provides information about the location that is configured for storing model - * artifacts.

        - *

        Model artifacts are the output that results from training a model, and typically - * consist of trained parameters, a model defintion that desribes how to compute - * inferences, and other metadata.

        - */ -export interface ModelArtifacts { /** - *

        The path of the S3 object that contains the model artifacts. For example, - * s3://bucket-name/keynameprefix/model.tar.gz.

        + *

        The maximum number of retries when invocation requests are failing.

        */ - S3ModelArtifacts: string | undefined; + InvocationsMaxRetries?: number; } -export namespace ModelArtifacts { - export const filterSensitiveLog = (obj: ModelArtifacts): any => ({ +export namespace ModelClientConfig { + export const filterSensitiveLog = (obj: ModelClientConfig): any => ({ ...obj, }); } -export interface DescribeCompilationJobResponse { +export interface CreateTransformJobRequest { /** - *

        The name of the model compilation job.

        + *

        The name of the transform job. The name must be unique within an AWS Region in an + * AWS account.

        */ - CompilationJobName: string | undefined; + TransformJobName: string | undefined; /** - *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker assumes to perform the model - * compilation job.

        + *

        The name of the model that you want to use for the transform job. + * ModelName must be the name of an existing Amazon SageMaker model within an AWS + * Region in an AWS account.

        */ - CompilationJobArn: string | undefined; + ModelName: string | undefined; /** - *

        The status of the model compilation job.

        + *

        The maximum number of parallel requests that can be sent to each instance in a + * transform job. If MaxConcurrentTransforms is set to 0 or left + * unset, Amazon SageMaker checks the optional execution-parameters to determine the settings for your + * chosen algorithm. If the execution-parameters endpoint is not enabled, the default value + * is 1. For more information on execution-parameters, see How Containers Serve Requests. For built-in algorithms, you don't need to + * set a value for MaxConcurrentTransforms.

        */ - CompilationJobStatus: CompilationJobStatus | string | undefined; + MaxConcurrentTransforms?: number; /** - *

        The time when the model compilation job started the CompilationJob - * instances.

        - *

        You are billed for the time between this timestamp and the timestamp in the DescribeCompilationJobResponse$CompilationEndTime field. In Amazon CloudWatch Logs, - * the start time might be later than this time. That's because it takes time to download - * the compilation job, which depends on the size of the compilation job container.

        + *

        Configures the timeout and maximum number of retries for processing a transform job + * invocation.

        */ - CompilationStartTime?: Date; + ModelClientConfig?: ModelClientConfig; /** - *

        The time when the model compilation job on a compilation job instance ended. For a - * successful or stopped job, this is when the job's model artifacts have finished - * uploading. For a failed job, this is when Amazon SageMaker detected that the job failed.

        + *

        The maximum allowed size of the payload, in MB. A payload is the + * data portion of a record (without metadata). The value in MaxPayloadInMB + * must be greater than, or equal to, the size of a single record. To estimate the size of + * a record in MB, divide the size of your dataset by the number of records. To ensure that + * the records fit within the maximum payload size, we recommend using a slightly larger + * value. The default value is 6 MB. + *

        + *

        For cases where the payload might be arbitrarily large and is transmitted using HTTP + * chunked encoding, set the value to 0. + * This + * feature works only in supported algorithms. Currently, Amazon SageMaker built-in + * algorithms do not support HTTP chunked encoding.

        */ - CompilationEndTime?: Date; + MaxPayloadInMB?: number; /** - *

        Specifies a limit to how long a model compilation job can run. When the job reaches - * the time limit, Amazon SageMaker ends the compilation job. Use this API to cap model training - * costs.

        + *

        Specifies the number of records to include in a mini-batch for an HTTP inference + * request. A record + * is a single unit of input data that + * inference can be made on. For example, a single line in a CSV file is a record.

        + *

        To enable the batch strategy, you must set the SplitType property to + * Line, RecordIO, or TFRecord.

        + *

        To use only one record when making an HTTP invocation request to a container, set + * BatchStrategy to SingleRecord and SplitType + * to Line.

        + *

        To fit as many records in a mini-batch as can fit within the + * MaxPayloadInMB limit, set BatchStrategy to + * MultiRecord and SplitType to Line.

        */ - StoppingCondition: StoppingCondition | undefined; + BatchStrategy?: BatchStrategy | string; /** - *

        The time that the model compilation job was created.

        + *

        The environment variables to set in the Docker container. We support up to 16 key and + * values entries in the map.

        */ - CreationTime: Date | undefined; + Environment?: { [key: string]: string }; /** - *

        The time that the status - * of - * the model compilation job was last modified.

        + *

        Describes the input source and + * the + * way the transform job consumes it.

        */ - LastModifiedTime: Date | undefined; + TransformInput: TransformInput | undefined; /** - *

        If a model compilation job failed, the reason it failed.

        + *

        Describes the results of the transform job.

        */ - FailureReason: string | undefined; + TransformOutput: TransformOutput | undefined; /** - *

        Information about the location in Amazon S3 that has been configured for storing the model - * artifacts used in the compilation job.

        + *

        Describes the resources, including + * ML + * instance types and ML instance count, to use for the transform + * job.

        */ - ModelArtifacts: ModelArtifacts | undefined; + TransformResources: TransformResources | undefined; /** - *

        The Amazon Resource Name (ARN) of the model compilation job.

        + *

        The data structure used to specify the data to be used for inference in a batch + * transform job and to associate the data that is relevant to the prediction results in + * the output. The input filter provided allows you to exclude input data that is not + * needed for inference in a batch transform job. The output filter provided allows you to + * include input data relevant to interpreting the predictions in the output from the job. + * For more information, see Associate Prediction + * Results with their Corresponding Input Records.

        */ - RoleArn: string | undefined; + DataProcessing?: DataProcessing; /** - *

        Information about the location in Amazon S3 of the input model artifacts, the name and - * shape of the expected data inputs, and the framework in which the model was - * trained.

        + *

        (Optional) + * An + * array of key-value pairs. For more information, see Using + * Cost Allocation Tags in the AWS Billing and Cost Management User + * Guide.

        */ - InputConfig: InputConfig | undefined; + Tags?: Tag[]; /** - *

        Information about the output location for the compiled model and the target device - * that the model runs on.

        + *

        Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

        + * */ - OutputConfig: OutputConfig | undefined; + ExperimentConfig?: ExperimentConfig; } -export namespace DescribeCompilationJobResponse { - export const filterSensitiveLog = (obj: DescribeCompilationJobResponse): any => ({ +export namespace CreateTransformJobRequest { + export const filterSensitiveLog = (obj: CreateTransformJobRequest): any => ({ ...obj, }); } -export interface DescribeDomainRequest { +export interface CreateTransformJobResponse { /** - *

        The domain ID.

        + *

        The Amazon Resource Name (ARN) of the transform job.

        */ - DomainId: string | undefined; + TransformJobArn: string | undefined; } -export namespace DescribeDomainRequest { - export const filterSensitiveLog = (obj: DescribeDomainRequest): any => ({ +export namespace CreateTransformJobResponse { + export const filterSensitiveLog = (obj: CreateTransformJobResponse): any => ({ ...obj, }); } -export enum DomainStatus { - Delete_Failed = "Delete_Failed", - Deleting = "Deleting", - Failed = "Failed", - InService = "InService", - Pending = "Pending", - Update_Failed = "Update_Failed", - Updating = "Updating", -} +export interface CreateTrialRequest { + /** + *

        The name of the trial. The name must be unique in your AWS account and is not + * case-sensitive.

        + */ + TrialName: string | undefined; -export interface DescribeDomainResponse { /** - *

        The domain's Amazon Resource Name (ARN).

        + *

        The name of the trial as displayed. The name doesn't need to be unique. If + * DisplayName isn't specified, TrialName is displayed.

        */ - DomainArn?: string; + DisplayName?: string; /** - *

        The domain ID.

        + *

        The name of the experiment to associate the trial with.

        */ - DomainId?: string; + ExperimentName: string | undefined; /** - *

        The domain name.

        + *

        Metadata properties of the tracking entity, trial, or trial component.

        */ - DomainName?: string; + MetadataProperties?: MetadataProperties; /** - *

        The ID of the Amazon Elastic File System (EFS) managed by this Domain.

        + *

        A list of tags to associate with the trial. You can use Search API to + * search on the tags.

        */ - HomeEfsFileSystemId?: string; + Tags?: Tag[]; +} + +export namespace CreateTrialRequest { + export const filterSensitiveLog = (obj: CreateTrialRequest): any => ({ + ...obj, + }); +} +export interface CreateTrialResponse { /** - *

        The SSO managed application instance ID.

        + *

        The Amazon Resource Name (ARN) of the trial.

        */ - SingleSignOnManagedApplicationInstanceId?: string; + TrialArn?: string; +} +export namespace CreateTrialResponse { + export const filterSensitiveLog = (obj: CreateTrialResponse): any => ({ + ...obj, + }); +} + +/** + *

        Represents an input or output artifact of a trial component. You specify + * TrialComponentArtifact as part of the InputArtifacts and + * OutputArtifacts parameters in the CreateTrialComponent + * request.

        + *

        Examples of input artifacts are datasets, algorithms, hyperparameters, source code, and + * instance types. Examples of output artifacts are metrics, snapshots, logs, and images.

        + */ +export interface TrialComponentArtifact { /** - *

        The status.

        + *

        The media type of the artifact, which indicates the type of data in the artifact file. The + * media type consists of a type and a subtype + * concatenated with a slash (/) character, for example, text/csv, image/jpeg, and s3/uri. The + * type specifies the category of the media. The subtype specifies the kind of data.

        */ - Status?: DomainStatus | string; + MediaType?: string; /** - *

        The creation time.

        + *

        The location of the artifact.

        */ - CreationTime?: Date; + Value: string | undefined; +} + +export namespace TrialComponentArtifact { + export const filterSensitiveLog = (obj: TrialComponentArtifact): any => ({ + ...obj, + }); +} + +/** + *

        The value of a hyperparameter. Only one of NumberValue or + * StringValue can be specified.

        + *

        This object is specified in the CreateTrialComponent request.

        + */ +export type TrialComponentParameterValue = + | TrialComponentParameterValue.NumberValueMember + | TrialComponentParameterValue.StringValueMember + | TrialComponentParameterValue.$UnknownMember; + +export namespace TrialComponentParameterValue { + /** + *

        The string value of a categorical hyperparameter. If you specify a value for this + * parameter, you can't specify the NumberValue parameter.

        + */ + export interface StringValueMember { + StringValue: string; + NumberValue?: never; + $unknown?: never; + } + + /** + *

        The numeric value of a numeric hyperparameter. If you specify a value for this parameter, + * you can't specify the StringValue parameter.

        + */ + export interface NumberValueMember { + StringValue?: never; + NumberValue: number; + $unknown?: never; + } + + export interface $UnknownMember { + StringValue?: never; + NumberValue?: never; + $unknown: [string, any]; + } + + export interface Visitor { + StringValue: (value: string) => T; + NumberValue: (value: number) => T; + _: (name: string, value: any) => T; + } + + export const visit = (value: TrialComponentParameterValue, visitor: Visitor): T => { + if (value.StringValue !== undefined) return visitor.StringValue(value.StringValue); + if (value.NumberValue !== undefined) return visitor.NumberValue(value.NumberValue); + return visitor._(value.$unknown[0], value.$unknown[1]); + }; + + export const filterSensitiveLog = (obj: TrialComponentParameterValue): any => { + if (obj.StringValue !== undefined) return { StringValue: obj.StringValue }; + if (obj.NumberValue !== undefined) return { NumberValue: obj.NumberValue }; + if (obj.$unknown !== undefined) return { [obj.$unknown[0]]: "UNKNOWN" }; + }; +} + +export enum TrialComponentPrimaryStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", +} +/** + *

        The status of the trial component.

        + */ +export interface TrialComponentStatus { /** - *

        The last modified time.

        + *

        The status of the trial component.

        */ - LastModifiedTime?: Date; + PrimaryStatus?: TrialComponentPrimaryStatus | string; /** - *

        The failure reason.

        + *

        If the component failed, a message describing why.

        */ - FailureReason?: string; + Message?: string; +} +export namespace TrialComponentStatus { + export const filterSensitiveLog = (obj: TrialComponentStatus): any => ({ + ...obj, + }); +} + +export interface CreateTrialComponentRequest { /** - *

        The domain's authentication mode.

        + *

        The name of the component. The name must be unique in your AWS account and is not + * case-sensitive.

        */ - AuthMode?: AuthMode | string; + TrialComponentName: string | undefined; /** - *

        Settings which are applied to all UserProfiles in this domain, if settings are not explicitly specified - * in a given UserProfile. - *

        + *

        The name of the component as displayed. The name doesn't need to be unique. If + * DisplayName isn't specified, TrialComponentName is + * displayed.

        */ - DefaultUserSettings?: UserSettings; + DisplayName?: string; /** - *

        Specifies the VPC used for non-EFS traffic. The default value is - * PublicInternetOnly.

        + *

        The status of the component. States include:

        *
          *
        • - *

          - * PublicInternetOnly - Non-EFS traffic is through a VPC managed by - * Amazon SageMaker, which allows direct internet access

          + *

          InProgress

          *
        • *
        • - *

          - * VpcOnly - All Studio traffic is through the specified VPC and subnets

          + *

          Completed

          + *
        • + *
        • + *

          Failed

          *
        • *
        */ - AppNetworkAccessType?: AppNetworkAccessType | string; + Status?: TrialComponentStatus; /** - *

        This member is deprecated and replaced with KmsKeyId.

        + *

        When the component started.

        */ - HomeEfsFileSystemKmsKeyId?: string; + StartTime?: Date; /** - *

        The VPC subnets that Studio uses for communication.

        + *

        When the component ended.

        */ - SubnetIds?: string[]; + EndTime?: Date; /** - *

        The domain's URL.

        + *

        The hyperparameters for the component.

        */ - Url?: string; + Parameters?: { [key: string]: TrialComponentParameterValue }; /** - *

        The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.

        + *

        The input artifacts for the component. Examples of input artifacts are datasets, + * algorithms, hyperparameters, source code, and instance types.

        */ - VpcId?: string; + InputArtifacts?: { [key: string]: TrialComponentArtifact }; /** - *

        The AWS KMS customer managed CMK used to encrypt - * the EFS volume attached to the domain.

        + *

        The output artifacts for the component. Examples of output artifacts are metrics, + * snapshots, logs, and images.

        */ - KmsKeyId?: string; + OutputArtifacts?: { [key: string]: TrialComponentArtifact }; + + /** + *

        Metadata properties of the tracking entity, trial, or trial component.

        + */ + MetadataProperties?: MetadataProperties; + + /** + *

        A list of tags to associate with the component. You can use Search API + * to search on the tags.

        + */ + Tags?: Tag[]; } -export namespace DescribeDomainResponse { - export const filterSensitiveLog = (obj: DescribeDomainResponse): any => ({ +export namespace CreateTrialComponentRequest { + export const filterSensitiveLog = (obj: CreateTrialComponentRequest): any => ({ ...obj, + ...(obj.Parameters && { + Parameters: Object.entries(obj.Parameters).reduce( + (acc: any, [key, value]: [string, TrialComponentParameterValue]) => ({ + ...acc, + [key]: TrialComponentParameterValue.filterSensitiveLog(value), + }), + {} + ), + }), }); } -export interface DescribeEndpointInput { +export interface CreateTrialComponentResponse { /** - *

        The name of the endpoint.

        + *

        The Amazon Resource Name (ARN) of the trial component.

        */ - EndpointName: string | undefined; + TrialComponentArn?: string; } -export namespace DescribeEndpointInput { - export const filterSensitiveLog = (obj: DescribeEndpointInput): any => ({ +export namespace CreateTrialComponentResponse { + export const filterSensitiveLog = (obj: CreateTrialComponentResponse): any => ({ ...obj, }); } -export enum EndpointStatus { - CREATING = "Creating", - DELETING = "Deleting", - FAILED = "Failed", - IN_SERVICE = "InService", - OUT_OF_SERVICE = "OutOfService", - ROLLING_BACK = "RollingBack", - SYSTEM_UPDATING = "SystemUpdating", - UPDATING = "Updating", -} - -/** - *

        Describes weight and capacities for a production variant associated with an - * endpoint. If you sent a request to the UpdateEndpointWeightsAndCapacities - * API and the endpoint status is Updating, you get different desired and - * current values.

        - */ -export interface ProductionVariantSummary { +export interface CreateUserProfileRequest { /** - *

        The name of the variant.

        + *

        The ID of the associated Domain.

        */ - VariantName: string | undefined; + DomainId: string | undefined; /** - *

        An array of DeployedImage objects that specify the Amazon EC2 Container Registry paths of the - * inference images deployed on instances of this ProductionVariant.

        + *

        A name for the UserProfile.

        */ - DeployedImages?: DeployedImage[]; + UserProfileName: string | undefined; /** - *

        The weight associated with the variant.

        + *

        A specifier for the type of value specified in SingleSignOnUserValue. Currently, the only supported value is "UserName". + * If the Domain's AuthMode is SSO, this field is required. If the Domain's AuthMode is not SSO, this field cannot be specified. + *

        */ - CurrentWeight?: number; + SingleSignOnUserIdentifier?: string; /** - *

        The requested weight, as specified in the - * UpdateEndpointWeightsAndCapacities request.

        + *

        The username of the associated AWS Single Sign-On User for this UserProfile. If the Domain's AuthMode is SSO, this field is + * required, and must match a valid username of a user in your directory. If the Domain's AuthMode is not SSO, this field cannot be specified. + *

        */ - DesiredWeight?: number; + SingleSignOnUserValue?: string; /** - *

        The number of instances associated with the variant.

        + *

        Each tag consists of a key and an optional value. + * Tag keys must be unique per resource.

        */ - CurrentInstanceCount?: number; + Tags?: Tag[]; /** - *

        The number of instances requested in the - * UpdateEndpointWeightsAndCapacities request.

        + *

        A collection of settings.

        */ - DesiredInstanceCount?: number; + UserSettings?: UserSettings; } -export namespace ProductionVariantSummary { - export const filterSensitiveLog = (obj: ProductionVariantSummary): any => ({ +export namespace CreateUserProfileRequest { + export const filterSensitiveLog = (obj: CreateUserProfileRequest): any => ({ ...obj, }); } -export interface DescribeEndpointOutput { +export interface CreateUserProfileResponse { /** - *

        Name of the endpoint.

        + *

        The user profile Amazon Resource Name (ARN).

        */ - EndpointName: string | undefined; + UserProfileArn?: string; +} + +export namespace CreateUserProfileResponse { + export const filterSensitiveLog = (obj: CreateUserProfileResponse): any => ({ + ...obj, + }); +} +/** + *

        Use this parameter to configure your OIDC Identity Provider (IdP).

        + */ +export interface OidcConfig { /** - *

        The Amazon Resource Name (ARN) of the endpoint.

        + *

        The OIDC IdP client ID used to configure your private workforce.

        */ - EndpointArn: string | undefined; + ClientId: string | undefined; /** - *

        The name of the endpoint configuration associated with this endpoint.

        + *

        The OIDC IdP client secret used to configure your private workforce.

        */ - EndpointConfigName: string | undefined; + ClientSecret: string | undefined; /** - *

        An array of ProductionVariantSummary objects, one for each model - * hosted behind this endpoint.

        + *

        The OIDC IdP issuer used to configure your private workforce.

        */ - ProductionVariants?: ProductionVariantSummary[]; + Issuer: string | undefined; /** - *

        + *

        The OIDC IdP authorization endpoint used to configure your private workforce.

        */ - DataCaptureConfig?: DataCaptureConfigSummary; + AuthorizationEndpoint: string | undefined; /** - *

        The status of the endpoint.

        - *
          - *
        • - *

          - * OutOfService: Endpoint is not available to take incoming - * requests.

          - *
        • - *
        • - *

          - * Creating: CreateEndpoint is executing.

          - *
        • - *
        • - *

          - * Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

          - *
        • - *
        • - *

          - * SystemUpdating: Endpoint is undergoing maintenance and cannot be - * updated or deleted or re-scaled until it has completed. This maintenance - * operation does not change any customer-specified values such as VPC config, KMS - * encryption, model, instance type, or instance count.

          - *
        • - *
        • - *

          - * RollingBack: Endpoint fails to scale up or down or change its - * variant weight and is in the process of rolling back to its previous - * configuration. Once the rollback completes, endpoint returns to an - * InService status. This transitional status only applies to an - * endpoint that has autoscaling enabled and is undergoing variant weight or - * capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called - * explicitly.

          - *
        • - *
        • - *

          - * InService: Endpoint is available to process incoming - * requests.

          - *
        • - *
        • - *

          - * Deleting: DeleteEndpoint is executing.

          - *
        • - *
        • - *

          - * Failed: Endpoint could not be created, updated, or re-scaled. Use - * DescribeEndpointOutput$FailureReason for information about - * the failure. DeleteEndpoint is the only operation that can be - * performed on a failed endpoint.

          - *
        • - *
        + *

        The OIDC IdP token endpoint used to configure your private workforce.

        */ - EndpointStatus: EndpointStatus | string | undefined; + TokenEndpoint: string | undefined; /** - *

        If the status of the endpoint is Failed, the reason why it failed. - *

        + *

        The OIDC IdP user information endpoint used to configure your private workforce.

        */ - FailureReason?: string; + UserInfoEndpoint: string | undefined; /** - *

        A timestamp that shows when the endpoint was created.

        + *

        The OIDC IdP logout endpoint used to configure your private workforce.

        */ - CreationTime: Date | undefined; + LogoutEndpoint: string | undefined; /** - *

        A timestamp that shows when the endpoint was last modified.

        + *

        The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

        */ - LastModifiedTime: Date | undefined; + JwksUri: string | undefined; } -export namespace DescribeEndpointOutput { - export const filterSensitiveLog = (obj: DescribeEndpointOutput): any => ({ +export namespace OidcConfig { + export const filterSensitiveLog = (obj: OidcConfig): any => ({ ...obj, + ...(obj.ClientSecret && { ClientSecret: SENSITIVE_STRING }), }); } -export interface DescribeEndpointConfigInput { +/** + *

        A list of IP address ranges (CIDRs). Used to create an allow + * list of IP addresses for a private workforce. Workers will only be able to login to their worker portal from an + * IP address within this range. By default, a workforce isn't restricted to specific IP addresses.

        + */ +export interface SourceIpConfig { /** - *

        The name of the endpoint configuration.

        + *

        A list of one to ten Classless Inter-Domain Routing (CIDR) values.

        + *

        Maximum: Ten CIDR values

        + * + *

        The following Length Constraints apply to individual CIDR values in + * the CIDR value list.

        + *
        */ - EndpointConfigName: string | undefined; + Cidrs: string[] | undefined; } -export namespace DescribeEndpointConfigInput { - export const filterSensitiveLog = (obj: DescribeEndpointConfigInput): any => ({ +export namespace SourceIpConfig { + export const filterSensitiveLog = (obj: SourceIpConfig): any => ({ ...obj, }); } -export interface DescribeEndpointConfigOutput { - /** - *

        Name of the Amazon SageMaker endpoint configuration.

        - */ - EndpointConfigName: string | undefined; - +export interface CreateWorkforceRequest { /** - *

        The Amazon Resource Name (ARN) of the endpoint configuration.

        + *

        Use this parameter to configure an Amazon Cognito private workforce. + * A single Cognito workforce is created using and corresponds to a single + * + * Amazon Cognito user pool.

        + * + *

        Do not use OidcConfig if you specify values for + * CognitoConfig.

        */ - EndpointConfigArn: string | undefined; + CognitoConfig?: CognitoConfig; /** - *

        An array of ProductionVariant objects, one for each model that you - * want to host at this endpoint.

        + *

        Use this parameter to configure a private workforce using your own OIDC Identity Provider.

        + *

        Do not use CognitoConfig if you specify values for + * OidcConfig.

        */ - ProductionVariants: ProductionVariant[] | undefined; + OidcConfig?: OidcConfig; /** - *

        + *

        A list of IP address ranges (CIDRs). Used to create an allow + * list of IP addresses for a private workforce. Workers will only be able to login to their worker portal from an + * IP address within this range. By default, a workforce isn't restricted to specific IP addresses.

        */ - DataCaptureConfig?: DataCaptureConfig; + SourceIpConfig?: SourceIpConfig; /** - *

        AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage - * volume attached to the instance.

        + *

        The name of the private workforce.

        */ - KmsKeyId?: string; + WorkforceName: string | undefined; /** - *

        A timestamp that shows when the endpoint configuration was created.

        + *

        An array of key-value pairs that contain metadata to help you categorize and + * organize our workforce. Each tag consists of a key and a value, + * both of which you define.

        */ - CreationTime: Date | undefined; + Tags?: Tag[]; } -export namespace DescribeEndpointConfigOutput { - export const filterSensitiveLog = (obj: DescribeEndpointConfigOutput): any => ({ +export namespace CreateWorkforceRequest { + export const filterSensitiveLog = (obj: CreateWorkforceRequest): any => ({ ...obj, + ...(obj.OidcConfig && { OidcConfig: OidcConfig.filterSensitiveLog(obj.OidcConfig) }), }); } -export interface DescribeExperimentRequest { +export interface CreateWorkforceResponse { /** - *

        The name of the experiment to describe.

        + *

        The Amazon Resource Name (ARN) of the workforce.

        */ - ExperimentName: string | undefined; + WorkforceArn: string | undefined; } -export namespace DescribeExperimentRequest { - export const filterSensitiveLog = (obj: DescribeExperimentRequest): any => ({ +export namespace CreateWorkforceResponse { + export const filterSensitiveLog = (obj: CreateWorkforceResponse): any => ({ ...obj, }); } /** - *

        Information about the user who created or modified an experiment, trial, or trial - * component.

        + *

        A list of user groups that exist in your OIDC Identity Provider (IdP). + * One to ten groups can be used to create a single private work team. + * When you add a user group to the list of Groups, you can add that user group to one or more + * private work teams. If you add a user group to a private work team, all workers in that user group + * are added to the work team.

        */ -export interface UserContext { - /** - *

        The Amazon Resource Name (ARN) of the user's profile.

        - */ - UserProfileArn?: string; - - /** - *

        The name of the user's profile.

        - */ - UserProfileName?: string; - +export interface OidcMemberDefinition { /** - *

        The domain associated with the user.

        + *

        A list of comma seperated strings that identifies + * user groups in your OIDC IdP. Each user group is + * made up of a group of private workers.

        */ - DomainId?: string; + Groups: string[] | undefined; } -export namespace UserContext { - export const filterSensitiveLog = (obj: UserContext): any => ({ +export namespace OidcMemberDefinition { + export const filterSensitiveLog = (obj: OidcMemberDefinition): any => ({ ...obj, }); } /** - *

        The source of the experiment.

        + *

        Defines an Amazon Cognito or your own OIDC IdP user group that is part of a work team.

        */ -export interface ExperimentSource { +export interface MemberDefinition { /** - *

        The Amazon Resource Name (ARN) of the source.

        + *

        The Amazon Cognito user group that is part of the work team.

        */ - SourceArn: string | undefined; + CognitoMemberDefinition?: CognitoMemberDefinition; /** - *

        The source type.

        + *

        A list user groups that exist in your OIDC Identity Provider (IdP). + * One to ten groups can be used to create a single private work team. + * When you add a user group to the list of Groups, you can add that user group to one or more + * private work teams. If you add a user group to a private work team, all workers in that user group + * are added to the work team.

        */ - SourceType?: string; + OidcMemberDefinition?: OidcMemberDefinition; } -export namespace ExperimentSource { - export const filterSensitiveLog = (obj: ExperimentSource): any => ({ +export namespace MemberDefinition { + export const filterSensitiveLog = (obj: MemberDefinition): any => ({ ...obj, }); } -export interface DescribeExperimentResponse { - /** - *

        The name of the experiment.

        - */ - ExperimentName?: string; - +/** + *

        Configures SNS notifications of available or expiring work items for work + * teams.

        + */ +export interface NotificationConfiguration { /** - *

        The Amazon Resource Name (ARN) of the experiment.

        + *

        The ARN for the SNS topic to which notifications should be published.

        */ - ExperimentArn?: string; + NotificationTopicArn?: string; +} - /** - *

        The name of the experiment as displayed. If DisplayName isn't specified, - * ExperimentName is displayed.

        - */ - DisplayName?: string; +export namespace NotificationConfiguration { + export const filterSensitiveLog = (obj: NotificationConfiguration): any => ({ + ...obj, + }); +} +export interface CreateWorkteamRequest { /** - *

        The ARN of the source and, optionally, the type.

        + *

        The name of the work team. Use this name to identify the work team.

        */ - Source?: ExperimentSource; + WorkteamName: string | undefined; /** - *

        The description of the experiment.

        + *

        The name of the workforce.

        */ - Description?: string; + WorkforceName?: string; /** - *

        When the experiment was created.

        + *

        A list of MemberDefinition objects that contains objects that identify + * the workers that make up the work team.

        + *

        Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). For + * private workforces created using Amazon Cognito use CognitoMemberDefinition. For + * workforces created using your own OIDC identity provider (IdP) use + * OidcMemberDefinition. Do not provide input for both of these parameters + * in a single request.

        + *

        For workforces created using Amazon Cognito, private work teams correspond to Amazon Cognito + * user groups within the user pool used to create a workforce. All of the + * CognitoMemberDefinition objects that make up the member definition must + * have the same ClientId and UserPool values. To add a Amazon + * Cognito user group to an existing worker pool, see Adding groups to a User + * Pool. For more information about user pools, see Amazon Cognito User + * Pools.

        + *

        For workforces created using your own OIDC IdP, specify the user groups that you want to + * include in your private work team in OidcMemberDefinition by listing those groups + * in Groups.

        */ - CreationTime?: Date; + MemberDefinitions: MemberDefinition[] | undefined; /** - *

        Who created the experiment.

        + *

        A description of the work team.

        */ - CreatedBy?: UserContext; + Description: string | undefined; /** - *

        When the experiment was last modified.

        + *

        Configures notification of workers regarding available or expiring work items.

        */ - LastModifiedTime?: Date; + NotificationConfiguration?: NotificationConfiguration; /** - *

        Who last modified the experiment.

        + *

        An array of key-value pairs.

        + *

        For more information, see Resource + * Tag and Using + * Cost Allocation Tags in the AWS Billing and Cost Management User + * Guide.

        */ - LastModifiedBy?: UserContext; + Tags?: Tag[]; } -export namespace DescribeExperimentResponse { - export const filterSensitiveLog = (obj: DescribeExperimentResponse): any => ({ +export namespace CreateWorkteamRequest { + export const filterSensitiveLog = (obj: CreateWorkteamRequest): any => ({ ...obj, }); } -export interface DescribeFlowDefinitionRequest { +export interface CreateWorkteamResponse { /** - *

        The name of the flow definition.

        + *

        The Amazon Resource Name (ARN) of the work team. You can use this ARN to identify the + * work team.

        */ - FlowDefinitionName: string | undefined; + WorkteamArn?: string; } -export namespace DescribeFlowDefinitionRequest { - export const filterSensitiveLog = (obj: DescribeFlowDefinitionRequest): any => ({ +export namespace CreateWorkteamResponse { + export const filterSensitiveLog = (obj: CreateWorkteamResponse): any => ({ ...obj, }); } -export enum FlowDefinitionStatus { - ACTIVE = "Active", - DELETING = "Deleting", - FAILED = "Failed", - INITIALIZING = "Initializing", -} - -export interface DescribeFlowDefinitionResponse { +/** + *

        + */ +export interface DataCaptureConfigSummary { /** - *

        The Amazon Resource Name (ARN) of the flow defintion.

        + *

        */ - FlowDefinitionArn: string | undefined; + EnableCapture: boolean | undefined; /** - *

        The Amazon Resource Name (ARN) of the flow definition.

        + *

        */ - FlowDefinitionName: string | undefined; + CaptureStatus: CaptureStatus | string | undefined; /** - *

        The status of the flow definition. Valid values are listed below.

        + *

        */ - FlowDefinitionStatus: FlowDefinitionStatus | string | undefined; + CurrentSamplingPercentage: number | undefined; /** - *

        The timestamp when the flow definition was created.

        + *

        */ - CreationTime: Date | undefined; + DestinationS3Uri: string | undefined; /** - *

        Container for configuring the source of human task requests. Used to specify if - * Amazon Rekognition or Amazon Textract is used as an integration source.

        + *

        */ - HumanLoopRequestSource?: HumanLoopRequestSource; + KmsKeyId: string | undefined; +} + +export namespace DataCaptureConfigSummary { + export const filterSensitiveLog = (obj: DataCaptureConfigSummary): any => ({ + ...obj, + }); +} + +export enum RuleEvaluationStatus { + ERROR = "Error", + IN_PROGRESS = "InProgress", + ISSUES_FOUND = "IssuesFound", + NO_ISSUES_FOUND = "NoIssuesFound", + STOPPED = "Stopped", + STOPPING = "Stopping", +} +/** + *

        Information about the status of the rule evaluation.

        + */ +export interface DebugRuleEvaluationStatus { /** - *

        An object containing information about what triggers a human review workflow.

        + *

        The name of the rule configuration

        */ - HumanLoopActivationConfig?: HumanLoopActivationConfig; + RuleConfigurationName?: string; /** - *

        An object containing information about who works on the task, the workforce task price, and other task details.

        + *

        The Amazon Resource Name (ARN) of the rule evaluation job.

        */ - HumanLoopConfig: HumanLoopConfig | undefined; + RuleEvaluationJobArn?: string; /** - *

        An object containing information about the output file.

        + *

        Status of the rule evaluation.

        */ - OutputConfig: FlowDefinitionOutputConfig | undefined; + RuleEvaluationStatus?: RuleEvaluationStatus | string; /** - *

        The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) execution role for the flow definition.

        + *

        Details from the rule evaluation.

        */ - RoleArn: string | undefined; + StatusDetails?: string; /** - *

        The reason your flow definition failed.

        + *

        Timestamp when the rule evaluation status was last modified.

        */ - FailureReason?: string; + LastModifiedTime?: Date; } -export namespace DescribeFlowDefinitionResponse { - export const filterSensitiveLog = (obj: DescribeFlowDefinitionResponse): any => ({ +export namespace DebugRuleEvaluationStatus { + export const filterSensitiveLog = (obj: DebugRuleEvaluationStatus): any => ({ ...obj, }); } -export interface DescribeHumanTaskUiRequest { +export interface DeleteActionRequest { /** - *

        The name of the human task user interface - * (worker task template) you want information about.

        + *

        The name of the action to delete.

        */ - HumanTaskUiName: string | undefined; + ActionName: string | undefined; } -export namespace DescribeHumanTaskUiRequest { - export const filterSensitiveLog = (obj: DescribeHumanTaskUiRequest): any => ({ +export namespace DeleteActionRequest { + export const filterSensitiveLog = (obj: DeleteActionRequest): any => ({ ...obj, }); } -export enum HumanTaskUiStatus { - ACTIVE = "Active", - DELETING = "Deleting", -} - -/** - *

        Container for user interface template information.

        - */ -export interface UiTemplateInfo { +export interface DeleteActionResponse { /** - *

        The URL for the user interface template.

        + *

        The Amazon Resource Name (ARN) of the action.

        */ - Url?: string; + ActionArn?: string; +} + +export namespace DeleteActionResponse { + export const filterSensitiveLog = (obj: DeleteActionResponse): any => ({ + ...obj, + }); +} +export interface DeleteAlgorithmInput { /** - *

        The SHA-256 digest of the contents of the template.

        + *

        The name of the algorithm to delete.

        */ - ContentSha256?: string; + AlgorithmName: string | undefined; } -export namespace UiTemplateInfo { - export const filterSensitiveLog = (obj: UiTemplateInfo): any => ({ +export namespace DeleteAlgorithmInput { + export const filterSensitiveLog = (obj: DeleteAlgorithmInput): any => ({ ...obj, }); } -export interface DescribeHumanTaskUiResponse { +export interface DeleteAppRequest { /** - *

        The Amazon Resource Name (ARN) of the human task user interface (worker task template).

        + *

        The domain ID.

        */ - HumanTaskUiArn: string | undefined; + DomainId: string | undefined; /** - *

        The name of the human task user interface (worker task template).

        + *

        The user profile name.

        */ - HumanTaskUiName: string | undefined; + UserProfileName: string | undefined; /** - *

        The status of the human task user interface (worker task template). Valid values are listed below.

        + *

        The type of app.

        */ - HumanTaskUiStatus?: HumanTaskUiStatus | string; + AppType: AppType | string | undefined; /** - *

        The timestamp when the human task user interface was created.

        + *

        The name of the app.

        */ - CreationTime: Date | undefined; + AppName: string | undefined; +} + +export namespace DeleteAppRequest { + export const filterSensitiveLog = (obj: DeleteAppRequest): any => ({ + ...obj, + }); +} +export interface DeleteAppImageConfigRequest { /** - *

        Container for user interface template information.

        + *

        The name of the AppImageConfig to delete.

        */ - UiTemplate: UiTemplateInfo | undefined; + AppImageConfigName: string | undefined; } -export namespace DescribeHumanTaskUiResponse { - export const filterSensitiveLog = (obj: DescribeHumanTaskUiResponse): any => ({ +export namespace DeleteAppImageConfigRequest { + export const filterSensitiveLog = (obj: DeleteAppImageConfigRequest): any => ({ ...obj, }); } -export interface DescribeHyperParameterTuningJobRequest { +export interface DeleteArtifactRequest { /** - *

        The name of the tuning job.

        + *

        The Amazon Resource Name (ARN) of the artifact to delete.

        */ - HyperParameterTuningJobName: string | undefined; + ArtifactArn?: string; + + /** + *

        The URI of the source.

        + */ + Source?: ArtifactSource; } -export namespace DescribeHyperParameterTuningJobRequest { - export const filterSensitiveLog = (obj: DescribeHyperParameterTuningJobRequest): any => ({ +export namespace DeleteArtifactRequest { + export const filterSensitiveLog = (obj: DeleteArtifactRequest): any => ({ ...obj, }); } -/** - *

        Shows the final value for the - * objective - * metric for a training job that was launched by a hyperparameter - * tuning job. You define the objective metric in the - * HyperParameterTuningJobObjective parameter of HyperParameterTuningJobConfig.

        - */ -export interface FinalHyperParameterTuningJobObjectiveMetric { +export interface DeleteArtifactResponse { /** - *

        Whether to - * minimize - * or maximize the objective metric. Valid values are Minimize and - * Maximize.

        + *

        The Amazon Resource Name (ARN) of the artifact.

        */ - Type?: HyperParameterTuningJobObjectiveType | string; + ArtifactArn?: string; +} + +export namespace DeleteArtifactResponse { + export const filterSensitiveLog = (obj: DeleteArtifactResponse): any => ({ + ...obj, + }); +} +export interface DeleteAssociationRequest { /** - *

        The name of the - * objective - * metric.

        + *

        The ARN of the source.

        */ - MetricName: string | undefined; + SourceArn: string | undefined; /** - *

        The value of the objective metric.

        + *

        The Amazon Resource Name (ARN) of the destination.

        */ - Value: number | undefined; + DestinationArn: string | undefined; } -export namespace FinalHyperParameterTuningJobObjectiveMetric { - export const filterSensitiveLog = (obj: FinalHyperParameterTuningJobObjectiveMetric): any => ({ +export namespace DeleteAssociationRequest { + export const filterSensitiveLog = (obj: DeleteAssociationRequest): any => ({ ...obj, }); } -export enum TrainingJobStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", -} - -/** - *

        Specifies - * summary information about a training job.

        - */ -export interface HyperParameterTrainingJobSummary { - /** - *

        The training job definition name.

        - */ - TrainingJobDefinitionName?: string; - - /** - *

        The name of the training job.

        - */ - TrainingJobName: string | undefined; - - /** - *

        The - * Amazon - * Resource Name (ARN) of the training job.

        - */ - TrainingJobArn: string | undefined; - +export interface DeleteAssociationResponse { /** - *

        The HyperParameter tuning job that launched the training job.

        - */ - TuningJobName?: string; - - /** - *

        The date and time that the training job was created.

        + *

        The ARN of the source.

        */ - CreationTime: Date | undefined; + SourceArn?: string; /** - *

        The date and time that the training job started.

        + *

        The Amazon Resource Name (ARN) of the destination.

        */ - TrainingStartTime?: Date; + DestinationArn?: string; +} - /** - *

        Specifies the time when the training job ends on training instances. You are billed - * for the time interval between the value of TrainingStartTime and this time. - * For successful jobs and stopped jobs, this is the time after model artifacts are - * uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

        - */ - TrainingEndTime?: Date; +export namespace DeleteAssociationResponse { + export const filterSensitiveLog = (obj: DeleteAssociationResponse): any => ({ + ...obj, + }); +} +export interface DeleteCodeRepositoryInput { /** - *

        The - * status - * of the training job.

        + *

        The name of the Git repository to delete.

        */ - TrainingJobStatus: TrainingJobStatus | string | undefined; + CodeRepositoryName: string | undefined; +} - /** - *

        A - * list of the hyperparameters for which you specified ranges to - * search.

        - */ - TunedHyperParameters: { [key: string]: string } | undefined; +export namespace DeleteCodeRepositoryInput { + export const filterSensitiveLog = (obj: DeleteCodeRepositoryInput): any => ({ + ...obj, + }); +} +export interface DeleteContextRequest { /** - *

        The - * reason that the training job failed. - *

        + *

        The name of the context to delete.

        */ - FailureReason?: string; + ContextName: string | undefined; +} - /** - *

        The FinalHyperParameterTuningJobObjectiveMetric object that - * specifies the - * value - * of the - * objective - * metric of the tuning job that launched this training job.

        - */ - FinalHyperParameterTuningJobObjectiveMetric?: FinalHyperParameterTuningJobObjectiveMetric; +export namespace DeleteContextRequest { + export const filterSensitiveLog = (obj: DeleteContextRequest): any => ({ + ...obj, + }); +} +export interface DeleteContextResponse { /** - *

        The status of the objective metric for the training job:

        - *
          - *
        • - *

          Succeeded: The - * final - * objective metric for the training job was evaluated by the - * hyperparameter tuning job and - * used - * in the hyperparameter tuning process.

          - *
        • - *
        - *
          - *
        • - *

          Pending: The training job is in progress and evaluation of its final objective - * metric is pending.

          - *
        • - *
        - *
          - *
        • - *

          Failed: - * The final objective metric for the training job was not evaluated, and was not - * used in the hyperparameter tuning process. This typically occurs when the - * training job failed or did not emit an objective - * metric.

          - *
        • - *
        + *

        The Amazon Resource Name (ARN) of the context.

        */ - ObjectiveStatus?: ObjectiveStatus | string; + ContextArn?: string; } -export namespace HyperParameterTrainingJobSummary { - export const filterSensitiveLog = (obj: HyperParameterTrainingJobSummary): any => ({ +export namespace DeleteContextResponse { + export const filterSensitiveLog = (obj: DeleteContextResponse): any => ({ ...obj, }); } -export enum HyperParameterTuningJobStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", +export enum RetentionType { + Delete = "Delete", + Retain = "Retain", } /** - *

        Specifies the number of training jobs that this hyperparameter tuning job launched, - * categorized by the status of their objective metric. The objective metric status shows - * whether the - * final - * objective metric for the training job has been evaluated by the - * tuning job and used in the hyperparameter tuning process.

        + *

        The retention policy for data stored on an Amazon Elastic File System (EFS) volume.

        */ -export interface ObjectiveStatusCounters { +export interface RetentionPolicy { /** - *

        The number of training jobs whose final objective metric was evaluated by the - * hyperparameter tuning job and used in the hyperparameter tuning process.

        + *

        The default is Retain, which specifies to keep the data stored on the EFS volume.

        + *

        Specify Delete to delete the data stored on the EFS volume.

        */ - Succeeded?: number; + HomeEfsFileSystem?: RetentionType | string; +} + +export namespace RetentionPolicy { + export const filterSensitiveLog = (obj: RetentionPolicy): any => ({ + ...obj, + }); +} +export interface DeleteDomainRequest { /** - *

        The number of training jobs that are in progress and pending evaluation of their final - * objective metric.

        + *

        The domain ID.

        */ - Pending?: number; + DomainId: string | undefined; /** - *

        The number of training jobs whose final objective metric was not evaluated and used in - * the hyperparameter tuning process. This typically occurs when the training job failed or - * did not emit an objective metric.

        + *

        The retention policy for this domain, which specifies whether resources will be retained after the Domain is deleted. + * By default, all resources are retained (not automatically deleted). + *

        */ - Failed?: number; + RetentionPolicy?: RetentionPolicy; } -export namespace ObjectiveStatusCounters { - export const filterSensitiveLog = (obj: ObjectiveStatusCounters): any => ({ +export namespace DeleteDomainRequest { + export const filterSensitiveLog = (obj: DeleteDomainRequest): any => ({ ...obj, }); } -/** - *

        The numbers of training jobs launched by a hyperparameter tuning job, categorized by - * status.

        - */ -export interface TrainingJobStatusCounters { +export interface DeleteEndpointInput { /** - *

        The number of completed training jobs launched by the hyperparameter tuning - * job.

        + *

        The name of the endpoint that you want to delete.

        */ - Completed?: number; + EndpointName: string | undefined; +} - /** - *

        The number of in-progress training jobs launched by a hyperparameter tuning - * job.

        - */ - InProgress?: number; +export namespace DeleteEndpointInput { + export const filterSensitiveLog = (obj: DeleteEndpointInput): any => ({ + ...obj, + }); +} +export interface DeleteEndpointConfigInput { /** - *

        The number of training jobs that failed, but can be retried. A failed training job can - * be retried only if it failed because an internal service error occurred.

        + *

        The name of the endpoint configuration that you want to delete.

        */ - RetryableError?: number; + EndpointConfigName: string | undefined; +} - /** - *

        The number of training jobs that failed and can't be retried. A failed training job - * can't be retried if it failed because a client error occurred.

        - */ - NonRetryableError?: number; +export namespace DeleteEndpointConfigInput { + export const filterSensitiveLog = (obj: DeleteEndpointConfigInput): any => ({ + ...obj, + }); +} +export interface DeleteExperimentRequest { /** - *

        The number of training jobs launched by a hyperparameter tuning job that were - * manually - * stopped.

        + *

        The name of the experiment to delete.

        */ - Stopped?: number; + ExperimentName: string | undefined; } -export namespace TrainingJobStatusCounters { - export const filterSensitiveLog = (obj: TrainingJobStatusCounters): any => ({ +export namespace DeleteExperimentRequest { + export const filterSensitiveLog = (obj: DeleteExperimentRequest): any => ({ ...obj, }); } -export interface DescribeHyperParameterTuningJobResponse { +export interface DeleteExperimentResponse { /** - *

        The name of the tuning job.

        + *

        The Amazon Resource Name (ARN) of the experiment that is being deleted.

        */ - HyperParameterTuningJobName: string | undefined; + ExperimentArn?: string; +} - /** - *

        The - * Amazon Resource Name (ARN) of the tuning job.

        - */ - HyperParameterTuningJobArn: string | undefined; +export namespace DeleteExperimentResponse { + export const filterSensitiveLog = (obj: DeleteExperimentResponse): any => ({ + ...obj, + }); +} +export interface DeleteFeatureGroupRequest { /** - *

        The HyperParameterTuningJobConfig object that specifies the - * configuration of the tuning job.

        + *

        The name of the FeatureGroup you want to delete. The name must be unique + * within an AWS Region in an AWS account.

        */ - HyperParameterTuningJobConfig: HyperParameterTuningJobConfig | undefined; + FeatureGroupName: string | undefined; +} - /** - *

        The HyperParameterTrainingJobDefinition object that specifies the - * definition of the training jobs that this tuning job launches.

        - */ - TrainingJobDefinition?: HyperParameterTrainingJobDefinition; +export namespace DeleteFeatureGroupRequest { + export const filterSensitiveLog = (obj: DeleteFeatureGroupRequest): any => ({ + ...obj, + }); +} +export interface DeleteFlowDefinitionRequest { /** - *

        A list of the HyperParameterTrainingJobDefinition objects launched - * for this tuning job.

        + *

        The name of the flow definition you are deleting.

        */ - TrainingJobDefinitions?: HyperParameterTrainingJobDefinition[]; + FlowDefinitionName: string | undefined; +} - /** - *

        The status of the tuning job: InProgress, Completed, Failed, Stopping, or - * Stopped.

        - */ - HyperParameterTuningJobStatus: HyperParameterTuningJobStatus | string | undefined; +export namespace DeleteFlowDefinitionRequest { + export const filterSensitiveLog = (obj: DeleteFlowDefinitionRequest): any => ({ + ...obj, + }); +} - /** - *

        The date and time that the tuning job started.

        - */ - CreationTime: Date | undefined; +export interface DeleteFlowDefinitionResponse {} - /** - *

        The date and time that the tuning job ended.

        - */ - HyperParameterTuningEndTime?: Date; +export namespace DeleteFlowDefinitionResponse { + export const filterSensitiveLog = (obj: DeleteFlowDefinitionResponse): any => ({ + ...obj, + }); +} +export interface DeleteHumanTaskUiRequest { /** - *

        The date and time that the status of the tuning job was modified.

        - */ - LastModifiedTime?: Date; - - /** - *

        The TrainingJobStatusCounters object that specifies the number of - * training jobs, categorized by status, that this tuning job launched.

        - */ - TrainingJobStatusCounters: TrainingJobStatusCounters | undefined; - - /** - *

        The ObjectiveStatusCounters object that specifies the number of - * training jobs, categorized by the status of their final objective metric, that this - * tuning job launched.

        - */ - ObjectiveStatusCounters: ObjectiveStatusCounters | undefined; - - /** - *

        A TrainingJobSummary object that describes the training job that - * completed with the best current HyperParameterTuningJobObjective.

        - */ - BestTrainingJob?: HyperParameterTrainingJobSummary; - - /** - *

        If the hyperparameter tuning job is an warm start tuning job with a - * WarmStartType of IDENTICAL_DATA_AND_ALGORITHM, this is the - * TrainingJobSummary for the training job with the best objective - * metric value of all training jobs launched by this tuning job and all parent jobs - * specified for the warm start tuning job.

        - */ - OverallBestTrainingJob?: HyperParameterTrainingJobSummary; - - /** - *

        The configuration for starting the hyperparameter parameter tuning job using one or - * more previous tuning jobs as a starting point. The results of previous tuning jobs are - * used to inform which combinations of hyperparameters to search over in the new tuning - * job.

        + *

        The name of the human task user interface (work task template) you want to delete.

        */ - WarmStartConfig?: HyperParameterTuningJobWarmStartConfig; + HumanTaskUiName: string | undefined; +} - /** - *

        If the tuning job failed, the reason it failed.

        - */ - FailureReason?: string; +export namespace DeleteHumanTaskUiRequest { + export const filterSensitiveLog = (obj: DeleteHumanTaskUiRequest): any => ({ + ...obj, + }); } -export namespace DescribeHyperParameterTuningJobResponse { - export const filterSensitiveLog = (obj: DescribeHyperParameterTuningJobResponse): any => ({ +export interface DeleteHumanTaskUiResponse {} + +export namespace DeleteHumanTaskUiResponse { + export const filterSensitiveLog = (obj: DeleteHumanTaskUiResponse): any => ({ ...obj, }); } -export interface DescribeImageRequest { +export interface DeleteImageRequest { /** - *

        The name of the image to describe.

        + *

        The name of the image to delete.

        */ ImageName: string | undefined; } -export namespace DescribeImageRequest { - export const filterSensitiveLog = (obj: DescribeImageRequest): any => ({ +export namespace DeleteImageRequest { + export const filterSensitiveLog = (obj: DeleteImageRequest): any => ({ ...obj, }); } -export enum ImageStatus { - CREATED = "CREATED", - CREATE_FAILED = "CREATE_FAILED", - CREATING = "CREATING", - DELETE_FAILED = "DELETE_FAILED", - DELETING = "DELETING", - UPDATE_FAILED = "UPDATE_FAILED", - UPDATING = "UPDATING", -} +export interface DeleteImageResponse {} -export interface DescribeImageResponse { - /** - *

        When the image was created.

        - */ - CreationTime?: Date; +export namespace DeleteImageResponse { + export const filterSensitiveLog = (obj: DeleteImageResponse): any => ({ + ...obj, + }); +} +export interface DeleteImageVersionRequest { /** - *

        The description of the image.

        + *

        The name of the image.

        */ - Description?: string; + ImageName: string | undefined; /** - *

        The name of the image as displayed.

        + *

        The version to delete.

        */ - DisplayName?: string; + Version: number | undefined; +} - /** - *

        When a create, update, or delete operation fails, the reason for the failure.

        - */ - FailureReason?: string; +export namespace DeleteImageVersionRequest { + export const filterSensitiveLog = (obj: DeleteImageVersionRequest): any => ({ + ...obj, + }); +} - /** - *

        The Amazon Resource Name (ARN) of the image.

        - */ - ImageArn?: string; +export interface DeleteImageVersionResponse {} - /** - *

        The name of the image.

        - */ - ImageName?: string; +export namespace DeleteImageVersionResponse { + export const filterSensitiveLog = (obj: DeleteImageVersionResponse): any => ({ + ...obj, + }); +} +export interface DeleteModelInput { /** - *

        The status of the image.

        + *

        The name of the model to delete.

        */ - ImageStatus?: ImageStatus | string; + ModelName: string | undefined; +} - /** - *

        When the image was last modified.

        - */ - LastModifiedTime?: Date; +export namespace DeleteModelInput { + export const filterSensitiveLog = (obj: DeleteModelInput): any => ({ + ...obj, + }); +} +export interface DeleteModelPackageInput { /** - *

        The Amazon Resource Name (ARN) of the IAM role that enables Amazon SageMaker to perform tasks on your behalf.

        + *

        The name of the model package. The name must have 1 to 63 characters. Valid characters + * are a-z, A-Z, 0-9, and - (hyphen).

        */ - RoleArn?: string; + ModelPackageName: string | undefined; } -export namespace DescribeImageResponse { - export const filterSensitiveLog = (obj: DescribeImageResponse): any => ({ +export namespace DeleteModelPackageInput { + export const filterSensitiveLog = (obj: DeleteModelPackageInput): any => ({ ...obj, }); } -export interface DescribeImageVersionRequest { - /** - *

        The name of the image.

        - */ - ImageName: string | undefined; - +export interface DeleteModelPackageGroupInput { /** - *

        The version of the image. If not specified, the latest version is described.

        + *

        The name of the model group to delete.

        */ - Version?: number; + ModelPackageGroupName: string | undefined; } -export namespace DescribeImageVersionRequest { - export const filterSensitiveLog = (obj: DescribeImageVersionRequest): any => ({ +export namespace DeleteModelPackageGroupInput { + export const filterSensitiveLog = (obj: DeleteModelPackageGroupInput): any => ({ ...obj, }); } -export enum ImageVersionStatus { - CREATED = "CREATED", - CREATE_FAILED = "CREATE_FAILED", - CREATING = "CREATING", - DELETE_FAILED = "DELETE_FAILED", - DELETING = "DELETING", -} - -export interface DescribeImageVersionResponse { +export interface DeleteModelPackageGroupPolicyInput { /** - *

        The registry path of the container image on which this image version is based.

        + *

        The name of the model group for which to delete the policy.

        */ - BaseImage?: string; + ModelPackageGroupName: string | undefined; +} - /** - *

        The registry path of the container image that contains this image version.

        - */ - ContainerImage?: string; +export namespace DeleteModelPackageGroupPolicyInput { + export const filterSensitiveLog = (obj: DeleteModelPackageGroupPolicyInput): any => ({ + ...obj, + }); +} +export interface DeleteMonitoringScheduleRequest { /** - *

        When the version was created.

        + *

        The name of the monitoring schedule to delete.

        */ - CreationTime?: Date; + MonitoringScheduleName: string | undefined; +} - /** - *

        When a create or delete operation fails, the reason for the failure.

        - */ - FailureReason?: string; +export namespace DeleteMonitoringScheduleRequest { + export const filterSensitiveLog = (obj: DeleteMonitoringScheduleRequest): any => ({ + ...obj, + }); +} +export interface DeleteNotebookInstanceInput { /** - *

        The Amazon Resource Name (ARN) of the image the version is based on.

        + *

        The name of the Amazon SageMaker notebook instance to delete.

        */ - ImageArn?: string; + NotebookInstanceName: string | undefined; +} - /** - *

        The ARN of the version.

        - */ - ImageVersionArn?: string; +export namespace DeleteNotebookInstanceInput { + export const filterSensitiveLog = (obj: DeleteNotebookInstanceInput): any => ({ + ...obj, + }); +} +export interface DeleteNotebookInstanceLifecycleConfigInput { /** - *

        The status of the version.

        + *

        The name of the lifecycle configuration to delete.

        */ - ImageVersionStatus?: ImageVersionStatus | string; + NotebookInstanceLifecycleConfigName: string | undefined; +} + +export namespace DeleteNotebookInstanceLifecycleConfigInput { + export const filterSensitiveLog = (obj: DeleteNotebookInstanceLifecycleConfigInput): any => ({ + ...obj, + }); +} +export interface DeletePipelineRequest { /** - *

        When the version was last modified.

        + *

        The name of the pipeline to delete.

        */ - LastModifiedTime?: Date; + PipelineName: string | undefined; /** - *

        The version number.

        + *

        A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * operation. An idempotent operation completes no more than one time.

        */ - Version?: number; + ClientRequestToken?: string; } -export namespace DescribeImageVersionResponse { - export const filterSensitiveLog = (obj: DescribeImageVersionResponse): any => ({ +export namespace DeletePipelineRequest { + export const filterSensitiveLog = (obj: DeletePipelineRequest): any => ({ ...obj, }); } -export interface DescribeLabelingJobRequest { +export interface DeletePipelineResponse { /** - *

        The name of the labeling job to return information for.

        + *

        The Amazon Resource Name (ARN) of the pipeline to delete.

        */ - LabelingJobName: string | undefined; + PipelineArn?: string; } -export namespace DescribeLabelingJobRequest { - export const filterSensitiveLog = (obj: DescribeLabelingJobRequest): any => ({ +export namespace DeletePipelineResponse { + export const filterSensitiveLog = (obj: DeletePipelineResponse): any => ({ ...obj, }); } -/** - *

        Provides a breakdown of the number of objects labeled.

        - */ -export interface LabelCounters { - /** - *

        The total number of objects labeled.

        - */ - TotalLabeled?: number; - +export interface DeleteProjectInput { /** - *

        The total number of objects labeled by a human worker.

        + *

        The name of the project to delete.

        */ - HumanLabeled?: number; + ProjectName: string | undefined; +} - /** - *

        The total number of objects labeled by automated data labeling.

        - */ - MachineLabeled?: number; +export namespace DeleteProjectInput { + export const filterSensitiveLog = (obj: DeleteProjectInput): any => ({ + ...obj, + }); +} +export interface DeleteTagsInput { /** - *

        The total number of objects that could not be labeled due to an error.

        + *

        The Amazon Resource Name (ARN) of the resource whose tags you want to + * delete.

        */ - FailedNonRetryableError?: number; + ResourceArn: string | undefined; /** - *

        The total number of objects not yet labeled.

        + *

        An array or one or more tag keys to delete.

        */ - Unlabeled?: number; + TagKeys: string[] | undefined; } -export namespace LabelCounters { - export const filterSensitiveLog = (obj: LabelCounters): any => ({ +export namespace DeleteTagsInput { + export const filterSensitiveLog = (obj: DeleteTagsInput): any => ({ ...obj, }); } -/** - *

        Specifies the location of the output produced by the labeling job.

        - */ -export interface LabelingJobOutput { +export interface DeleteTagsOutput {} + +export namespace DeleteTagsOutput { + export const filterSensitiveLog = (obj: DeleteTagsOutput): any => ({ + ...obj, + }); +} + +export interface DeleteTrialRequest { /** - *

        The Amazon S3 bucket location of the manifest file for labeled data.

        + *

        The name of the trial to delete.

        */ - OutputDatasetS3Uri: string | undefined; + TrialName: string | undefined; +} + +export namespace DeleteTrialRequest { + export const filterSensitiveLog = (obj: DeleteTrialRequest): any => ({ + ...obj, + }); +} +export interface DeleteTrialResponse { /** - *

        The Amazon Resource Name (ARN) for the most recent Amazon SageMaker model trained as part of - * automated data labeling.

        + *

        The Amazon Resource Name (ARN) of the trial that is being deleted.

        */ - FinalActiveLearningModelArn?: string; + TrialArn?: string; } -export namespace LabelingJobOutput { - export const filterSensitiveLog = (obj: LabelingJobOutput): any => ({ +export namespace DeleteTrialResponse { + export const filterSensitiveLog = (obj: DeleteTrialResponse): any => ({ ...obj, }); } -export enum LabelingJobStatus { - COMPLETED = "Completed", - FAILED = "Failed", - INITIALIZING = "Initializing", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", +export interface DeleteTrialComponentRequest { + /** + *

        The name of the component to delete.

        + */ + TrialComponentName: string | undefined; } -export interface DescribeLabelingJobResponse { +export namespace DeleteTrialComponentRequest { + export const filterSensitiveLog = (obj: DeleteTrialComponentRequest): any => ({ + ...obj, + }); +} + +export interface DeleteTrialComponentResponse { /** - *

        The processing status of the labeling job.

        + *

        The Amazon Resource Name (ARN) of the component is being deleted.

        */ - LabelingJobStatus: LabelingJobStatus | string | undefined; + TrialComponentArn?: string; +} + +export namespace DeleteTrialComponentResponse { + export const filterSensitiveLog = (obj: DeleteTrialComponentResponse): any => ({ + ...obj, + }); +} +export interface DeleteUserProfileRequest { /** - *

        Provides a breakdown of the number of data objects labeled by humans, the number of - * objects labeled by machine, the number of objects than couldn't be labeled, and the - * total number of objects labeled.

        + *

        The domain ID.

        */ - LabelCounters: LabelCounters | undefined; + DomainId: string | undefined; /** - *

        If the job failed, the reason that it failed.

        + *

        The user profile name.

        */ - FailureReason?: string; + UserProfileName: string | undefined; +} + +export namespace DeleteUserProfileRequest { + export const filterSensitiveLog = (obj: DeleteUserProfileRequest): any => ({ + ...obj, + }); +} +export interface DeleteWorkforceRequest { /** - *

        The date and time that the labeling job was created.

        + *

        The name of the workforce.

        */ - CreationTime: Date | undefined; + WorkforceName: string | undefined; +} + +export namespace DeleteWorkforceRequest { + export const filterSensitiveLog = (obj: DeleteWorkforceRequest): any => ({ + ...obj, + }); +} + +export interface DeleteWorkforceResponse {} + +export namespace DeleteWorkforceResponse { + export const filterSensitiveLog = (obj: DeleteWorkforceResponse): any => ({ + ...obj, + }); +} +export interface DeleteWorkteamRequest { /** - *

        The date and time that the labeling job was last updated.

        + *

        The name of the work team to delete.

        */ - LastModifiedTime: Date | undefined; + WorkteamName: string | undefined; +} + +export namespace DeleteWorkteamRequest { + export const filterSensitiveLog = (obj: DeleteWorkteamRequest): any => ({ + ...obj, + }); +} +export interface DeleteWorkteamResponse { /** - *

        A unique identifier for work done as part of a labeling job.

        + *

        Returns true if the work team was successfully deleted; otherwise, + * returns false.

        */ - JobReferenceCode: string | undefined; + Success: boolean | undefined; +} + +export namespace DeleteWorkteamResponse { + export const filterSensitiveLog = (obj: DeleteWorkteamResponse): any => ({ + ...obj, + }); +} +/** + *

        Gets the Amazon EC2 Container Registry path of the docker image of the model that is hosted in this ProductionVariant.

        + *

        If you used the registry/repository[:tag] form to specify the image path + * of the primary container when you created the model hosted in this + * ProductionVariant, the path resolves to a path of the form + * registry/repository[@digest]. A digest is a hash value that identifies + * a specific version of an image. For information about Amazon ECR paths, see Pulling an Image in the Amazon ECR User Guide.

        + */ +export interface DeployedImage { /** - *

        The name assigned to the labeling job when it was created.

        + *

        The image path you specified when you created the model.

        */ - LabelingJobName: string | undefined; + SpecifiedImage?: string; /** - *

        The Amazon Resource Name (ARN) of the labeling job.

        + *

        The specific digest path of the image hosted in this + * ProductionVariant.

        */ - LabelingJobArn: string | undefined; + ResolvedImage?: string; /** - *

        The attribute used as the label in the output manifest file.

        + *

        The date and time when the image path for the model resolved to the + * ResolvedImage + *

        */ - LabelAttributeName?: string; + ResolutionTime?: Date; +} + +export namespace DeployedImage { + export const filterSensitiveLog = (obj: DeployedImage): any => ({ + ...obj, + }); +} +/** + *

        Currently, the DeploymentConfig API is not supported.

        + */ +export interface DeploymentConfig { /** - *

        Input configuration information for the labeling job, such as the Amazon S3 location of the - * data objects and the location of the manifest file that describes the data - * objects.

        + *

        */ - InputConfig: LabelingJobInputConfig | undefined; + BlueGreenUpdatePolicy: BlueGreenUpdatePolicy | undefined; /** - *

        The location of the job's output data and the AWS Key Management Service key ID for the key used to - * encrypt the output data, if any.

        + *

        */ - OutputConfig: LabelingJobOutputConfig | undefined; + AutoRollbackConfiguration?: AutoRollbackConfig; +} + +export namespace DeploymentConfig { + export const filterSensitiveLog = (obj: DeploymentConfig): any => ({ + ...obj, + }); +} +export interface DescribeActionRequest { /** - *

        The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf - * during data labeling.

        + *

        The name of the action to describe.

        */ - RoleArn: string | undefined; + ActionName: string | undefined; +} + +export namespace DescribeActionRequest { + export const filterSensitiveLog = (obj: DescribeActionRequest): any => ({ + ...obj, + }); +} +export interface DescribeActionResponse { /** - *

        The S3 location of the JSON file that defines the categories used to label data - * objects. Please note the following label-category limits:

        - *
          - *
        • - *

          Semantic segmentation labeling jobs using automated labeling: 20 labels

          - *
        • - *
        • - *

          Box bounding labeling jobs (all): 10 labels

          - *
        • - *
        - *

        The file is a JSON structure in the following format:

        - *

        - * { - *

        - *

        - * "document-version": "2018-11-28" - *

        - *

        - * "labels": [ - *

        - *

        - * { - *

        - *

        - * "label": "label 1" - *

        - *

        - * }, - *

        - *

        - * { - *

        - *

        - * "label": "label 2" - *

        - *

        - * }, - *

        - *

        - * ... - *

        - *

        - * { - *

        - *

        - * "label": "label n" - *

        - *

        - * } - *

        - *

        - * ] - *

        - *

        - * } - *

        + *

        The name of the action.

        */ - LabelCategoryConfigS3Uri?: string; + ActionName?: string; /** - *

        A set of conditions for stopping a labeling job. If any of the conditions are met, the - * job is automatically stopped.

        + *

        The Amazon Resource Name (ARN) of the action.

        */ - StoppingConditions?: LabelingJobStoppingConditions; + ActionArn?: string; /** - *

        Configuration information for automated data labeling.

        + *

        The source of the action.

        */ - LabelingJobAlgorithmsConfig?: LabelingJobAlgorithmsConfig; + Source?: ActionSource; /** - *

        Configuration information required for human workers to complete a labeling - * task.

        + *

        The type of the action.

        */ - HumanTaskConfig: HumanTaskConfig | undefined; + ActionType?: string; /** - *

        An array of key/value pairs. For more information, see Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide.

        + *

        The description of the action.

        */ - Tags?: Tag[]; + Description?: string; /** - *

        The location of the output produced by the labeling job.

        + *

        The status of the action.

        */ - LabelingJobOutput?: LabelingJobOutput; -} + Status?: ActionStatus | string; -export namespace DescribeLabelingJobResponse { - export const filterSensitiveLog = (obj: DescribeLabelingJobResponse): any => ({ + /** + *

        A list of the action's properties.

        + */ + Properties?: { [key: string]: string }; + + /** + *

        When the action was created.

        + */ + CreationTime?: Date; + + /** + *

        Information about the user who created or modified an experiment, trial, or trial + * component.

        + */ + CreatedBy?: UserContext; + + /** + *

        When the action was last modified.

        + */ + LastModifiedTime?: Date; + + /** + *

        Information about the user who created or modified an experiment, trial, or trial + * component.

        + */ + LastModifiedBy?: UserContext; + + /** + *

        Metadata properties of the tracking entity, trial, or trial component.

        + */ + MetadataProperties?: MetadataProperties; +} + +export namespace DescribeActionResponse { + export const filterSensitiveLog = (obj: DescribeActionResponse): any => ({ ...obj, }); } -export interface DescribeModelInput { +export interface DescribeAlgorithmInput { /** - *

        The name of the model.

        + *

        The name of the algorithm to describe.

        */ - ModelName: string | undefined; + AlgorithmName: string | undefined; } -export namespace DescribeModelInput { - export const filterSensitiveLog = (obj: DescribeModelInput): any => ({ +export namespace DescribeAlgorithmInput { + export const filterSensitiveLog = (obj: DescribeAlgorithmInput): any => ({ ...obj, }); } -export interface DescribeModelOutput { +export interface DescribeAlgorithmOutput { /** - *

        Name of the Amazon SageMaker model.

        + *

        The name of the algorithm being described.

        */ - ModelName: string | undefined; + AlgorithmName: string | undefined; /** - *

        The location of the primary inference code, associated artifacts, and custom - * environment map that the inference code uses when it is deployed in production. - *

        + *

        The Amazon Resource Name (ARN) of the algorithm.

        */ - PrimaryContainer?: ContainerDefinition; + AlgorithmArn: string | undefined; /** - *

        The containers in the inference pipeline.

        + *

        A brief summary about the algorithm.

        */ - Containers?: ContainerDefinition[]; + AlgorithmDescription?: string; /** - *

        The Amazon Resource Name (ARN) of the IAM role that you specified for the - * model.

        + *

        A timestamp specifying when the algorithm was created.

        */ - ExecutionRoleArn: string | undefined; + CreationTime: Date | undefined; /** - *

        A VpcConfig object that specifies the VPC that this model has access - * to. For more information, see Protect Endpoints by Using an Amazon Virtual - * Private Cloud - *

        + *

        Details about training jobs run by this algorithm.

        */ - VpcConfig?: VpcConfig; + TrainingSpecification: TrainingSpecification | undefined; /** - *

        A timestamp that shows when the model was created.

        + *

        Details about inference jobs that the algorithm runs.

        */ - CreationTime: Date | undefined; + InferenceSpecification?: InferenceSpecification; /** - *

        The Amazon Resource Name (ARN) of the model.

        + *

        Details about configurations for one or more training jobs that Amazon SageMaker runs to test the + * algorithm.

        */ - ModelArn: string | undefined; + ValidationSpecification?: AlgorithmValidationSpecification; /** - *

        If True, no inbound or outbound network calls can be made to or from the - * model container.

        + *

        The current status of the algorithm.

        */ - EnableNetworkIsolation?: boolean; -} + AlgorithmStatus: AlgorithmStatus | string | undefined; -export namespace DescribeModelOutput { - export const filterSensitiveLog = (obj: DescribeModelOutput): any => ({ - ...obj, - }); -} + /** + *

        Details about the current status of the algorithm.

        + */ + AlgorithmStatusDetails: AlgorithmStatusDetails | undefined; -export interface DescribeModelPackageInput { /** - *

        The name of the model package to describe.

        + *

        The product identifier of the algorithm.

        */ - ModelPackageName: string | undefined; + ProductId?: string; + + /** + *

        Whether the algorithm is certified to be listed in AWS Marketplace.

        + */ + CertifyForMarketplace?: boolean; } -export namespace DescribeModelPackageInput { - export const filterSensitiveLog = (obj: DescribeModelPackageInput): any => ({ +export namespace DescribeAlgorithmOutput { + export const filterSensitiveLog = (obj: DescribeAlgorithmOutput): any => ({ ...obj, }); } -export enum ModelPackageStatus { - COMPLETED = "Completed", - DELETING = "Deleting", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - PENDING = "Pending", -} - -export enum DetailedModelPackageStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - NOT_STARTED = "NotStarted", -} +export interface DescribeAppRequest { + /** + *

        The domain ID.

        + */ + DomainId: string | undefined; -/** - *

        Represents the overall status of a model package.

        - */ -export interface ModelPackageStatusItem { /** - *

        The name of the model package for which the overall status is being reported.

        + *

        The user profile name.

        */ - Name: string | undefined; + UserProfileName: string | undefined; /** - *

        The current status.

        + *

        The type of app.

        */ - Status: DetailedModelPackageStatus | string | undefined; + AppType: AppType | string | undefined; /** - *

        if the overall status is Failed, the reason for the failure.

        + *

        The name of the app.

        */ - FailureReason?: string; + AppName: string | undefined; } -export namespace ModelPackageStatusItem { - export const filterSensitiveLog = (obj: ModelPackageStatusItem): any => ({ +export namespace DescribeAppRequest { + export const filterSensitiveLog = (obj: DescribeAppRequest): any => ({ ...obj, }); } -/** - *

        Specifies the validation and image scan statuses of the model package.

        - */ -export interface ModelPackageStatusDetails { - /** - *

        The validation status of the model package.

        - */ - ValidationStatuses: ModelPackageStatusItem[] | undefined; - +export interface DescribeAppResponse { /** - *

        The status of the scan of the Docker image container for the model package.

        + *

        The Amazon Resource Name (ARN) of the app.

        */ - ImageScanStatuses?: ModelPackageStatusItem[]; -} - -export namespace ModelPackageStatusDetails { - export const filterSensitiveLog = (obj: ModelPackageStatusDetails): any => ({ - ...obj, - }); -} + AppArn?: string; -export interface DescribeModelPackageOutput { /** - *

        The name of the model package being described.

        + *

        The type of app.

        */ - ModelPackageName: string | undefined; + AppType?: AppType | string; /** - *

        The Amazon Resource Name (ARN) of the model package.

        + *

        The name of the app.

        */ - ModelPackageArn: string | undefined; + AppName?: string; /** - *

        A brief summary of the model package.

        + *

        The domain ID.

        */ - ModelPackageDescription?: string; + DomainId?: string; /** - *

        A timestamp specifying when the model package was created.

        + *

        The user profile name.

        */ - CreationTime: Date | undefined; + UserProfileName?: string; /** - *

        Details about inference jobs that can be run with models based on this model - * package.

        + *

        The status.

        */ - InferenceSpecification?: InferenceSpecification; + Status?: AppStatus | string; /** - *

        Details about the algorithm that was used to create the model package.

        + *

        The timestamp of the last health check.

        */ - SourceAlgorithmSpecification?: SourceAlgorithmSpecification; + LastHealthCheckTimestamp?: Date; /** - *

        Configurations for one or more transform jobs that Amazon SageMaker runs to test the model - * package.

        + *

        The timestamp of the last user's activity.

        */ - ValidationSpecification?: ModelPackageValidationSpecification; + LastUserActivityTimestamp?: Date; /** - *

        The current status of the model package.

        + *

        The creation time.

        */ - ModelPackageStatus: ModelPackageStatus | string | undefined; + CreationTime?: Date; /** - *

        Details about the current status of the model package.

        + *

        The failure reason.

        */ - ModelPackageStatusDetails: ModelPackageStatusDetails | undefined; + FailureReason?: string; /** - *

        Whether the model package is certified for listing on AWS Marketplace.

        + *

        The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance.

        */ - CertifyForMarketplace?: boolean; + ResourceSpec?: ResourceSpec; } -export namespace DescribeModelPackageOutput { - export const filterSensitiveLog = (obj: DescribeModelPackageOutput): any => ({ +export namespace DescribeAppResponse { + export const filterSensitiveLog = (obj: DescribeAppResponse): any => ({ ...obj, }); } -export interface DescribeMonitoringScheduleRequest { +export interface DescribeAppImageConfigRequest { /** - *

        Name of a previously created monitoring schedule.

        + *

        The name of the AppImageConfig to describe.

        */ - MonitoringScheduleName: string | undefined; + AppImageConfigName: string | undefined; } -export namespace DescribeMonitoringScheduleRequest { - export const filterSensitiveLog = (obj: DescribeMonitoringScheduleRequest): any => ({ +export namespace DescribeAppImageConfigRequest { + export const filterSensitiveLog = (obj: DescribeAppImageConfigRequest): any => ({ ...obj, }); } -export enum ExecutionStatus { - COMPLETED = "Completed", - COMPLETED_WITH_VIOLATIONS = "CompletedWithViolations", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - PENDING = "Pending", - STOPPED = "Stopped", - STOPPING = "Stopping", -} - -/** - *

        Summary of information about the last monitoring job to run.

        - */ -export interface MonitoringExecutionSummary { +export interface DescribeAppImageConfigResponse { /** - *

        The name of the monitoring schedule.

        + *

        The Amazon Resource Name (ARN) of the AppImageConfig.

        */ - MonitoringScheduleName: string | undefined; + AppImageConfigArn?: string; /** - *

        The time the monitoring job was scheduled.

        + *

        The name of the AppImageConfig.

        */ - ScheduledTime: Date | undefined; + AppImageConfigName?: string; /** - *

        The time at which the monitoring job was created.

        + *

        When the AppImageConfig was created.

        */ - CreationTime: Date | undefined; + CreationTime?: Date; /** - *

        A timestamp that indicates the last time the monitoring job was modified.

        + *

        When the AppImageConfig was last modified.

        */ - LastModifiedTime: Date | undefined; + LastModifiedTime?: Date; /** - *

        The status of the monitoring job.

        + *

        The configuration of a KernelGateway app.

        */ - MonitoringExecutionStatus: ExecutionStatus | string | undefined; - - /** - *

        The Amazon Resource Name (ARN) of the monitoring job.

        - */ - ProcessingJobArn?: string; + KernelGatewayImageConfig?: KernelGatewayImageConfig; +} - /** - *

        The name of teh endpoint used to run the monitoring job.

        - */ - EndpointName?: string; +export namespace DescribeAppImageConfigResponse { + export const filterSensitiveLog = (obj: DescribeAppImageConfigResponse): any => ({ + ...obj, + }); +} +export interface DescribeArtifactRequest { /** - *

        Contains the reason a monitoring job failed, if it failed.

        + *

        The Amazon Resource Name (ARN) of the artifact to describe.

        */ - FailureReason?: string; + ArtifactArn: string | undefined; } -export namespace MonitoringExecutionSummary { - export const filterSensitiveLog = (obj: MonitoringExecutionSummary): any => ({ +export namespace DescribeArtifactRequest { + export const filterSensitiveLog = (obj: DescribeArtifactRequest): any => ({ ...obj, }); } -export enum ScheduleStatus { - FAILED = "Failed", - PENDING = "Pending", - SCHEDULED = "Scheduled", - STOPPED = "Stopped", -} +export interface DescribeArtifactResponse { + /** + *

        The name of the artifact.

        + */ + ArtifactName?: string; -export interface DescribeMonitoringScheduleResponse { /** - *

        The Amazon Resource Name (ARN) of the monitoring schedule.

        + *

        The Amazon Resource Name (ARN) of the artifact.

        */ - MonitoringScheduleArn: string | undefined; + ArtifactArn?: string; /** - *

        Name of the monitoring schedule.

        + *

        The source of the artifact.

        */ - MonitoringScheduleName: string | undefined; + Source?: ArtifactSource; /** - *

        The status of an monitoring job.

        + *

        The type of the artifact.

        */ - MonitoringScheduleStatus: ScheduleStatus | string | undefined; + ArtifactType?: string; /** - *

        A string, up to one KB in size, that contains the reason a monitoring job failed, if it - * failed.

        + *

        A list of the artifact's properties.

        */ - FailureReason?: string; + Properties?: { [key: string]: string }; /** - *

        The time at which the monitoring job was created.

        + *

        When the artifact was created.

        */ - CreationTime: Date | undefined; + CreationTime?: Date; /** - *

        The time at which the monitoring job was last modified.

        + *

        Information about the user who created or modified an experiment, trial, or trial + * component.

        */ - LastModifiedTime: Date | undefined; + CreatedBy?: UserContext; /** - *

        The configuration object that specifies the monitoring schedule and defines the - * monitoring job.

        + *

        When the artifact was last modified.

        */ - MonitoringScheduleConfig: MonitoringScheduleConfig | undefined; + LastModifiedTime?: Date; /** - *

        The name of the endpoint for the monitoring job.

        + *

        Information about the user who created or modified an experiment, trial, or trial + * component.

        */ - EndpointName?: string; + LastModifiedBy?: UserContext; /** - *

        Describes metadata on the last execution to run, if there was one.

        + *

        Metadata properties of the tracking entity, trial, or trial component.

        */ - LastMonitoringExecutionSummary?: MonitoringExecutionSummary; + MetadataProperties?: MetadataProperties; } -export namespace DescribeMonitoringScheduleResponse { - export const filterSensitiveLog = (obj: DescribeMonitoringScheduleResponse): any => ({ +export namespace DescribeArtifactResponse { + export const filterSensitiveLog = (obj: DescribeArtifactResponse): any => ({ ...obj, }); } -export interface DescribeNotebookInstanceInput { +export interface DescribeAutoMLJobRequest { /** - *

        The name of the notebook instance that you want information about.

        + *

        Request information about a job using that job's unique name.

        */ - NotebookInstanceName: string | undefined; + AutoMLJobName: string | undefined; } -export namespace DescribeNotebookInstanceInput { - export const filterSensitiveLog = (obj: DescribeNotebookInstanceInput): any => ({ +export namespace DescribeAutoMLJobRequest { + export const filterSensitiveLog = (obj: DescribeAutoMLJobRequest): any => ({ ...obj, }); } -export enum NotebookInstanceStatus { - Deleting = "Deleting", - Failed = "Failed", - InService = "InService", - Pending = "Pending", - Stopped = "Stopped", - Stopping = "Stopping", - Updating = "Updating", -} +/** + *

        The resolved attributes.

        + */ +export interface ResolvedAttributes { + /** + *

        Specifies a metric to minimize or maximize as the objective of a job.

        + */ + AutoMLJobObjective?: AutoMLJobObjective; -export interface DescribeNotebookInstanceOutput { /** - *

        The Amazon Resource Name (ARN) of the notebook instance.

        + *

        The problem type.

        */ - NotebookInstanceArn?: string; + ProblemType?: ProblemType | string; /** - *

        The name of the Amazon SageMaker notebook instance.

        + *

        How long a job is allowed to run, or how many candidates a job is allowed to + * generate.

        */ - NotebookInstanceName?: string; + CompletionCriteria?: AutoMLJobCompletionCriteria; +} +export namespace ResolvedAttributes { + export const filterSensitiveLog = (obj: ResolvedAttributes): any => ({ + ...obj, + }); +} + +export interface DescribeAutoMLJobResponse { /** - *

        The status of the notebook instance.

        + *

        Returns the name of a job.

        */ - NotebookInstanceStatus?: NotebookInstanceStatus | string; + AutoMLJobName: string | undefined; /** - *

        If status is Failed, the reason it failed.

        + *

        Returns the job's ARN.

        */ - FailureReason?: string; + AutoMLJobArn: string | undefined; /** - *

        The URL that you use to connect to the Jupyter notebook that is running in your - * notebook instance.

        + *

        Returns the job's input data config.

        */ - Url?: string; + InputDataConfig: AutoMLChannel[] | undefined; /** - *

        The type of ML compute instance running on the notebook instance.

        + *

        Returns the job's output data config.

        */ - InstanceType?: _InstanceType | string; + OutputDataConfig: AutoMLOutputDataConfig | undefined; /** - *

        The ID of the VPC subnet.

        + *

        The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that has read permission to + * the input data location and write permission to the output data location in Amazon S3.

        */ - SubnetId?: string; + RoleArn: string | undefined; /** - *

        The IDs of the VPC security groups.

        + *

        Returns the job's objective.

        */ - SecurityGroups?: string[]; + AutoMLJobObjective?: AutoMLJobObjective; /** - *

        The Amazon Resource Name (ARN) of the IAM role associated with the instance. - *

        + *

        Returns the job's problem type.

        */ - RoleArn?: string; + ProblemType?: ProblemType | string; /** - *

        The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage - * volume attached to the instance.

        + *

        Returns the job's config.

        */ - KmsKeyId?: string; + AutoMLJobConfig?: AutoMLJobConfig; /** - *

        The network interface IDs that Amazon SageMaker created at the time of creating the instance. - *

        + *

        Returns the job's creation time.

        */ - NetworkInterfaceId?: string; + CreationTime: Date | undefined; /** - *

        A timestamp. Use this parameter to retrieve the time when the notebook instance was - * last modified.

        + *

        Returns the job's end time.

        */ - LastModifiedTime?: Date; + EndTime?: Date; /** - *

        A timestamp. Use this parameter to return the time when the notebook instance was - * created

        + *

        Returns the job's last modified time.

        */ - CreationTime?: Date; + LastModifiedTime: Date | undefined; /** - *

        Returns the name of a notebook instance lifecycle configuration.

        - *

        For information about notebook instance lifestyle configurations, see Step - * 2.1: (Optional) Customize a Notebook Instance - *

        + *

        Returns the job's FailureReason.

        */ - NotebookInstanceLifecycleConfigName?: string; + FailureReason?: string; /** - *

        Describes whether Amazon SageMaker provides internet access to the notebook instance. If this - * value is set to Disabled, the notebook instance does not have - * internet access, and cannot connect to Amazon SageMaker training and endpoint services.

        - *

        For more information, see Notebook Instances Are Internet-Enabled by Default.

        + *

        Returns the job's BestCandidate.

        */ - DirectInternetAccess?: DirectInternetAccess | string; + BestCandidate?: AutoMLCandidate; /** - *

        The size, in GB, of the ML storage volume attached to the notebook instance.

        + *

        Returns the job's AutoMLJobStatus.

        */ - VolumeSizeInGB?: number; + AutoMLJobStatus: AutoMLJobStatus | string | undefined; /** - *

        A list of the Elastic Inference (EI) instance types associated with this notebook - * instance. Currently only one EI instance type can be associated with a notebook - * instance. For more information, see Using Elastic Inference in Amazon - * SageMaker.

        + *

        Returns the job's AutoMLJobSecondaryStatus.

        */ - AcceleratorTypes?: (NotebookInstanceAcceleratorType | string)[]; + AutoMLJobSecondaryStatus: AutoMLJobSecondaryStatus | string | undefined; /** - *

        The Git repository associated with the notebook instance as its default code - * repository. This can be either the name of a Git repository stored as a resource in your - * account, or the URL of a Git repository in AWS CodeCommit or in any - * other Git repository. When you open a notebook instance, it opens in the directory that - * contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker - * Notebook Instances.

        + *

        Returns the job's output from GenerateCandidateDefinitionsOnly.

        */ - DefaultCodeRepository?: string; + GenerateCandidateDefinitionsOnly?: boolean; /** - *

        An array of up to three Git repositories associated with the notebook instance. These - * can be either the names of Git repositories stored as resources in your account, or the - * URL of Git repositories in AWS CodeCommit or in any - * other Git repository. These repositories are cloned at the same level as the default - * repository of your notebook instance. For more information, see Associating Git - * Repositories with Amazon SageMaker Notebook Instances.

        + *

        Returns information on the job's artifacts found in AutoMLJobArtifacts.

        */ - AdditionalCodeRepositories?: string[]; + AutoMLJobArtifacts?: AutoMLJobArtifacts; /** - *

        Whether root access is enabled or disabled for users of the notebook instance.

        - * - *

        Lifecycle configurations need root access to be able to set up a notebook - * instance. Because of this, lifecycle configurations associated with a notebook - * instance always run with root access even if you disable root access for - * users.

        - *
        + *

        This contains ProblemType, AutoMLJobObjective and CompletionCriteria. They're + * auto-inferred values, if not provided by you. If you do provide them, then they'll be the + * same as provided.

        */ - RootAccess?: RootAccess | string; + ResolvedAttributes?: ResolvedAttributes; } -export namespace DescribeNotebookInstanceOutput { - export const filterSensitiveLog = (obj: DescribeNotebookInstanceOutput): any => ({ +export namespace DescribeAutoMLJobResponse { + export const filterSensitiveLog = (obj: DescribeAutoMLJobResponse): any => ({ ...obj, }); } -export interface DescribeNotebookInstanceLifecycleConfigInput { +export interface DescribeCodeRepositoryInput { /** - *

        The name of the lifecycle configuration to describe.

        + *

        The name of the Git repository to describe.

        */ - NotebookInstanceLifecycleConfigName: string | undefined; + CodeRepositoryName: string | undefined; } -export namespace DescribeNotebookInstanceLifecycleConfigInput { - export const filterSensitiveLog = (obj: DescribeNotebookInstanceLifecycleConfigInput): any => ({ +export namespace DescribeCodeRepositoryInput { + export const filterSensitiveLog = (obj: DescribeCodeRepositoryInput): any => ({ ...obj, }); } -export interface DescribeNotebookInstanceLifecycleConfigOutput { - /** - *

        The Amazon Resource Name (ARN) of the lifecycle configuration.

        - */ - NotebookInstanceLifecycleConfigArn?: string; - +export interface DescribeCodeRepositoryOutput { /** - *

        The name of the lifecycle configuration.

        + *

        The name of the Git repository.

        */ - NotebookInstanceLifecycleConfigName?: string; + CodeRepositoryName: string | undefined; /** - *

        The shell script that runs only once, when you create a notebook instance.

        + *

        The Amazon Resource Name (ARN) of the Git repository.

        */ - OnCreate?: NotebookInstanceLifecycleHook[]; + CodeRepositoryArn: string | undefined; /** - *

        The shell script that runs every time you start a notebook instance, including when - * you create the notebook instance.

        + *

        The date and time that the repository was created.

        */ - OnStart?: NotebookInstanceLifecycleHook[]; + CreationTime: Date | undefined; /** - *

        A timestamp that tells when the lifecycle configuration was last modified.

        + *

        The date and time that the repository was last changed.

        */ - LastModifiedTime?: Date; + LastModifiedTime: Date | undefined; /** - *

        A timestamp that tells when the lifecycle configuration was created.

        + *

        Configuration details about the repository, including the URL where the repository is + * located, the default branch, and the Amazon Resource Name (ARN) of the AWS Secrets + * Manager secret that contains the credentials used to access the repository.

        */ - CreationTime?: Date; + GitConfig?: GitConfig; } -export namespace DescribeNotebookInstanceLifecycleConfigOutput { - export const filterSensitiveLog = (obj: DescribeNotebookInstanceLifecycleConfigOutput): any => ({ - ...obj, +export namespace DescribeCodeRepositoryOutput { + export const filterSensitiveLog = (obj: DescribeCodeRepositoryOutput): any => ({ + ...obj, }); } -export interface DescribeProcessingJobRequest { +export interface DescribeCompilationJobRequest { /** - *

        The name of the processing job. The name must be unique within an AWS Region in the - * AWS account.

        + *

        The name of the model compilation job that you want information about.

        */ - ProcessingJobName: string | undefined; + CompilationJobName: string | undefined; } -export namespace DescribeProcessingJobRequest { - export const filterSensitiveLog = (obj: DescribeProcessingJobRequest): any => ({ +export namespace DescribeCompilationJobRequest { + export const filterSensitiveLog = (obj: DescribeCompilationJobRequest): any => ({ ...obj, }); } -export enum ProcessingJobStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", -} - -export interface DescribeProcessingJobResponse { +/** + *

        Provides information about the location that is configured for storing model + * artifacts.

        + *

        Model artifacts are the output that results from training a model, and typically + * consist of trained parameters, a model defintion that desribes how to compute + * inferences, and other metadata.

        + */ +export interface ModelArtifacts { /** - *

        The inputs for a processing job.

        + *

        The path of the S3 object that contains the model artifacts. For example, + * s3://bucket-name/keynameprefix/model.tar.gz.

        */ - ProcessingInputs?: ProcessingInput[]; + S3ModelArtifacts: string | undefined; +} + +export namespace ModelArtifacts { + export const filterSensitiveLog = (obj: ModelArtifacts): any => ({ + ...obj, + }); +} +/** + *

        Provides information to verify the integrity of stored model artifacts.

        + */ +export interface ModelDigests { /** - *

        Output configuration for the processing job.

        + *

        Provides a hash value that uniquely identifies the stored model artifacts.

        */ - ProcessingOutputConfig?: ProcessingOutputConfig; + ArtifactDigest?: string; +} +export namespace ModelDigests { + export const filterSensitiveLog = (obj: ModelDigests): any => ({ + ...obj, + }); +} + +export interface DescribeCompilationJobResponse { /** - *

        The name of the processing job. The name must be unique within an AWS Region in the - * AWS account.

        + *

        The name of the model compilation job.

        */ - ProcessingJobName: string | undefined; + CompilationJobName: string | undefined; /** - *

        Identifies the resources, ML compute instances, and ML storage volumes to deploy for a - * processing job. In distributed training, you specify more than one instance.

        + *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker assumes to perform the model + * compilation job.

        */ - ProcessingResources: ProcessingResources | undefined; + CompilationJobArn: string | undefined; /** - *

        The time limit for how long the processing job is allowed to run.

        + *

        The status of the model compilation job.

        */ - StoppingCondition?: ProcessingStoppingCondition; + CompilationJobStatus: CompilationJobStatus | string | undefined; /** - *

        Configures the processing job to run a specified container image.

        + *

        The time when the model compilation job started the CompilationJob + * instances.

        + *

        You are billed for the time between this timestamp and the timestamp in the DescribeCompilationJobResponse$CompilationEndTime field. In Amazon CloudWatch Logs, + * the start time might be later than this time. That's because it takes time to download + * the compilation job, which depends on the size of the compilation job container.

        */ - AppSpecification: AppSpecification | undefined; + CompilationStartTime?: Date; /** - *

        The environment variables set in the Docker container.

        + *

        The time when the model compilation job on a compilation job instance ended. For a + * successful or stopped job, this is when the job's model artifacts have finished + * uploading. For a failed job, this is when Amazon SageMaker detected that the job failed.

        */ - Environment?: { [key: string]: string }; + CompilationEndTime?: Date; /** - *

        Networking options for a processing job.

        + *

        Specifies a limit to how long a model compilation job can run. When the job reaches + * the time limit, Amazon SageMaker ends the compilation job. Use this API to cap model training + * costs.

        */ - NetworkConfig?: NetworkConfig; + StoppingCondition: StoppingCondition | undefined; /** - *

        The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on - * your behalf.

        + *

        The time that the model compilation job was created.

        */ - RoleArn?: string; + CreationTime: Date | undefined; /** - *

        The configuration information used to create an experiment.

        + *

        The time that the status + * of + * the model compilation job was last modified.

        */ - ExperimentConfig?: ExperimentConfig; + LastModifiedTime: Date | undefined; /** - *

        The Amazon Resource Name (ARN) of the processing job.

        + *

        If a model compilation job failed, the reason it failed.

        */ - ProcessingJobArn: string | undefined; + FailureReason: string | undefined; /** - *

        Provides the status of a processing job.

        + *

        Information about the location in Amazon S3 that has been configured for storing the model + * artifacts used in the compilation job.

        */ - ProcessingJobStatus: ProcessingJobStatus | string | undefined; + ModelArtifacts: ModelArtifacts | undefined; /** - *

        An optional string, up to one KB in size, that contains metadata from the processing - * container when the processing job exits.

        + *

        Provides a BLAKE2 hash value that identifies the compiled model artifacts in Amazon S3.

        */ - ExitMessage?: string; + ModelDigests?: ModelDigests; /** - *

        A string, up to one KB in size, that contains the reason a processing job failed, if - * it failed.

        + *

        The Amazon Resource Name (ARN) of the model compilation job.

        */ - FailureReason?: string; + RoleArn: string | undefined; /** - *

        The time at which the processing job completed.

        + *

        Information about the location in Amazon S3 of the input model artifacts, the name and + * shape of the expected data inputs, and the framework in which the model was + * trained.

        */ - ProcessingEndTime?: Date; + InputConfig: InputConfig | undefined; /** - *

        The time at which the processing job started.

        + *

        Information about the output location for the compiled model and the target device + * that the model runs on.

        */ - ProcessingStartTime?: Date; + OutputConfig: OutputConfig | undefined; +} +export namespace DescribeCompilationJobResponse { + export const filterSensitiveLog = (obj: DescribeCompilationJobResponse): any => ({ + ...obj, + }); +} + +export interface DescribeContextRequest { /** - *

        The time at which the processing job was last modified.

        + *

        The name of the context to describe.

        */ - LastModifiedTime?: Date; + ContextName: string | undefined; +} + +export namespace DescribeContextRequest { + export const filterSensitiveLog = (obj: DescribeContextRequest): any => ({ + ...obj, + }); +} +export interface DescribeContextResponse { /** - *

        The time at which the processing job was created.

        + *

        The name of the context.

        */ - CreationTime: Date | undefined; + ContextName?: string; /** - *

        The ARN of a monitoring schedule for an endpoint associated with this processing - * job.

        + *

        The Amazon Resource Name (ARN) of the context.

        */ - MonitoringScheduleArn?: string; + ContextArn?: string; /** - *

        The ARN of an AutoML job associated with this processing job.

        + *

        The source of the context.

        */ - AutoMLJobArn?: string; + Source?: ContextSource; /** - *

        The ARN of a training job associated with this processing job.

        + *

        The type of the context.

        */ - TrainingJobArn?: string; -} - -export namespace DescribeProcessingJobResponse { - export const filterSensitiveLog = (obj: DescribeProcessingJobResponse): any => ({ - ...obj, - }); -} + ContextType?: string; -export interface DescribeSubscribedWorkteamRequest { /** - *

        The Amazon Resource Name (ARN) of the subscribed work team to describe.

        + *

        The description of the context.

        */ - WorkteamArn: string | undefined; -} - -export namespace DescribeSubscribedWorkteamRequest { - export const filterSensitiveLog = (obj: DescribeSubscribedWorkteamRequest): any => ({ - ...obj, - }); -} + Description?: string; -/** - *

        Describes a work team of a vendor that does the a labelling job.

        - */ -export interface SubscribedWorkteam { /** - *

        The Amazon Resource Name (ARN) of the vendor that you have subscribed.

        + *

        A list of the context's properties.

        */ - WorkteamArn: string | undefined; + Properties?: { [key: string]: string }; /** - *

        The title of the service provided by the vendor in the Amazon Marketplace.

        + *

        When the context was created.

        */ - MarketplaceTitle?: string; + CreationTime?: Date; /** - *

        The name of the vendor in the Amazon Marketplace.

        + *

        Information about the user who created or modified an experiment, trial, or trial + * component.

        */ - SellerName?: string; + CreatedBy?: UserContext; /** - *

        The description of the vendor from the Amazon Marketplace.

        + *

        When the context was last modified.

        */ - MarketplaceDescription?: string; + LastModifiedTime?: Date; /** - *

        Marketplace product listing ID.

        + *

        Information about the user who created or modified an experiment, trial, or trial + * component.

        */ - ListingId?: string; + LastModifiedBy?: UserContext; } -export namespace SubscribedWorkteam { - export const filterSensitiveLog = (obj: SubscribedWorkteam): any => ({ +export namespace DescribeContextResponse { + export const filterSensitiveLog = (obj: DescribeContextResponse): any => ({ ...obj, }); } -export interface DescribeSubscribedWorkteamResponse { +export interface DescribeDomainRequest { /** - *

        A Workteam instance that contains information about the work team.

        + *

        The domain ID.

        */ - SubscribedWorkteam: SubscribedWorkteam | undefined; + DomainId: string | undefined; } -export namespace DescribeSubscribedWorkteamResponse { - export const filterSensitiveLog = (obj: DescribeSubscribedWorkteamResponse): any => ({ +export namespace DescribeDomainRequest { + export const filterSensitiveLog = (obj: DescribeDomainRequest): any => ({ ...obj, }); } -export interface DescribeTrainingJobRequest { +export enum DomainStatus { + Delete_Failed = "Delete_Failed", + Deleting = "Deleting", + Failed = "Failed", + InService = "InService", + Pending = "Pending", + Update_Failed = "Update_Failed", + Updating = "Updating", +} + +export interface DescribeDomainResponse { /** - *

        The name of the training job.

        + *

        The domain's Amazon Resource Name (ARN).

        */ - TrainingJobName: string | undefined; -} + DomainArn?: string; -export namespace DescribeTrainingJobRequest { - export const filterSensitiveLog = (obj: DescribeTrainingJobRequest): any => ({ - ...obj, - }); -} + /** + *

        The domain ID.

        + */ + DomainId?: string; -/** - *

        The name, value, and date and time of a metric that was emitted to Amazon CloudWatch.

        - */ -export interface MetricData { /** - *

        The name of the metric.

        + *

        The domain name.

        */ - MetricName?: string; + DomainName?: string; /** - *

        The value of the metric.

        + *

        The ID of the Amazon Elastic File System (EFS) managed by this Domain.

        */ - Value?: number; + HomeEfsFileSystemId?: string; /** - *

        The date and time that the algorithm emitted the metric.

        + *

        The SSO managed application instance ID.

        */ - Timestamp?: Date; -} + SingleSignOnManagedApplicationInstanceId?: string; -export namespace MetricData { - export const filterSensitiveLog = (obj: MetricData): any => ({ - ...obj, - }); -} + /** + *

        The status.

        + */ + Status?: DomainStatus | string; -export enum SecondaryStatus { - COMPLETED = "Completed", - DOWNLOADING = "Downloading", - DOWNLOADING_TRAINING_IMAGE = "DownloadingTrainingImage", - FAILED = "Failed", - INTERRUPTED = "Interrupted", - LAUNCHING_ML_INSTANCES = "LaunchingMLInstances", - MAX_RUNTIME_EXCEEDED = "MaxRuntimeExceeded", - MAX_WAIT_TIME_EXCEEDED = "MaxWaitTimeExceeded", - PREPARING_TRAINING_STACK = "PreparingTrainingStack", - STARTING = "Starting", - STOPPED = "Stopped", - STOPPING = "Stopping", - TRAINING = "Training", - UPLOADING = "Uploading", -} + /** + *

        The creation time.

        + */ + CreationTime?: Date; -/** - *

        An array element of DescribeTrainingJobResponse$SecondaryStatusTransitions. It provides - * additional details about a status that the training job has transitioned through. A - * training job can be in one of several states, for example, starting, downloading, - * training, or uploading. Within each state, there are a number of intermediate states. - * For example, within the starting state, Amazon SageMaker could be starting the training job or - * launching the ML instances. These transitional states are referred to as the job's - * secondary - * status. - *

        - *

        - */ -export interface SecondaryStatusTransition { /** - *

        Contains a secondary status information from a training - * job.

        - *

        Status might be one of the following secondary statuses:

        - *
        - *
        InProgress
        - *
        - *
          - *
        • - *

          - * Starting - * - Starting the training job.

          - *
        • - *
        • - *

          - * Downloading - An optional stage for algorithms that - * support File training input mode. It indicates that - * data is being downloaded to the ML storage volumes.

          - *
        • - *
        • - *

          - * Training - Training is in progress.

          - *
        • - *
        • - *

          - * Uploading - Training is complete and the model - * artifacts are being uploaded to the S3 location.

          - *
        • - *
        - *
        - *
        Completed
        - *
        - *
          - *
        • - *

          - * Completed - The training job has completed.

          - *
        • - *
        - *
        - *
        Failed
        - *
        - *
          - *
        • - *

          - * Failed - The training job has failed. The reason for - * the failure is returned in the FailureReason field of - * DescribeTrainingJobResponse.

          - *
        • - *
        - *
        - *
        Stopped
        - *
        - *
          - *
        • - *

          - * MaxRuntimeExceeded - The job stopped because it - * exceeded the maximum allowed runtime.

          - *
        • - *
        • - *

          - * Stopped - The training job has stopped.

          - *
        • - *
        - *
        - *
        Stopping
        - *
        - *
          - *
        • - *

          - * Stopping - Stopping the training job.

          - *
        • - *
        - *
        - *
        - *

        We no longer support the following secondary statuses:

        - *
          - *
        • - *

          - * LaunchingMLInstances - *

          - *
        • - *
        • - *

          - * PreparingTrainingStack - *

          - *
        • - *
        • - *

          - * DownloadingTrainingImage - *

          - *
        • - *
        + *

        The last modified time.

        */ - Status: SecondaryStatus | string | undefined; + LastModifiedTime?: Date; /** - *

        A timestamp that shows when the training job transitioned to the current secondary - * status state.

        + *

        The failure reason.

        */ - StartTime: Date | undefined; + FailureReason?: string; /** - *

        A timestamp that shows when the training job transitioned out of this secondary status - * state into another secondary status state or when the training job has ended.

        + *

        The domain's authentication mode.

        */ - EndTime?: Date; + AuthMode?: AuthMode | string; /** - *

        A detailed description of the progress within a secondary status. - *

        - *

        Amazon SageMaker provides secondary statuses and status messages that apply to each of - * them:

        - *
        - *
        Starting
        - *
        - *
          - *
        • - *

          Starting the training job.

          - *
        • - *
        • - *

          Launching - * requested ML instances.

          - *
        • - *
        • - *

          Insufficient - * capacity error from EC2 while launching instances, - * retrying!

          - *
        • - *
        • - *

          Launched - * instance was unhealthy, replacing it!

          - *
        • - *
        • - *

          Preparing the instances for training.

          - *
        • - *
        - *
        - *
        Training
        - *
        - *
          - *
        • - *

          Downloading the training image.

          - *
        • - *
        • - *

          Training - * image download completed. Training in - * progress.

          - *
        • - *
        - *
        - *
        - * - *

        Status messages are subject to change. Therefore, we recommend not including them - * in code that programmatically initiates actions. For examples, don't use status - * messages in if statements.

        - *
        - *

        To have an overview of your training job's progress, view - * TrainingJobStatus and SecondaryStatus in DescribeTrainingJob, and StatusMessage together. For - * example, at the start of a training job, you might see the following:

        - *
          - *
        • - *

          - * TrainingJobStatus - InProgress

          - *
        • + *

          Settings which are applied to all UserProfiles in this domain, if settings are not explicitly specified + * in a given UserProfile. + *

          + */ + DefaultUserSettings?: UserSettings; + + /** + *

          Specifies the VPC used for non-EFS traffic. The default value is + * PublicInternetOnly.

          + *
            *
          • - *

            - * SecondaryStatus - Training

            + *

            + * PublicInternetOnly - Non-EFS traffic is through a VPC managed by + * Amazon SageMaker, which allows direct internet access

            *
          • *
          • - *

            - * StatusMessage - Downloading the training image

            + *

            + * VpcOnly - All Studio traffic is through the specified VPC and subnets

            *
          • *
          */ - StatusMessage?: string; -} - -export namespace SecondaryStatusTransition { - export const filterSensitiveLog = (obj: SecondaryStatusTransition): any => ({ - ...obj, - }); -} + AppNetworkAccessType?: AppNetworkAccessType | string; -export interface DescribeTrainingJobResponse { /** - *

          Name of the model training job.

          + *

          This member is deprecated and replaced with KmsKeyId.

          */ - TrainingJobName: string | undefined; + HomeEfsFileSystemKmsKeyId?: string; /** - *

          The Amazon Resource Name (ARN) of the training job.

          + *

          The VPC subnets that Studio uses for communication.

          */ - TrainingJobArn: string | undefined; + SubnetIds?: string[]; /** - *

          The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the - * training job was launched by a hyperparameter tuning job.

          + *

          The domain's URL.

          */ - TuningJobArn?: string; + Url?: string; /** - *

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the - * transform or training job.

          + *

          The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.

          */ - LabelingJobArn?: string; + VpcId?: string; /** - *

          The Amazon Resource Name (ARN) of an AutoML job.

          + *

          The AWS KMS customer managed CMK used to encrypt + * the EFS volume attached to the domain.

          */ - AutoMLJobArn?: string; + KmsKeyId?: string; +} - /** - *

          Information about the Amazon S3 location that is configured for storing model artifacts. - *

          - */ - ModelArtifacts: ModelArtifacts | undefined; +export namespace DescribeDomainResponse { + export const filterSensitiveLog = (obj: DescribeDomainResponse): any => ({ + ...obj, + }); +} +export interface DescribeEndpointInput { /** - *

          The status of the - * training - * job.

          - *

          Amazon SageMaker provides the following training job statuses:

          + *

          The name of the endpoint.

          + */ + EndpointName: string | undefined; +} + +export namespace DescribeEndpointInput { + export const filterSensitiveLog = (obj: DescribeEndpointInput): any => ({ + ...obj, + }); +} + +export enum EndpointStatus { + CREATING = "Creating", + DELETING = "Deleting", + FAILED = "Failed", + IN_SERVICE = "InService", + OUT_OF_SERVICE = "OutOfService", + ROLLING_BACK = "RollingBack", + SYSTEM_UPDATING = "SystemUpdating", + UPDATING = "Updating", +} + +/** + *

          Describes weight and capacities for a production variant associated with an + * endpoint. If you sent a request to the UpdateEndpointWeightsAndCapacities + * API and the endpoint status is Updating, you get different desired and + * current values.

          + */ +export interface ProductionVariantSummary { + /** + *

          The name of the variant.

          + */ + VariantName: string | undefined; + + /** + *

          An array of DeployedImage objects that specify the Amazon EC2 Container Registry paths of the + * inference images deployed on instances of this ProductionVariant.

          + */ + DeployedImages?: DeployedImage[]; + + /** + *

          The weight associated with the variant.

          + */ + CurrentWeight?: number; + + /** + *

          The requested weight, as specified in the + * UpdateEndpointWeightsAndCapacities request.

          + */ + DesiredWeight?: number; + + /** + *

          The number of instances associated with the variant.

          + */ + CurrentInstanceCount?: number; + + /** + *

          The number of instances requested in the + * UpdateEndpointWeightsAndCapacities request.

          + */ + DesiredInstanceCount?: number; +} + +export namespace ProductionVariantSummary { + export const filterSensitiveLog = (obj: ProductionVariantSummary): any => ({ + ...obj, + }); +} + +export interface DescribeEndpointOutput { + /** + *

          Name of the endpoint.

          + */ + EndpointName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the endpoint.

          + */ + EndpointArn: string | undefined; + + /** + *

          The name of the endpoint configuration associated with this endpoint.

          + */ + EndpointConfigName: string | undefined; + + /** + *

          An array of ProductionVariantSummary objects, one for each model + * hosted behind this endpoint.

          + */ + ProductionVariants?: ProductionVariantSummary[]; + + /** + *

          + */ + DataCaptureConfig?: DataCaptureConfigSummary; + + /** + *

          The status of the endpoint.

          *
            *
          • *

            - * InProgress - The training is in progress.

            + * OutOfService: Endpoint is not available to take incoming + * requests.

            *
          • *
          • *

            - * Completed - The training job has completed.

            + * Creating: CreateEndpoint is executing.

            *
          • *
          • *

            - * Failed - The training job has failed. To see the reason for the - * failure, see the FailureReason field in the response to a - * DescribeTrainingJobResponse call.

            + * Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            *
          • *
          • *

            - * Stopping - The training job is stopping.

            + * SystemUpdating: Endpoint is undergoing maintenance and cannot be + * updated or deleted or re-scaled until it has completed. This maintenance + * operation does not change any customer-specified values such as VPC config, KMS + * encryption, model, instance type, or instance count.

            *
          • *
          • *

            - * Stopped - The training job has stopped.

            + * RollingBack: Endpoint fails to scale up or down or change its + * variant weight and is in the process of rolling back to its previous + * configuration. Once the rollback completes, endpoint returns to an + * InService status. This transitional status only applies to an + * endpoint that has autoscaling enabled and is undergoing variant weight or + * capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called + * explicitly.

            *
          • - *
          - *

          For - * more detailed information, see SecondaryStatus.

          - */ - TrainingJobStatus: TrainingJobStatus | string | undefined; - - /** - *

          Provides detailed information about the state of the training job. For detailed - * information on the secondary status of the training job, see StatusMessage - * under SecondaryStatusTransition.

          - *

          Amazon SageMaker provides primary statuses and secondary statuses that apply to each of - * them:

          - *
          - *
          InProgress
          - *
          - *
            - *
          • - *

            - * Starting - * - Starting the training job.

            - *
          • - *
          • - *

            - * Downloading - An optional stage for algorithms that - * support File training input mode. It indicates that - * data is being downloaded to the ML storage volumes.

            - *
          • - *
          • - *

            - * Training - Training is in progress.

            - *
          • - *
          • - *

            - * Interrupted - The job stopped because the managed - * spot training instances were interrupted.

            - *
          • - *
          • - *

            - * Uploading - Training is complete and the model - * artifacts are being uploaded to the S3 location.

            - *
          • - *
          - *
          - *
          Completed
          - *
          - *
            - *
          • - *

            - * Completed - The training job has completed.

            - *
          • - *
          - *
          - *
          Failed
          - *
          - *
            - *
          • - *

            - * Failed - The training job has failed. The reason for - * the failure is returned in the FailureReason field of - * DescribeTrainingJobResponse.

            - *
          • - *
          - *
          - *
          Stopped
          - *
          - *
            - *
          • - *

            - * MaxRuntimeExceeded - The job stopped because it - * exceeded the maximum allowed runtime.

            - *
          • - *
          • - *

            - * MaxWaitTimeExceeded - The job stopped because it - * exceeded the maximum allowed wait time.

            - *
          • - *
          • - *

            - * Stopped - The training job has stopped.

            - *
          • - *
          - *
          - *
          Stopping
          - *
          - *
            - *
          • - *

            - * Stopping - Stopping the training job.

            - *
          • - *
          - *
          - *
          - * - * - *

          Valid values for SecondaryStatus are subject to change.

          - *
          - *

          We no longer support the following secondary statuses:

          - *
            *
          • *

            - * LaunchingMLInstances - *

            + * InService: Endpoint is available to process incoming + * requests.

            *
          • *
          • *

            - * PreparingTrainingStack - *

            + * Deleting: DeleteEndpoint is executing.

            *
          • *
          • *

            - * DownloadingTrainingImage - *

            + * Failed: Endpoint could not be created, updated, or re-scaled. Use + * DescribeEndpointOutput$FailureReason for information about + * the failure. DeleteEndpoint is the only operation that can be + * performed on a failed endpoint.

            *
          • *
          */ - SecondaryStatus: SecondaryStatus | string | undefined; + EndpointStatus: EndpointStatus | string | undefined; /** - *

          If the training job failed, the reason it failed.

          + *

          If the status of the endpoint is Failed, the reason why it failed. + *

          */ FailureReason?: string; /** - *

          Algorithm-specific parameters.

          + *

          A timestamp that shows when the endpoint was created.

          */ - HyperParameters?: { [key: string]: string }; + CreationTime: Date | undefined; /** - *

          Information about the algorithm used for training, and algorithm metadata. - *

          + *

          A timestamp that shows when the endpoint was last modified.

          */ - AlgorithmSpecification: AlgorithmSpecification | undefined; + LastModifiedTime: Date | undefined; /** - *

          The AWS Identity and Access Management (IAM) role configured for the training job.

          + *

          The most recent deployment configuration for the endpoint.

          */ - RoleArn?: string; + LastDeploymentConfig?: DeploymentConfig; +} - /** - *

          An array of Channel objects that describes each data input channel. - *

          - */ - InputDataConfig?: Channel[]; - - /** - *

          The S3 path where model artifacts that you configured when creating the job are - * stored. Amazon SageMaker creates subfolders for model artifacts.

          - */ - OutputDataConfig?: OutputDataConfig; - - /** - *

          Resources, including ML compute instances and ML storage volumes, that are - * configured for model training.

          - */ - ResourceConfig: ResourceConfig | undefined; - - /** - *

          A VpcConfig object that specifies the VPC that this training job has - * access to. For more information, see Protect Training Jobs by Using an Amazon - * Virtual Private Cloud.

          - */ - VpcConfig?: VpcConfig; - - /** - *

          Specifies a limit to how long a model training job can run. It also specifies the - * maximum time to wait for a spot instance. When the job reaches the time limit, Amazon SageMaker ends - * the training job. Use this API to cap model training costs.

          - *

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays - * job termination for 120 seconds. Algorithms can use this 120-second window to save the - * model artifacts, so the results of training are not lost.

          - */ - StoppingCondition: StoppingCondition | undefined; - - /** - *

          A timestamp that indicates when the training job was created.

          - */ - CreationTime: Date | undefined; - - /** - *

          Indicates the time when the training job starts on training instances. You are - * billed for the time interval between this time and the value of - * TrainingEndTime. The start time in CloudWatch Logs might be later than this time. - * The difference is due to the time it takes to download the training data and to the size - * of the training container.

          - */ - TrainingStartTime?: Date; - - /** - *

          Indicates the time when the training job ends on training instances. You are billed - * for the time interval between the value of TrainingStartTime and this time. - * For successful jobs and stopped jobs, this is the time after model artifacts are - * uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          - */ - TrainingEndTime?: Date; - - /** - *

          A timestamp that indicates when the status of the training job was last - * modified.

          - */ - LastModifiedTime?: Date; - - /** - *

          A history of all of the secondary statuses that the training job has transitioned - * through.

          - */ - SecondaryStatusTransitions?: SecondaryStatusTransition[]; - - /** - *

          A collection of MetricData objects that specify the names, values, and - * dates and times that the training algorithm emitted to Amazon CloudWatch.

          - */ - FinalMetricDataList?: MetricData[]; - - /** - *

          If you want to allow inbound or outbound network calls, except for calls between peers - * within a training cluster for distributed training, choose True. If you - * enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker - * downloads and uploads customer data and model artifacts through the specified VPC, but - * the training container does not have network access.

          - */ - EnableNetworkIsolation?: boolean; - - /** - *

          To encrypt all communications between ML compute instances in distributed training, - * choose True. Encryption provides greater security for distributed training, - * but training might take longer. How long it takes depends on the amount of communication - * between compute instances, especially if you use a deep learning algorithms in - * distributed training.

          - */ - EnableInterContainerTrafficEncryption?: boolean; - - /** - *

          A Boolean indicating whether managed spot training is enabled (True) or - * not (False).

          - */ - EnableManagedSpotTraining?: boolean; +export namespace DescribeEndpointOutput { + export const filterSensitiveLog = (obj: DescribeEndpointOutput): any => ({ + ...obj, + }); +} +export interface DescribeEndpointConfigInput { /** - *

          Contains information about the output location for managed spot training checkpoint - * data.

          + *

          The name of the endpoint configuration.

          */ - CheckpointConfig?: CheckpointConfig; + EndpointConfigName: string | undefined; +} - /** - *

          The training time in seconds.

          - */ - TrainingTimeInSeconds?: number; +export namespace DescribeEndpointConfigInput { + export const filterSensitiveLog = (obj: DescribeEndpointConfigInput): any => ({ + ...obj, + }); +} +export interface DescribeEndpointConfigOutput { /** - *

          The billable time in seconds.

          - *

          You can calculate the savings from using managed spot training using the formula - * (1 - BillableTimeInSeconds / TrainingTimeInSeconds) * 100. For example, - * if BillableTimeInSeconds is 100 and TrainingTimeInSeconds is - * 500, the savings is 80%.

          + *

          Name of the Amazon SageMaker endpoint configuration.

          */ - BillableTimeInSeconds?: number; + EndpointConfigName: string | undefined; /** - *

          Configuration information for the debug hook parameters, collection configuration, and - * storage paths.

          + *

          The Amazon Resource Name (ARN) of the endpoint configuration.

          */ - DebugHookConfig?: DebugHookConfig; + EndpointConfigArn: string | undefined; /** - *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

          - * + *

          An array of ProductionVariant objects, one for each model that you + * want to host at this endpoint.

          */ - ExperimentConfig?: ExperimentConfig; + ProductionVariants: ProductionVariant[] | undefined; /** - *

          Configuration information for debugging rules.

          + *

          */ - DebugRuleConfigurations?: DebugRuleConfiguration[]; + DataCaptureConfig?: DataCaptureConfig; /** - *

          Configuration of storage locations for TensorBoard output.

          + *

          AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage + * volume attached to the instance.

          */ - TensorBoardOutputConfig?: TensorBoardOutputConfig; + KmsKeyId?: string; /** - *

          Status about the debug rule evaluation.

          + *

          A timestamp that shows when the endpoint configuration was created.

          */ - DebugRuleEvaluationStatuses?: DebugRuleEvaluationStatus[]; + CreationTime: Date | undefined; } -export namespace DescribeTrainingJobResponse { - export const filterSensitiveLog = (obj: DescribeTrainingJobResponse): any => ({ +export namespace DescribeEndpointConfigOutput { + export const filterSensitiveLog = (obj: DescribeEndpointConfigOutput): any => ({ ...obj, }); } -export interface DescribeTransformJobRequest { +export interface DescribeExperimentRequest { /** - *

          The name of the transform job that you want to view details of.

          + *

          The name of the experiment to describe.

          */ - TransformJobName: string | undefined; + ExperimentName: string | undefined; } -export namespace DescribeTransformJobRequest { - export const filterSensitiveLog = (obj: DescribeTransformJobRequest): any => ({ +export namespace DescribeExperimentRequest { + export const filterSensitiveLog = (obj: DescribeExperimentRequest): any => ({ ...obj, }); } -export enum TransformJobStatus { - COMPLETED = "Completed", - FAILED = "Failed", - IN_PROGRESS = "InProgress", - STOPPED = "Stopped", - STOPPING = "Stopping", -} - -export interface DescribeTransformJobResponse { - /** - *

          The name of the transform job.

          - */ - TransformJobName: string | undefined; - - /** - *

          The Amazon Resource Name (ARN) of the transform job.

          - */ - TransformJobArn: string | undefined; - - /** - *

          The - * status of the transform job. If the transform job failed, the reason - * is returned in the FailureReason field.

          - */ - TransformJobStatus: TransformJobStatus | string | undefined; - +/** + *

          The source of the experiment.

          + */ +export interface ExperimentSource { /** - *

          If the transform job failed, FailureReason describes - * why - * it failed. A transform job creates a log file, which includes error - * messages, and stores it - * as - * an Amazon S3 object. For more information, see Log Amazon SageMaker Events with - * Amazon CloudWatch.

          + *

          The Amazon Resource Name (ARN) of the source.

          */ - FailureReason?: string; + SourceArn: string | undefined; /** - *

          The name of the model used in the transform job.

          + *

          The source type.

          */ - ModelName: string | undefined; + SourceType?: string; +} - /** - *

          The - * maximum number - * of - * parallel requests on each instance node - * that can be launched in a transform job. The default value is 1.

          - */ - MaxConcurrentTransforms?: number; +export namespace ExperimentSource { + export const filterSensitiveLog = (obj: ExperimentSource): any => ({ + ...obj, + }); +} +export interface DescribeExperimentResponse { /** - *

          The timeout and maximum number of retries for processing a transform job - * invocation.

          + *

          The name of the experiment.

          */ - ModelClientConfig?: ModelClientConfig; + ExperimentName?: string; /** - *

          The - * maximum - * payload size, in MB, used in the - * transform job.

          + *

          The Amazon Resource Name (ARN) of the experiment.

          */ - MaxPayloadInMB?: number; + ExperimentArn?: string; /** - *

          Specifies the number of records to include in a mini-batch for an HTTP inference - * request. - * A record - * is a single unit of input data that inference - * can be made on. For example, a single line in a CSV file is a record.

          - *

          To enable the batch strategy, you must set SplitType - * to - * Line, RecordIO, or - * TFRecord.

          - */ - BatchStrategy?: BatchStrategy | string; - - /** - *

          The - * environment variables to set in the Docker container. We support up to 16 key and values - * entries in the map.

          - */ - Environment?: { [key: string]: string }; - - /** - *

          Describes the dataset to be transformed and the Amazon S3 location where it is - * stored.

          - */ - TransformInput: TransformInput | undefined; - - /** - *

          Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the - * transform job.

          - */ - TransformOutput?: TransformOutput; - - /** - *

          Describes - * the resources, including ML instance types and ML instance count, to - * use for the transform job.

          - */ - TransformResources: TransformResources | undefined; - - /** - *

          A timestamp that shows when the transform Job was created.

          - */ - CreationTime: Date | undefined; - - /** - *

          Indicates when the transform job starts - * on - * ML instances. You are billed for the time interval between this time - * and the value of TransformEndTime.

          - */ - TransformStartTime?: Date; - - /** - *

          Indicates when the transform job has been - * - * completed, or has stopped or failed. You are billed for the time - * interval between this time and the value of TransformStartTime.

          - */ - TransformEndTime?: Date; - - /** - *

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the - * transform or training job.

          - */ - LabelingJobArn?: string; - - /** - *

          The Amazon Resource Name (ARN) of the AutoML transform job.

          - */ - AutoMLJobArn?: string; - - /** - *

          The data structure used to specify the data to be used for inference in a batch - * transform job and to associate the data that is relevant to the prediction results in - * the output. The input filter provided allows you to exclude input data that is not - * needed for inference in a batch transform job. The output filter provided allows you to - * include input data relevant to interpreting the predictions in the output from the job. - * For more information, see Associate Prediction - * Results with their Corresponding Input Records.

          - */ - DataProcessing?: DataProcessing; - - /** - *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

          - * - */ - ExperimentConfig?: ExperimentConfig; -} - -export namespace DescribeTransformJobResponse { - export const filterSensitiveLog = (obj: DescribeTransformJobResponse): any => ({ - ...obj, - }); -} - -export interface DescribeTrialRequest { - /** - *

          The name of the trial to describe.

          - */ - TrialName: string | undefined; -} - -export namespace DescribeTrialRequest { - export const filterSensitiveLog = (obj: DescribeTrialRequest): any => ({ - ...obj, - }); -} - -/** - *

          The source of the trial.

          - */ -export interface TrialSource { - /** - *

          The Amazon Resource Name (ARN) of the source.

          - */ - SourceArn: string | undefined; - - /** - *

          The source job type.

          - */ - SourceType?: string; -} - -export namespace TrialSource { - export const filterSensitiveLog = (obj: TrialSource): any => ({ - ...obj, - }); -} - -export interface DescribeTrialResponse { - /** - *

          The name of the trial.

          - */ - TrialName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the trial.

          - */ - TrialArn?: string; - - /** - *

          The name of the trial as displayed. If DisplayName isn't specified, - * TrialName is displayed.

          - */ - DisplayName?: string; - - /** - *

          The name of the experiment the trial is part of.

          - */ - ExperimentName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          - */ - Source?: TrialSource; - - /** - *

          When the trial was created.

          - */ - CreationTime?: Date; - - /** - *

          Who created the trial.

          - */ - CreatedBy?: UserContext; - - /** - *

          When the trial was last modified.

          - */ - LastModifiedTime?: Date; - - /** - *

          Who last modified the trial.

          - */ - LastModifiedBy?: UserContext; -} - -export namespace DescribeTrialResponse { - export const filterSensitiveLog = (obj: DescribeTrialResponse): any => ({ - ...obj, - }); -} - -export interface DescribeTrialComponentRequest { - /** - *

          The name of the trial component to describe.

          - */ - TrialComponentName: string | undefined; -} - -export namespace DescribeTrialComponentRequest { - export const filterSensitiveLog = (obj: DescribeTrialComponentRequest): any => ({ - ...obj, - }); -} - -/** - *

          A summary of the metrics of a trial component.

          - */ -export interface TrialComponentMetricSummary { - /** - *

          The name of the metric.

          - */ - MetricName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the source.

          - */ - SourceArn?: string; - - /** - *

          When the metric was last updated.

          - */ - TimeStamp?: Date; - - /** - *

          The maximum value of the metric.

          - */ - Max?: number; - - /** - *

          The minimum value of the metric.

          - */ - Min?: number; - - /** - *

          The most recent value of the metric.

          - */ - Last?: number; - - /** - *

          The number of samples used to generate the metric.

          - */ - Count?: number; - - /** - *

          The average value of the metric.

          - */ - Avg?: number; - - /** - *

          The standard deviation of the metric.

          - */ - StdDev?: number; -} - -export namespace TrialComponentMetricSummary { - export const filterSensitiveLog = (obj: TrialComponentMetricSummary): any => ({ - ...obj, - }); -} - -/** - *

          The Amazon Resource Name (ARN) and job type of the source of a trial component.

          - */ -export interface TrialComponentSource { - /** - *

          The source ARN.

          - */ - SourceArn: string | undefined; - - /** - *

          The source job type.

          - */ - SourceType?: string; -} - -export namespace TrialComponentSource { - export const filterSensitiveLog = (obj: TrialComponentSource): any => ({ - ...obj, - }); -} - -export interface DescribeTrialComponentResponse { - /** - *

          The name of the trial component.

          - */ - TrialComponentName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the trial component.

          - */ - TrialComponentArn?: string; - - /** - *

          The name of the component as displayed. If DisplayName isn't specified, - * TrialComponentName is displayed.

          - */ - DisplayName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          - */ - Source?: TrialComponentSource; - - /** - *

          The status of the component. States include:

          - *
            - *
          • - *

            InProgress

            - *
          • - *
          • - *

            Completed

            - *
          • - *
          • - *

            Failed

            - *
          • - *
          - */ - Status?: TrialComponentStatus; - - /** - *

          When the component started.

          - */ - StartTime?: Date; - - /** - *

          When the component ended.

          - */ - EndTime?: Date; - - /** - *

          When the component was created.

          - */ - CreationTime?: Date; - - /** - *

          Who created the component.

          - */ - CreatedBy?: UserContext; - - /** - *

          When the component was last modified.

          - */ - LastModifiedTime?: Date; - - /** - *

          Who last modified the component.

          - */ - LastModifiedBy?: UserContext; - - /** - *

          The hyperparameters of the component.

          - */ - Parameters?: { [key: string]: TrialComponentParameterValue }; - - /** - *

          The input artifacts of the component.

          - */ - InputArtifacts?: { [key: string]: TrialComponentArtifact }; - - /** - *

          The output artifacts of the component.

          - */ - OutputArtifacts?: { [key: string]: TrialComponentArtifact }; - - /** - *

          The metrics for the component.

          - */ - Metrics?: TrialComponentMetricSummary[]; -} - -export namespace DescribeTrialComponentResponse { - export const filterSensitiveLog = (obj: DescribeTrialComponentResponse): any => ({ - ...obj, - }); -} - -export interface DescribeUserProfileRequest { - /** - *

          The domain ID.

          - */ - DomainId: string | undefined; - - /** - *

          The user profile name.

          - */ - UserProfileName: string | undefined; -} - -export namespace DescribeUserProfileRequest { - export const filterSensitiveLog = (obj: DescribeUserProfileRequest): any => ({ - ...obj, - }); -} - -export enum UserProfileStatus { - Delete_Failed = "Delete_Failed", - Deleting = "Deleting", - Failed = "Failed", - InService = "InService", - Pending = "Pending", - Update_Failed = "Update_Failed", - Updating = "Updating", -} - -export interface DescribeUserProfileResponse { - /** - *

          The ID of the domain that contains the profile.

          - */ - DomainId?: string; - - /** - *

          The user profile Amazon Resource Name (ARN).

          - */ - UserProfileArn?: string; - - /** - *

          The user profile name.

          - */ - UserProfileName?: string; - - /** - *

          The ID of the user's profile in the Amazon Elastic File System (EFS) volume.

          - */ - HomeEfsFileSystemUid?: string; - - /** - *

          The status.

          - */ - Status?: UserProfileStatus | string; - - /** - *

          The last modified time.

          - */ - LastModifiedTime?: Date; - - /** - *

          The creation time.

          - */ - CreationTime?: Date; - - /** - *

          The failure reason.

          - */ - FailureReason?: string; - - /** - *

          The SSO user identifier.

          - */ - SingleSignOnUserIdentifier?: string; - - /** - *

          The SSO user value.

          - */ - SingleSignOnUserValue?: string; - - /** - *

          A collection of settings.

          - */ - UserSettings?: UserSettings; -} - -export namespace DescribeUserProfileResponse { - export const filterSensitiveLog = (obj: DescribeUserProfileResponse): any => ({ - ...obj, - }); -} - -export interface DescribeWorkforceRequest { - /** - *

          The name of the private workforce whose access you want to restrict. - * WorkforceName is automatically set to default when a - * workforce is created and cannot be modified.

          - */ - WorkforceName: string | undefined; -} - -export namespace DescribeWorkforceRequest { - export const filterSensitiveLog = (obj: DescribeWorkforceRequest): any => ({ - ...obj, - }); -} - -/** - *

          Your OIDC IdP workforce configuration.

          - */ -export interface OidcConfigForResponse { - /** - *

          The OIDC IdP client ID used to configure your private workforce.

          - */ - ClientId?: string; - - /** - *

          The OIDC IdP issuer used to configure your private workforce.

          - */ - Issuer?: string; - - /** - *

          The OIDC IdP authorization endpoint used to configure your private workforce.

          - */ - AuthorizationEndpoint?: string; - - /** - *

          The OIDC IdP token endpoint used to configure your private workforce.

          - */ - TokenEndpoint?: string; - - /** - *

          The OIDC IdP user information endpoint used to configure your private workforce.

          - */ - UserInfoEndpoint?: string; - - /** - *

          The OIDC IdP logout endpoint used to configure your private workforce.

          - */ - LogoutEndpoint?: string; - - /** - *

          The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

          - */ - JwksUri?: string; -} - -export namespace OidcConfigForResponse { - export const filterSensitiveLog = (obj: OidcConfigForResponse): any => ({ - ...obj, - }); -} - -/** - *

          A single private workforce, which is automatically created when you create your first - * private work team. You can create one private work force in each AWS Region. By default, - * any workforce-related API operation used in a specific region will apply to the - * workforce created in that region. To learn how to create a private workforce, see Create a Private Workforce.

          - */ -export interface Workforce { - /** - *

          The name of the private workforce.

          - */ - WorkforceName: string | undefined; - - /** - *

          The Amazon Resource Name (ARN) of the private workforce.

          - */ - WorkforceArn: string | undefined; - - /** - *

          The most recent date that was used to - * successfully add one or more IP address ranges (CIDRs) to a private workforce's - * allow list.

          - */ - LastUpdatedDate?: Date; - - /** - *

          A list of one to ten IP address ranges (CIDRs) to be added to the - * workforce allow list. By default, a workforce isn't restricted to specific IP addresses.

          - */ - SourceIpConfig?: SourceIpConfig; - - /** - *

          The subdomain for your OIDC Identity Provider.

          - */ - SubDomain?: string; - - /** - *

          The configuration of an Amazon Cognito workforce. - * A single Cognito workforce is created using and corresponds to a single - * - * Amazon Cognito user pool.

          - */ - CognitoConfig?: CognitoConfig; - - /** - *

          The configuration of an OIDC Identity Provider (IdP) private workforce.

          - */ - OidcConfig?: OidcConfigForResponse; - - /** - *

          The date that the workforce is created.

          - */ - CreateDate?: Date; -} - -export namespace Workforce { - export const filterSensitiveLog = (obj: Workforce): any => ({ - ...obj, - }); -} - -export interface DescribeWorkforceResponse { - /** - *

          A single private workforce, which is automatically created when you create your first - * private work team. You can create one private work force in each AWS Region. By default, - * any workforce-related API operation used in a specific region will apply to the - * workforce created in that region. To learn how to create a private workforce, see Create a Private Workforce.

          - */ - Workforce: Workforce | undefined; -} - -export namespace DescribeWorkforceResponse { - export const filterSensitiveLog = (obj: DescribeWorkforceResponse): any => ({ - ...obj, - }); -} - -export interface DescribeWorkteamRequest { - /** - *

          The name of the work team to return a description of.

          - */ - WorkteamName: string | undefined; -} - -export namespace DescribeWorkteamRequest { - export const filterSensitiveLog = (obj: DescribeWorkteamRequest): any => ({ - ...obj, - }); -} - -/** - *

          Provides details about a labeling work team.

          - */ -export interface Workteam { - /** - *

          The name of the work team.

          - */ - WorkteamName: string | undefined; - - /** - *

          A list of MemberDefinition objects that contains objects that identify - * the workers that make up the work team.

          - *

          Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). - * For private workforces created using Amazon Cognito use - * CognitoMemberDefinition. For workforces created using your own OIDC identity - * provider (IdP) use OidcMemberDefinition.

          - */ - MemberDefinitions: MemberDefinition[] | undefined; - - /** - *

          The Amazon Resource Name (ARN) that identifies the work team.

          - */ - WorkteamArn: string | undefined; - - /** - *

          The Amazon Resource Name (ARN) of the workforce.

          - */ - WorkforceArn?: string; - - /** - *

          The Amazon Marketplace identifier for a vendor's work team.

          - */ - ProductListingIds?: string[]; - - /** - *

          A description of the work team.

          - */ - Description: string | undefined; - - /** - *

          The URI of the labeling job's user interface. Workers open this URI to start labeling - * your data objects.

          - */ - SubDomain?: string; - - /** - *

          The date and time that the work team was created (timestamp).

          - */ - CreateDate?: Date; - - /** - *

          The date and time that the work team was last updated (timestamp).

          - */ - LastUpdatedDate?: Date; - - /** - *

          Configures SNS notifications of available or expiring work items for work - * teams.

          - */ - NotificationConfiguration?: NotificationConfiguration; -} - -export namespace Workteam { - export const filterSensitiveLog = (obj: Workteam): any => ({ - ...obj, - }); -} - -export interface DescribeWorkteamResponse { - /** - *

          A Workteam instance that contains information about the work team. - *

          - */ - Workteam: Workteam | undefined; -} - -export namespace DescribeWorkteamResponse { - export const filterSensitiveLog = (obj: DescribeWorkteamResponse): any => ({ - ...obj, - }); -} - -/** - *

          Specifies weight and capacity values for a production variant.

          - */ -export interface DesiredWeightAndCapacity { - /** - *

          The name of the - * variant - * to update.

          - */ - VariantName: string | undefined; - - /** - *

          The variant's weight.

          - */ - DesiredWeight?: number; - - /** - *

          The variant's capacity.

          - */ - DesiredInstanceCount?: number; -} - -export namespace DesiredWeightAndCapacity { - export const filterSensitiveLog = (obj: DesiredWeightAndCapacity): any => ({ - ...obj, - }); -} - -export interface DisassociateTrialComponentRequest { - /** - *

          The name of the component to disassociate from the trial.

          - */ - TrialComponentName: string | undefined; - - /** - *

          The name of the trial to disassociate from.

          - */ - TrialName: string | undefined; -} - -export namespace DisassociateTrialComponentRequest { - export const filterSensitiveLog = (obj: DisassociateTrialComponentRequest): any => ({ - ...obj, - }); -} - -export interface DisassociateTrialComponentResponse { - /** - *

          The ARN of the trial component.

          - */ - TrialComponentArn?: string; - - /** - *

          The Amazon Resource Name (ARN) of the trial.

          - */ - TrialArn?: string; -} - -export namespace DisassociateTrialComponentResponse { - export const filterSensitiveLog = (obj: DisassociateTrialComponentResponse): any => ({ - ...obj, - }); -} - -/** - *

          The domain's details.

          - */ -export interface DomainDetails { - /** - *

          The domain's Amazon Resource Name (ARN).

          - */ - DomainArn?: string; - - /** - *

          The domain ID.

          - */ - DomainId?: string; - - /** - *

          The domain name.

          - */ - DomainName?: string; - - /** - *

          The status.

          - */ - Status?: DomainStatus | string; - - /** - *

          The creation time.

          - */ - CreationTime?: Date; - - /** - *

          The last modified time.

          - */ - LastModifiedTime?: Date; - - /** - *

          The domain's URL.

          - */ - Url?: string; -} - -export namespace DomainDetails { - export const filterSensitiveLog = (obj: DomainDetails): any => ({ - ...obj, - }); -} - -export enum EndpointConfigSortKey { - CreationTime = "CreationTime", - Name = "Name", -} - -/** - *

          Provides summary information for an endpoint configuration.

          - */ -export interface EndpointConfigSummary { - /** - *

          The name of the endpoint configuration.

          - */ - EndpointConfigName: string | undefined; - - /** - *

          The Amazon Resource Name (ARN) of the endpoint configuration.

          - */ - EndpointConfigArn: string | undefined; - - /** - *

          A timestamp that shows when the endpoint configuration was created.

          - */ - CreationTime: Date | undefined; -} - -export namespace EndpointConfigSummary { - export const filterSensitiveLog = (obj: EndpointConfigSummary): any => ({ - ...obj, - }); -} - -export enum EndpointSortKey { - CreationTime = "CreationTime", - Name = "Name", - Status = "Status", -} - -/** - *

          Provides summary information for an endpoint.

          - */ -export interface EndpointSummary { - /** - *

          The name of the endpoint.

          - */ - EndpointName: string | undefined; - - /** - *

          The Amazon Resource Name (ARN) of the endpoint.

          - */ - EndpointArn: string | undefined; - - /** - *

          A timestamp that shows when the endpoint was created.

          - */ - CreationTime: Date | undefined; - - /** - *

          A timestamp that shows when the endpoint was last modified.

          - */ - LastModifiedTime: Date | undefined; - - /** - *

          The status of the endpoint.

          - *
            - *
          • - *

            - * OutOfService: Endpoint is not available to take incoming - * requests.

            - *
          • - *
          • - *

            - * Creating: CreateEndpoint is executing.

            - *
          • - *
          • - *

            - * Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            - *
          • - *
          • - *

            - * SystemUpdating: Endpoint is undergoing maintenance and cannot be - * updated or deleted or re-scaled until it has completed. This maintenance - * operation does not change any customer-specified values such as VPC config, KMS - * encryption, model, instance type, or instance count.

            - *
          • - *
          • - *

            - * RollingBack: Endpoint fails to scale up or down or change its - * variant weight and is in the process of rolling back to its previous - * configuration. Once the rollback completes, endpoint returns to an - * InService status. This transitional status only applies to an - * endpoint that has autoscaling enabled and is undergoing variant weight or - * capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called - * explicitly.

            - *
          • - *
          • - *

            - * InService: Endpoint is available to process incoming - * requests.

            - *
          • - *
          • - *

            - * Deleting: DeleteEndpoint is executing.

            - *
          • - *
          • - *

            - * Failed: Endpoint could not be created, updated, or re-scaled. Use - * DescribeEndpointOutput$FailureReason for information about - * the failure. DeleteEndpoint is the only operation that can be - * performed on a failed endpoint.

            - *
          • - *
          - *

          To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

          - */ - EndpointStatus: EndpointStatus | string | undefined; -} - -export namespace EndpointSummary { - export const filterSensitiveLog = (obj: EndpointSummary): any => ({ - ...obj, - }); -} - -/** - *

          The properties of an experiment as returned by the Search API.

          - */ -export interface Experiment { - /** - *

          The name of the experiment.

          - */ - ExperimentName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the experiment.

          - */ - ExperimentArn?: string; - - /** - *

          The name of the experiment as displayed. If DisplayName isn't specified, - * ExperimentName is displayed.

          - */ - DisplayName?: string; - - /** - *

          The source of the experiment.

          - */ - Source?: ExperimentSource; - - /** - *

          The description of the experiment.

          - */ - Description?: string; - - /** - *

          When the experiment was created.

          - */ - CreationTime?: Date; - - /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          - */ - CreatedBy?: UserContext; - - /** - *

          When the experiment was last modified.

          - */ - LastModifiedTime?: Date; - - /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          - */ - LastModifiedBy?: UserContext; - - /** - *

          The list of tags that are associated with the experiment. You can use Search API to search on the tags.

          - */ - Tags?: Tag[]; -} - -export namespace Experiment { - export const filterSensitiveLog = (obj: Experiment): any => ({ - ...obj, - }); -} - -/** - *

          A summary of the properties of an experiment. To get the complete set of properties, call - * the DescribeExperiment API and provide the - * ExperimentName.

          - */ -export interface ExperimentSummary { - /** - *

          The Amazon Resource Name (ARN) of the experiment.

          - */ - ExperimentArn?: string; - - /** - *

          The name of the experiment.

          - */ - ExperimentName?: string; - - /** - *

          The name of the experiment as displayed. If DisplayName isn't specified, - * ExperimentName is displayed.

          - */ - DisplayName?: string; - - /** - *

          The source of the experiment.

          - */ - ExperimentSource?: ExperimentSource; - - /** - *

          When the experiment was created.

          - */ - CreationTime?: Date; - - /** - *

          When the experiment was last modified.

          - */ - LastModifiedTime?: Date; -} - -export namespace ExperimentSummary { - export const filterSensitiveLog = (obj: ExperimentSummary): any => ({ - ...obj, - }); -} - -export enum Operator { - CONTAINS = "Contains", - EQUALS = "Equals", - EXISTS = "Exists", - GREATER_THAN = "GreaterThan", - GREATER_THAN_OR_EQUAL_TO = "GreaterThanOrEqualTo", - IN = "In", - LESS_THAN = "LessThan", - LESS_THAN_OR_EQUAL_TO = "LessThanOrEqualTo", - NOT_EQUALS = "NotEquals", - NOT_EXISTS = "NotExists", -} - -/** - *

          A conditional statement for a search expression that includes a resource property, a - * Boolean operator, and a value. Resources that match the statement are returned in the - * results from the Search API.

          - * - *

          If you specify a Value, but not an Operator, Amazon SageMaker uses the - * equals operator.

          - *

          In search, there are several property types:

          - *
          - *
          Metrics
          - *
          - *

          To define a metric filter, enter a value using the form - * "Metrics.", where is - * a metric name. For example, the following filter searches for training jobs - * with an "accuracy" metric greater than - * "0.9":

          - *

          - * { - *

          - *

          - * "Name": "Metrics.accuracy", - *

          - *

          - * "Operator": "GreaterThan", - *

          - *

          - * "Value": "0.9" - *

          - *

          - * } - *

          - *
          - *
          HyperParameters
          - *
          - *

          To define a hyperparameter filter, enter a value with the form - * "HyperParameters.". Decimal hyperparameter - * values are treated as a decimal in a comparison if the specified - * Value is also a decimal value. If the specified - * Value is an integer, the decimal hyperparameter values are - * treated as integers. For example, the following filter is satisfied by - * training jobs with a "learning_rate" hyperparameter that is - * less than "0.5":

          - *

          - * { - *

          - *

          - * "Name": "HyperParameters.learning_rate", - *

          - *

          - * "Operator": "LessThan", - *

          - *

          - * "Value": "0.5" - *

          - *

          - * } - *

          - *
          - *
          Tags
          - *
          - *

          To define a tag filter, enter a value with the form - * Tags..

          - *
          - *
          - */ -export interface Filter { - /** - *

          A resource property name. For example, TrainingJobName. For - * valid property names, see SearchRecord. - * You must specify a valid property for the resource.

          - */ - Name: string | undefined; - - /** - *

          A Boolean binary operator that is used to evaluate the filter. The operator field - * contains one of the following values:

          - *
          - *
          Equals
          - *
          - *

          The value of Name equals Value.

          - *
          - *
          NotEquals
          - *
          - *

          The value of Name doesn't equal Value.

          - *
          - *
          Exists
          - *
          - *

          The Name property exists.

          - *
          - *
          NotExists
          - *
          - *

          The Name property does not exist.

          - *
          - *
          GreaterThan
          - *
          - *

          The value of Name is greater than Value. - * Not supported for text properties.

          - *
          - *
          GreaterThanOrEqualTo
          - *
          - *

          The value of Name is greater than or equal to Value. - * Not supported for text properties.

          - *
          - *
          LessThan
          - *
          - *

          The value of Name is less than Value. - * Not supported for text properties.

          - *
          - *
          LessThanOrEqualTo
          - *
          - *

          The value of Name is less than or equal to Value. - * Not supported for text properties.

          - *
          - *
          In
          - *
          - *

          The value of Name is one of the comma delimited strings in - * Value. Only supported for text properties.

          - *
          - *
          Contains
          - *
          - *

          The value of Name contains the string Value. - * Only supported for text properties.

          - *

          A SearchExpression can include the Contains operator - * multiple times when the value of Name is one of the following:

          - *
            - *
          • - *

            - * Experiment.DisplayName - *

            - *
          • - *
          • - *

            - * Experiment.ExperimentName - *

            - *
          • - *
          • - *

            - * Experiment.Tags - *

            - *
          • - *
          • - *

            - * Trial.DisplayName - *

            - *
          • - *
          • - *

            - * Trial.TrialName - *

            - *
          • - *
          • - *

            - * Trial.Tags - *

            - *
          • - *
          • - *

            - * TrialComponent.DisplayName - *

            - *
          • - *
          • - *

            - * TrialComponent.TrialComponentName - *

            - *
          • - *
          • - *

            - * TrialComponent.Tags - *

            - *
          • - *
          • - *

            - * TrialComponent.InputArtifacts - *

            - *
          • - *
          • - *

            - * TrialComponent.OutputArtifacts - *

            - *
          • - *
          - *

          A SearchExpression can include only one Contains operator - * for all other values of Name. In these cases, if you include multiple - * Contains operators in the SearchExpression, the result is - * the following error message: "'CONTAINS' operator usage limit of 1 - * exceeded."

          - *
          - *
          - */ - Operator?: Operator | string; - - /** - *

          A value used with Name and Operator to determine which - * resources satisfy the filter's condition. For numerical properties, Value - * must be an integer or floating-point decimal. For timestamp properties, - * Value must be an ISO 8601 date-time string of the following format: - * YYYY-mm-dd'T'HH:MM:SS.

          - */ - Value?: string; -} - -export namespace Filter { - export const filterSensitiveLog = (obj: Filter): any => ({ - ...obj, - }); -} - -/** - *

          Contains summary information about the flow definition.

          - */ -export interface FlowDefinitionSummary { - /** - *

          The name of the flow definition.

          + *

          The name of the experiment as displayed. If DisplayName isn't specified, + * ExperimentName is displayed.

          */ - FlowDefinitionName: string | undefined; + DisplayName?: string; /** - *

          The Amazon Resource Name (ARN) of the flow definition.

          + *

          The ARN of the source and, optionally, the type.

          */ - FlowDefinitionArn: string | undefined; + Source?: ExperimentSource; /** - *

          The status of the flow definition. Valid values:

          + *

          The description of the experiment.

          */ - FlowDefinitionStatus: FlowDefinitionStatus | string | undefined; + Description?: string; /** - *

          The timestamp when SageMaker created the flow definition.

          + *

          When the experiment was created.

          */ - CreationTime: Date | undefined; + CreationTime?: Date; /** - *

          The reason why the flow definition creation failed. A failure reason is returned only when the flow definition status is Failed.

          + *

          Who created the experiment.

          */ - FailureReason?: string; -} - -export namespace FlowDefinitionSummary { - export const filterSensitiveLog = (obj: FlowDefinitionSummary): any => ({ - ...obj, - }); -} - -export enum ResourceType { - EXPERIMENT = "Experiment", - EXPERIMENT_TRIAL = "ExperimentTrial", - EXPERIMENT_TRIAL_COMPONENT = "ExperimentTrialComponent", - TRAINING_JOB = "TrainingJob", -} + CreatedBy?: UserContext; -/** - *

          Part of the SuggestionQuery type. Specifies a hint for retrieving property - * names that begin with the specified text.

          - */ -export interface PropertyNameQuery { /** - *

          Text that begins a property's name.

          + *

          When the experiment was last modified.

          */ - PropertyNameHint: string | undefined; -} - -export namespace PropertyNameQuery { - export const filterSensitiveLog = (obj: PropertyNameQuery): any => ({ - ...obj, - }); -} + LastModifiedTime?: Date; -/** - *

          Specified in the GetSearchSuggestions request. - * Limits the property names that are included in the response.

          - */ -export interface SuggestionQuery { /** - *

          Defines a property name hint. Only property - * names that begin with the specified hint are included in the response.

          + *

          Who last modified the experiment.

          */ - PropertyNameQuery?: PropertyNameQuery; + LastModifiedBy?: UserContext; } -export namespace SuggestionQuery { - export const filterSensitiveLog = (obj: SuggestionQuery): any => ({ +export namespace DescribeExperimentResponse { + export const filterSensitiveLog = (obj: DescribeExperimentResponse): any => ({ ...obj, }); } -export interface GetSearchSuggestionsRequest { +export interface DescribeFeatureGroupRequest { /** - *

          The name of the Amazon SageMaker resource to search for.

          + *

          The name of the FeatureGroup you want described.

          */ - Resource: ResourceType | string | undefined; + FeatureGroupName: string | undefined; /** - *

          Limits the property names that are included in the response.

          + *

          A token to resume pagination of the list of Features + * (FeatureDefinitions). 2,500 Features are returned by + * default.

          */ - SuggestionQuery?: SuggestionQuery; + NextToken?: string; } -export namespace GetSearchSuggestionsRequest { - export const filterSensitiveLog = (obj: GetSearchSuggestionsRequest): any => ({ +export namespace DescribeFeatureGroupRequest { + export const filterSensitiveLog = (obj: DescribeFeatureGroupRequest): any => ({ ...obj, }); } -/** - *

          A property name returned from a GetSearchSuggestions call that specifies - * a value in the PropertyNameQuery field.

          - */ -export interface PropertyNameSuggestion { - /** - *

          A suggested property name based on what you entered in the search textbox in the Amazon SageMaker - * console.

          - */ - PropertyName?: string; +export enum FeatureGroupStatus { + CREATED = "Created", + CREATE_FAILED = "CreateFailed", + CREATING = "Creating", + DELETE_FAILED = "DeleteFailed", + DELETING = "Deleting", } -export namespace PropertyNameSuggestion { - export const filterSensitiveLog = (obj: PropertyNameSuggestion): any => ({ - ...obj, - }); +export enum OfflineStoreStatusValue { + ACTIVE = "Active", + BLOCKED = "Blocked", + DISABLED = "Disabled", } -export interface GetSearchSuggestionsResponse { +/** + *

          The status of OfflineStore.

          + */ +export interface OfflineStoreStatus { /** - *

          A list of property names for a Resource that match a - * SuggestionQuery.

          + *

          An OfflineStore status.

          */ - PropertyNameSuggestions?: PropertyNameSuggestion[]; -} - -export namespace GetSearchSuggestionsResponse { - export const filterSensitiveLog = (obj: GetSearchSuggestionsResponse): any => ({ - ...obj, - }); -} + Status: OfflineStoreStatusValue | string | undefined; -/** - *

          Specifies configuration details for a Git repository when the repository is - * updated.

          - */ -export interface GitConfigForUpdate { /** - *

          The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the - * credentials used to access the git repository. The secret must have a staging label of - * AWSCURRENT and must be in the following format:

          - *

          - * {"username": UserName, "password": - * Password} - *

          + *

          The justification for why the OfflineStoreStatus is Blocked (if applicable).

          */ - SecretArn?: string; + BlockedReason?: string; } -export namespace GitConfigForUpdate { - export const filterSensitiveLog = (obj: GitConfigForUpdate): any => ({ +export namespace OfflineStoreStatus { + export const filterSensitiveLog = (obj: OfflineStoreStatus): any => ({ ...obj, }); } -/** - *

          Container for human task user interface information.

          - */ -export interface HumanTaskUiSummary { +export interface DescribeFeatureGroupResponse { /** - *

          The name of the human task user interface.

          + *

          The Amazon Resource Name (ARN) of the FeatureGroup.

          */ - HumanTaskUiName: string | undefined; + FeatureGroupArn: string | undefined; /** - *

          The Amazon Resource Name (ARN) of the human task user interface.

          + *

          he name of the FeatureGroup.

          */ - HumanTaskUiArn: string | undefined; + FeatureGroupName: string | undefined; /** - *

          A timestamp when SageMaker created the human task user interface.

          + *

          The name of the Feature used for RecordIdentifier, whose value + * uniquely identifies a record stored in the feature store.

          */ - CreationTime: Date | undefined; -} + RecordIdentifierFeatureName: string | undefined; -export namespace HumanTaskUiSummary { - export const filterSensitiveLog = (obj: HumanTaskUiSummary): any => ({ - ...obj, - }); -} + /** + *

          The name of the feature that stores the EventTime of a Record in a + * FeatureGroup.

          + *

          An EventTime is a point in time when a new event occurs that + * corresponds to the creation or update of a Record in a + * FeatureGroup. All Records in the FeatureGroup + * have a corresponding EventTime.

          + */ + EventTimeFeatureName: string | undefined; -export enum HyperParameterTuningJobSortByOptions { - CreationTime = "CreationTime", - Name = "Name", - Status = "Status", -} + /** + *

          A list of the Features in the FeatureGroup. + * Each feature is defined by a FeatureName and FeatureType.

          + */ + FeatureDefinitions: FeatureDefinition[] | undefined; -/** - *

          Provides summary information about a hyperparameter tuning job.

          - */ -export interface HyperParameterTuningJobSummary { /** - *

          The name of the tuning job.

          + *

          A timestamp indicating when SageMaker created the FeatureGroup.

          */ - HyperParameterTuningJobName: string | undefined; + CreationTime: Date | undefined; /** - *

          The - * Amazon - * Resource Name (ARN) of the tuning job.

          + *

          The configuration for the OnlineStore.

          */ - HyperParameterTuningJobArn: string | undefined; + OnlineStoreConfig?: OnlineStoreConfig; /** - *

          The status of the - * tuning - * job.

          + *

          The configuration of the OfflineStore, inducing the S3 location of the + * OfflineStore, AWS Glue or AWS Hive data catalogue configurations, and the + * security configuration.

          */ - HyperParameterTuningJobStatus: HyperParameterTuningJobStatus | string | undefined; + OfflineStoreConfig?: OfflineStoreConfig; /** - *

          Specifies the search strategy hyperparameter tuning uses to choose which - * hyperparameters to - * use - * for each iteration. Currently, the only valid value is - * Bayesian.

          + *

          The Amazon Resource Name (ARN) of the IAM execution role used to persist data into the + * OfflineStore if an OfflineStoreConfig is provided.

          */ - Strategy: HyperParameterTuningJobStrategyType | string | undefined; + RoleArn?: string; /** - *

          The date and time that the tuning job was created.

          + *

          The status of the feature group.

          */ - CreationTime: Date | undefined; + FeatureGroupStatus?: FeatureGroupStatus | string; /** - *

          The date and time that the tuning job ended.

          + *

          The status of the OfflineStore. Notifies you if replicating data into the + * OfflineStore has failed. Returns either: Active or + * Blocked + *

          */ - HyperParameterTuningEndTime?: Date; + OfflineStoreStatus?: OfflineStoreStatus; /** - *

          The date and time that the tuning job was - * modified.

          + *

          The reason that the FeatureGroup failed to be replicated in the + * OfflineStore. This is failure can occur because:

          + *
            + *
          • + *

            The FeatureGroup could not be created in the + * OfflineStore.

            + *
          • + *
          • + *

            The FeatureGroup could not be deleted from the + * OfflineStore.

            + *
          • + *
          */ - LastModifiedTime?: Date; + FailureReason?: string; /** - *

          The TrainingJobStatusCounters object that specifies the numbers of - * training jobs, categorized by status, that this tuning job launched.

          + *

          A free form description of the feature group.

          */ - TrainingJobStatusCounters: TrainingJobStatusCounters | undefined; + Description?: string; /** - *

          The ObjectiveStatusCounters object that specifies the numbers of - * training jobs, categorized by objective metric status, that this tuning job - * launched.

          + *

          A token to resume pagination of the list of Features + * (FeatureDefinitions).

          */ - ObjectiveStatusCounters: ObjectiveStatusCounters | undefined; + NextToken: string | undefined; +} + +export namespace DescribeFeatureGroupResponse { + export const filterSensitiveLog = (obj: DescribeFeatureGroupResponse): any => ({ + ...obj, + }); +} +export interface DescribeFlowDefinitionRequest { /** - *

          The ResourceLimits object that specifies the maximum number of - * training jobs and parallel training jobs allowed for this tuning job.

          + *

          The name of the flow definition.

          */ - ResourceLimits?: ResourceLimits; + FlowDefinitionName: string | undefined; } -export namespace HyperParameterTuningJobSummary { - export const filterSensitiveLog = (obj: HyperParameterTuningJobSummary): any => ({ +export namespace DescribeFlowDefinitionRequest { + export const filterSensitiveLog = (obj: DescribeFlowDefinitionRequest): any => ({ ...obj, }); } -/** - *

          A SageMaker image. A SageMaker image represents a set of container images that are derived from - * a common base container image. Each of these container images is represented by a SageMaker - * ImageVersion.

          - */ -export interface Image { +export enum FlowDefinitionStatus { + ACTIVE = "Active", + DELETING = "Deleting", + FAILED = "Failed", + INITIALIZING = "Initializing", +} + +export interface DescribeFlowDefinitionResponse { /** - *

          When the image was created.

          + *

          The Amazon Resource Name (ARN) of the flow defintion.

          */ - CreationTime: Date | undefined; + FlowDefinitionArn: string | undefined; /** - *

          The description of the image.

          + *

          The Amazon Resource Name (ARN) of the flow definition.

          */ - Description?: string; + FlowDefinitionName: string | undefined; /** - *

          The name of the image as displayed.

          + *

          The status of the flow definition. Valid values are listed below.

          */ - DisplayName?: string; + FlowDefinitionStatus: FlowDefinitionStatus | string | undefined; /** - *

          When a create, update, or delete operation fails, the reason for the failure.

          + *

          The timestamp when the flow definition was created.

          */ - FailureReason?: string; + CreationTime: Date | undefined; /** - *

          The Amazon Resource Name (ARN) of the image.

          + *

          Container for configuring the source of human task requests. Used to specify if + * Amazon Rekognition or Amazon Textract is used as an integration source.

          */ - ImageArn: string | undefined; + HumanLoopRequestSource?: HumanLoopRequestSource; /** - *

          The name of the image.

          + *

          An object containing information about what triggers a human review workflow.

          */ - ImageName: string | undefined; + HumanLoopActivationConfig?: HumanLoopActivationConfig; /** - *

          The status of the image.

          + *

          An object containing information about who works on the task, the workforce task price, and other task details.

          */ - ImageStatus: ImageStatus | string | undefined; + HumanLoopConfig: HumanLoopConfig | undefined; + + /** + *

          An object containing information about the output file.

          + */ + OutputConfig: FlowDefinitionOutputConfig | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) execution role for the flow definition.

          + */ + RoleArn: string | undefined; /** - *

          When the image was last modified.

          + *

          The reason your flow definition failed.

          */ - LastModifiedTime: Date | undefined; + FailureReason?: string; } -export namespace Image { - export const filterSensitiveLog = (obj: Image): any => ({ +export namespace DescribeFlowDefinitionResponse { + export const filterSensitiveLog = (obj: DescribeFlowDefinitionResponse): any => ({ ...obj, }); } -export enum ImageSortBy { - CREATION_TIME = "CREATION_TIME", - IMAGE_NAME = "IMAGE_NAME", - LAST_MODIFIED_TIME = "LAST_MODIFIED_TIME", +export interface DescribeHumanTaskUiRequest { + /** + *

          The name of the human task user interface + * (worker task template) you want information about.

          + */ + HumanTaskUiName: string | undefined; } -export enum ImageSortOrder { - ASCENDING = "ASCENDING", - DESCENDING = "DESCENDING", +export namespace DescribeHumanTaskUiRequest { + export const filterSensitiveLog = (obj: DescribeHumanTaskUiRequest): any => ({ + ...obj, + }); +} + +export enum HumanTaskUiStatus { + ACTIVE = "Active", + DELETING = "Deleting", } /** - *

          A version of a SageMaker Image. A version represents an existing container - * image.

          + *

          Container for user interface template information.

          */ -export interface ImageVersion { +export interface UiTemplateInfo { /** - *

          When the version was created.

          + *

          The URL for the user interface template.

          */ - CreationTime: Date | undefined; + Url?: string; /** - *

          When a create or delete operation fails, the reason for the failure.

          + *

          The SHA-256 digest of the contents of the template.

          */ - FailureReason?: string; + ContentSha256?: string; +} + +export namespace UiTemplateInfo { + export const filterSensitiveLog = (obj: UiTemplateInfo): any => ({ + ...obj, + }); +} +export interface DescribeHumanTaskUiResponse { /** - *

          The Amazon Resource Name (ARN) of the image the version is based on.

          + *

          The Amazon Resource Name (ARN) of the human task user interface (worker task template).

          */ - ImageArn: string | undefined; + HumanTaskUiArn: string | undefined; /** - *

          The ARN of the version.

          + *

          The name of the human task user interface (worker task template).

          */ - ImageVersionArn: string | undefined; + HumanTaskUiName: string | undefined; /** - *

          The status of the version.

          + *

          The status of the human task user interface (worker task template). Valid values are listed below.

          */ - ImageVersionStatus: ImageVersionStatus | string | undefined; + HumanTaskUiStatus?: HumanTaskUiStatus | string; /** - *

          When the version was last modified.

          + *

          The timestamp when the human task user interface was created.

          */ - LastModifiedTime: Date | undefined; + CreationTime: Date | undefined; /** - *

          The version number.

          + *

          Container for user interface template information.

          */ - Version: number | undefined; + UiTemplate: UiTemplateInfo | undefined; } -export namespace ImageVersion { - export const filterSensitiveLog = (obj: ImageVersion): any => ({ +export namespace DescribeHumanTaskUiResponse { + export const filterSensitiveLog = (obj: DescribeHumanTaskUiResponse): any => ({ ...obj, }); } -export enum ImageVersionSortBy { - CREATION_TIME = "CREATION_TIME", - LAST_MODIFIED_TIME = "LAST_MODIFIED_TIME", - VERSION = "VERSION", +export interface DescribeHyperParameterTuningJobRequest { + /** + *

          The name of the tuning job.

          + */ + HyperParameterTuningJobName: string | undefined; } -export enum ImageVersionSortOrder { - ASCENDING = "ASCENDING", - DESCENDING = "DESCENDING", +export namespace DescribeHyperParameterTuningJobRequest { + export const filterSensitiveLog = (obj: DescribeHyperParameterTuningJobRequest): any => ({ + ...obj, + }); } /** - *

          Provides counts for human-labeled tasks in the labeling job.

          + *

          Shows the final value for the + * objective + * metric for a training job that was launched by a hyperparameter + * tuning job. You define the objective metric in the + * HyperParameterTuningJobObjective parameter of HyperParameterTuningJobConfig.

          */ -export interface LabelCountersForWorkteam { +export interface FinalHyperParameterTuningJobObjectiveMetric { /** - *

          The total number of data objects labeled by a human worker.

          + *

          Whether to + * minimize + * or maximize the objective metric. Valid values are Minimize and + * Maximize.

          */ - HumanLabeled?: number; + Type?: HyperParameterTuningJobObjectiveType | string; /** - *

          The total number of data objects that need to be labeled by a human worker.

          + *

          The name of the + * objective + * metric.

          */ - PendingHuman?: number; + MetricName: string | undefined; /** - *

          The total number of tasks in the labeling job.

          + *

          The value of the objective metric.

          */ - Total?: number; + Value: number | undefined; } -export namespace LabelCountersForWorkteam { - export const filterSensitiveLog = (obj: LabelCountersForWorkteam): any => ({ +export namespace FinalHyperParameterTuningJobObjectiveMetric { + export const filterSensitiveLog = (obj: FinalHyperParameterTuningJobObjectiveMetric): any => ({ ...obj, }); } +export enum TrainingJobStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", +} + /** - *

          Provides summary information for a work team.

          + *

          Specifies + * summary information about a training job.

          */ -export interface LabelingJobForWorkteamSummary { +export interface HyperParameterTrainingJobSummary { /** - *

          The name of the labeling job that the work team is assigned to.

          + *

          The training job definition name.

          */ - LabelingJobName?: string; + TrainingJobDefinitionName?: string; /** - *

          A unique identifier for a labeling job. You can use this to refer to a specific - * labeling job.

          + *

          The name of the training job.

          */ - JobReferenceCode: string | undefined; + TrainingJobName: string | undefined; /** - *

          + *

          The + * Amazon + * Resource Name (ARN) of the training job.

          */ - WorkRequesterAccountId: string | undefined; + TrainingJobArn: string | undefined; /** - *

          The date and time that the labeling job was created.

          + *

          The HyperParameter tuning job that launched the training job.

          + */ + TuningJobName?: string; + + /** + *

          The date and time that the training job was created.

          */ CreationTime: Date | undefined; /** - *

          Provides information about the progress of a labeling job.

          + *

          The date and time that the training job started.

          */ - LabelCounters?: LabelCountersForWorkteam; + TrainingStartTime?: Date; /** - *

          The configured number of workers per data object.

          + *

          Specifies the time when the training job ends on training instances. You are billed + * for the time interval between the value of TrainingStartTime and this time. + * For successful jobs and stopped jobs, this is the time after model artifacts are + * uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          */ - NumberOfHumanWorkersPerDataObject?: number; -} + TrainingEndTime?: Date; -export namespace LabelingJobForWorkteamSummary { - export const filterSensitiveLog = (obj: LabelingJobForWorkteamSummary): any => ({ - ...obj, - }); -} + /** + *

          The + * status + * of the training job.

          + */ + TrainingJobStatus: TrainingJobStatus | string | undefined; -/** - *

          Provides summary information about a labeling job.

          - */ -export interface LabelingJobSummary { /** - *

          The name of the labeling job.

          + *

          A + * list of the hyperparameters for which you specified ranges to + * search.

          */ - LabelingJobName: string | undefined; + TunedHyperParameters: { [key: string]: string } | undefined; /** - *

          The Amazon Resource Name (ARN) assigned to the labeling job when it was - * created.

          + *

          The + * reason that the training job failed. + *

          */ - LabelingJobArn: string | undefined; + FailureReason?: string; /** - *

          The date and time that the job was created (timestamp).

          + *

          The FinalHyperParameterTuningJobObjectiveMetric object that + * specifies the + * value + * of the + * objective + * metric of the tuning job that launched this training job.

          */ - CreationTime: Date | undefined; + FinalHyperParameterTuningJobObjectiveMetric?: FinalHyperParameterTuningJobObjectiveMetric; /** - *

          The date and time that the job was last modified (timestamp).

          + *

          The status of the objective metric for the training job:

          + *
            + *
          • + *

            Succeeded: The + * final + * objective metric for the training job was evaluated by the + * hyperparameter tuning job and + * used + * in the hyperparameter tuning process.

            + *
          • + *
          + *
            + *
          • + *

            Pending: The training job is in progress and evaluation of its final objective + * metric is pending.

            + *
          • + *
          + *
            + *
          • + *

            Failed: + * The final objective metric for the training job was not evaluated, and was not + * used in the hyperparameter tuning process. This typically occurs when the + * training job failed or did not emit an objective + * metric.

            + *
          • + *
          */ - LastModifiedTime: Date | undefined; + ObjectiveStatus?: ObjectiveStatus | string; +} + +export namespace HyperParameterTrainingJobSummary { + export const filterSensitiveLog = (obj: HyperParameterTrainingJobSummary): any => ({ + ...obj, + }); +} +export enum HyperParameterTuningJobStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", +} + +/** + *

          Specifies the number of training jobs that this hyperparameter tuning job launched, + * categorized by the status of their objective metric. The objective metric status shows + * whether the + * final + * objective metric for the training job has been evaluated by the + * tuning job and used in the hyperparameter tuning process.

          + */ +export interface ObjectiveStatusCounters { /** - *

          The current status of the labeling job.

          + *

          The number of training jobs whose final objective metric was evaluated by the + * hyperparameter tuning job and used in the hyperparameter tuning process.

          */ - LabelingJobStatus: LabelingJobStatus | string | undefined; + Succeeded?: number; /** - *

          Counts showing the progress of the labeling job.

          + *

          The number of training jobs that are in progress and pending evaluation of their final + * objective metric.

          */ - LabelCounters: LabelCounters | undefined; + Pending?: number; /** - *

          The Amazon Resource Name (ARN) of the work team assigned to the job.

          + *

          The number of training jobs whose final objective metric was not evaluated and used in + * the hyperparameter tuning process. This typically occurs when the training job failed or + * did not emit an objective metric.

          */ - WorkteamArn: string | undefined; + Failed?: number; +} + +export namespace ObjectiveStatusCounters { + export const filterSensitiveLog = (obj: ObjectiveStatusCounters): any => ({ + ...obj, + }); +} +/** + *

          The numbers of training jobs launched by a hyperparameter tuning job, categorized by + * status.

          + */ +export interface TrainingJobStatusCounters { /** - *

          The Amazon Resource Name (ARN) of a Lambda function. The function is run before each - * data object is sent to a worker.

          + *

          The number of completed training jobs launched by the hyperparameter tuning + * job.

          */ - PreHumanTaskLambdaArn: string | undefined; + Completed?: number; /** - *

          The Amazon Resource Name (ARN) of the Lambda function used to consolidate the - * annotations from individual workers into a label for a data object. For more - * information, see Annotation - * Consolidation.

          + *

          The number of in-progress training jobs launched by a hyperparameter tuning + * job.

          */ - AnnotationConsolidationLambdaArn?: string; + InProgress?: number; /** - *

          If the LabelingJobStatus field is Failed, this field - * contains a description of the error.

          + *

          The number of training jobs that failed, but can be retried. A failed training job can + * be retried only if it failed because an internal service error occurred.

          */ - FailureReason?: string; + RetryableError?: number; /** - *

          The location of the output produced by the labeling job.

          + *

          The number of training jobs that failed and can't be retried. A failed training job + * can't be retried if it failed because a client error occurred.

          */ - LabelingJobOutput?: LabelingJobOutput; + NonRetryableError?: number; /** - *

          Input configuration for the labeling job.

          + *

          The number of training jobs launched by a hyperparameter tuning job that were + * manually + * stopped.

          */ - InputConfig?: LabelingJobInputConfig; + Stopped?: number; } -export namespace LabelingJobSummary { - export const filterSensitiveLog = (obj: LabelingJobSummary): any => ({ +export namespace TrainingJobStatusCounters { + export const filterSensitiveLog = (obj: TrainingJobStatusCounters): any => ({ ...obj, }); } -export enum SortOrder { - ASCENDING = "Ascending", - DESCENDING = "Descending", -} - -export interface ListAlgorithmsInput { - /** - *

          A filter that returns only algorithms created after the specified time - * (timestamp).

          - */ - CreationTimeAfter?: Date; - +export interface DescribeHyperParameterTuningJobResponse { /** - *

          A filter that returns only algorithms created before the specified time - * (timestamp).

          + *

          The name of the tuning job.

          */ - CreationTimeBefore?: Date; + HyperParameterTuningJobName: string | undefined; /** - *

          The maximum number of algorithms to return in the response.

          + *

          The + * Amazon Resource Name (ARN) of the tuning job.

          */ - MaxResults?: number; + HyperParameterTuningJobArn: string | undefined; /** - *

          A string in the algorithm name. This filter returns only algorithms whose name - * contains the specified string.

          + *

          The HyperParameterTuningJobConfig object that specifies the + * configuration of the tuning job.

          */ - NameContains?: string; + HyperParameterTuningJobConfig: HyperParameterTuningJobConfig | undefined; /** - *

          If the response to a previous ListAlgorithms request was truncated, the - * response includes a NextToken. To retrieve the next set of algorithms, use - * the token in the next request.

          + *

          The HyperParameterTrainingJobDefinition object that specifies the + * definition of the training jobs that this tuning job launches.

          */ - NextToken?: string; + TrainingJobDefinition?: HyperParameterTrainingJobDefinition; /** - *

          The parameter by which to sort the results. The default is - * CreationTime.

          + *

          A list of the HyperParameterTrainingJobDefinition objects launched + * for this tuning job.

          */ - SortBy?: AlgorithmSortBy | string; + TrainingJobDefinitions?: HyperParameterTrainingJobDefinition[]; /** - *

          The sort order for the results. The default is Ascending.

          + *

          The status of the tuning job: InProgress, Completed, Failed, Stopping, or + * Stopped.

          */ - SortOrder?: SortOrder | string; -} - -export namespace ListAlgorithmsInput { - export const filterSensitiveLog = (obj: ListAlgorithmsInput): any => ({ - ...obj, - }); -} + HyperParameterTuningJobStatus: HyperParameterTuningJobStatus | string | undefined; -export interface ListAlgorithmsOutput { /** - *

          >An array of AlgorithmSummary objects, each of which lists an - * algorithm.

          + *

          The date and time that the tuning job started.

          */ - AlgorithmSummaryList: AlgorithmSummary[] | undefined; + CreationTime: Date | undefined; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * algorithms, use it in the subsequent request.

          + *

          The date and time that the tuning job ended.

          */ - NextToken?: string; -} - -export namespace ListAlgorithmsOutput { - export const filterSensitiveLog = (obj: ListAlgorithmsOutput): any => ({ - ...obj, - }); -} + HyperParameterTuningEndTime?: Date; -export interface ListAppImageConfigsRequest { /** - *

          The maximum number of AppImageConfigs to return in the response. The default value is - * 10.

          + *

          The date and time that the status of the tuning job was modified.

          */ - MaxResults?: number; + LastModifiedTime?: Date; /** - *

          If the previous call to ListImages didn't return the full set of - * AppImageConfigs, the call returns a token for getting the next set of AppImageConfigs.

          + *

          The TrainingJobStatusCounters object that specifies the number of + * training jobs, categorized by status, that this tuning job launched.

          */ - NextToken?: string; + TrainingJobStatusCounters: TrainingJobStatusCounters | undefined; /** - *

          A filter that returns only AppImageConfigs whose name contains the specified string.

          + *

          The ObjectiveStatusCounters object that specifies the number of + * training jobs, categorized by the status of their final objective metric, that this + * tuning job launched.

          */ - NameContains?: string; + ObjectiveStatusCounters: ObjectiveStatusCounters | undefined; /** - *

          A filter that returns only AppImageConfigs created on or before the specified time.

          + *

          A TrainingJobSummary object that describes the training job that + * completed with the best current HyperParameterTuningJobObjective.

          */ - CreationTimeBefore?: Date; + BestTrainingJob?: HyperParameterTrainingJobSummary; /** - *

          A filter that returns only AppImageConfigs created on or after the specified time.

          + *

          If the hyperparameter tuning job is an warm start tuning job with a + * WarmStartType of IDENTICAL_DATA_AND_ALGORITHM, this is the + * TrainingJobSummary for the training job with the best objective + * metric value of all training jobs launched by this tuning job and all parent jobs + * specified for the warm start tuning job.

          */ - CreationTimeAfter?: Date; + OverallBestTrainingJob?: HyperParameterTrainingJobSummary; /** - *

          A filter that returns only AppImageConfigs modified on or before the specified time.

          + *

          The configuration for starting the hyperparameter parameter tuning job using one or + * more previous tuning jobs as a starting point. The results of previous tuning jobs are + * used to inform which combinations of hyperparameters to search over in the new tuning + * job.

          */ - ModifiedTimeBefore?: Date; + WarmStartConfig?: HyperParameterTuningJobWarmStartConfig; /** - *

          A filter that returns only AppImageConfigs modified on or after the specified time.

          + *

          If the tuning job failed, the reason it failed.

          */ - ModifiedTimeAfter?: Date; + FailureReason?: string; +} - /** - *

          The property used to sort results. The default value is CreationTime.

          - */ - SortBy?: AppImageConfigSortKey | string; +export namespace DescribeHyperParameterTuningJobResponse { + export const filterSensitiveLog = (obj: DescribeHyperParameterTuningJobResponse): any => ({ + ...obj, + }); +} +export interface DescribeImageRequest { /** - *

          The sort order. The default value is Descending.

          + *

          The name of the image to describe.

          */ - SortOrder?: SortOrder | string; + ImageName: string | undefined; } -export namespace ListAppImageConfigsRequest { - export const filterSensitiveLog = (obj: ListAppImageConfigsRequest): any => ({ +export namespace DescribeImageRequest { + export const filterSensitiveLog = (obj: DescribeImageRequest): any => ({ ...obj, }); } -export interface ListAppImageConfigsResponse { +export enum ImageStatus { + CREATED = "CREATED", + CREATE_FAILED = "CREATE_FAILED", + CREATING = "CREATING", + DELETE_FAILED = "DELETE_FAILED", + DELETING = "DELETING", + UPDATE_FAILED = "UPDATE_FAILED", + UPDATING = "UPDATING", +} + +export interface DescribeImageResponse { /** - *

          A token for getting the next set of AppImageConfigs, if there are any.

          + *

          When the image was created.

          */ - NextToken?: string; + CreationTime?: Date; /** - *

          A list of AppImageConfigs and their properties.

          + *

          The description of the image.

          */ - AppImageConfigs?: AppImageConfigDetails[]; -} + Description?: string; -export namespace ListAppImageConfigsResponse { - export const filterSensitiveLog = (obj: ListAppImageConfigsResponse): any => ({ - ...obj, - }); -} + /** + *

          The name of the image as displayed.

          + */ + DisplayName?: string; -export interface ListAppsRequest { /** - *

          If the previous response was truncated, you will receive this token. - * Use it in your next request to receive the next set of results.

          + *

          When a create, update, or delete operation fails, the reason for the failure.

          */ - NextToken?: string; + FailureReason?: string; /** - *

          Returns a list up to a specified limit.

          + *

          The Amazon Resource Name (ARN) of the image.

          */ - MaxResults?: number; + ImageArn?: string; /** - *

          The sort order for the results. The default is Ascending.

          + *

          The name of the image.

          */ - SortOrder?: SortOrder | string; + ImageName?: string; /** - *

          The parameter by which to sort the results. The default is CreationTime.

          + *

          The status of the image.

          */ - SortBy?: AppSortKey | string; + ImageStatus?: ImageStatus | string; /** - *

          A parameter to search for the domain ID.

          + *

          When the image was last modified.

          */ - DomainIdEquals?: string; + LastModifiedTime?: Date; /** - *

          A parameter to search by user profile name.

          + *

          The Amazon Resource Name (ARN) of the IAM role that enables Amazon SageMaker to perform tasks on your behalf.

          */ - UserProfileNameEquals?: string; + RoleArn?: string; } -export namespace ListAppsRequest { - export const filterSensitiveLog = (obj: ListAppsRequest): any => ({ +export namespace DescribeImageResponse { + export const filterSensitiveLog = (obj: DescribeImageResponse): any => ({ ...obj, }); } -export interface ListAppsResponse { +export interface DescribeImageVersionRequest { /** - *

          The list of apps.

          + *

          The name of the image.

          */ - Apps?: AppDetails[]; + ImageName: string | undefined; /** - *

          If the previous response was truncated, you will receive this token. - * Use it in your next request to receive the next set of results.

          + *

          The version of the image. If not specified, the latest version is described.

          */ - NextToken?: string; + Version?: number; } -export namespace ListAppsResponse { - export const filterSensitiveLog = (obj: ListAppsResponse): any => ({ +export namespace DescribeImageVersionRequest { + export const filterSensitiveLog = (obj: DescribeImageVersionRequest): any => ({ ...obj, }); } -export interface ListAutoMLJobsRequest { - /** - *

          Request a list of jobs, using a filter for time.

          - */ - CreationTimeAfter?: Date; +export enum ImageVersionStatus { + CREATED = "CREATED", + CREATE_FAILED = "CREATE_FAILED", + CREATING = "CREATING", + DELETE_FAILED = "DELETE_FAILED", + DELETING = "DELETING", +} +export interface DescribeImageVersionResponse { /** - *

          Request a list of jobs, using a filter for time.

          + *

          The registry path of the container image on which this image version is based.

          */ - CreationTimeBefore?: Date; + BaseImage?: string; /** - *

          Request a list of jobs, using a filter for time.

          + *

          The registry path of the container image that contains this image version.

          */ - LastModifiedTimeAfter?: Date; + ContainerImage?: string; /** - *

          Request a list of jobs, using a filter for time.

          + *

          When the version was created.

          */ - LastModifiedTimeBefore?: Date; + CreationTime?: Date; /** - *

          Request a list of jobs, using a search filter for name.

          + *

          When a create or delete operation fails, the reason for the failure.

          */ - NameContains?: string; + FailureReason?: string; /** - *

          Request a list of jobs, using a filter for status.

          + *

          The Amazon Resource Name (ARN) of the image the version is based on.

          */ - StatusEquals?: AutoMLJobStatus | string; + ImageArn?: string; /** - *

          The sort order for the results. The default is Descending.

          + *

          The ARN of the version.

          */ - SortOrder?: AutoMLSortOrder | string; + ImageVersionArn?: string; /** - *

          The parameter by which to sort the results. The default is AutoMLJobName.

          + *

          The status of the version.

          */ - SortBy?: AutoMLSortBy | string; + ImageVersionStatus?: ImageVersionStatus | string; /** - *

          Request a list of jobs up to a specified limit.

          + *

          When the version was last modified.

          */ - MaxResults?: number; + LastModifiedTime?: Date; /** - *

          If the previous response was truncated, you receive this token. Use it in your next - * request to receive the next set of results.

          + *

          The version number.

          */ - NextToken?: string; + Version?: number; } -export namespace ListAutoMLJobsRequest { - export const filterSensitiveLog = (obj: ListAutoMLJobsRequest): any => ({ +export namespace DescribeImageVersionResponse { + export const filterSensitiveLog = (obj: DescribeImageVersionResponse): any => ({ ...obj, }); } -export interface ListAutoMLJobsResponse { - /** - *

          Returns a summary list of jobs.

          - */ - AutoMLJobSummaries: AutoMLJobSummary[] | undefined; - +export interface DescribeLabelingJobRequest { /** - *

          If the previous response was truncated, you receive this token. Use it in your next - * request to receive the next set of results.

          + *

          The name of the labeling job to return information for.

          */ - NextToken?: string; + LabelingJobName: string | undefined; } -export namespace ListAutoMLJobsResponse { - export const filterSensitiveLog = (obj: ListAutoMLJobsResponse): any => ({ +export namespace DescribeLabelingJobRequest { + export const filterSensitiveLog = (obj: DescribeLabelingJobRequest): any => ({ ...obj, }); } -export interface ListCandidatesForAutoMLJobRequest { +/** + *

          Provides a breakdown of the number of objects labeled.

          + */ +export interface LabelCounters { /** - *

          List the Candidates created for the job by providing the job's name.

          + *

          The total number of objects labeled.

          */ - AutoMLJobName: string | undefined; + TotalLabeled?: number; /** - *

          List the Candidates for the job and filter by status.

          + *

          The total number of objects labeled by a human worker.

          */ - StatusEquals?: CandidateStatus | string; + HumanLabeled?: number; /** - *

          List the Candidates for the job and filter by candidate name.

          + *

          The total number of objects labeled by automated data labeling.

          */ - CandidateNameEquals?: string; + MachineLabeled?: number; /** - *

          The sort order for the results. The default is Ascending.

          + *

          The total number of objects that could not be labeled due to an error.

          */ - SortOrder?: AutoMLSortOrder | string; + FailedNonRetryableError?: number; /** - *

          The parameter by which to sort the results. The default is Descending.

          + *

          The total number of objects not yet labeled.

          */ - SortBy?: CandidateSortBy | string; + Unlabeled?: number; +} + +export namespace LabelCounters { + export const filterSensitiveLog = (obj: LabelCounters): any => ({ + ...obj, + }); +} +/** + *

          Specifies the location of the output produced by the labeling job.

          + */ +export interface LabelingJobOutput { /** - *

          List the job's Candidates up to a specified limit.

          + *

          The Amazon S3 bucket location of the manifest file for labeled data.

          */ - MaxResults?: number; + OutputDatasetS3Uri: string | undefined; /** - *

          If the previous response was truncated, you receive this token. Use it in your next - * request to receive the next set of results.

          + *

          The Amazon Resource Name (ARN) for the most recent Amazon SageMaker model trained as part of + * automated data labeling.

          */ - NextToken?: string; + FinalActiveLearningModelArn?: string; } -export namespace ListCandidatesForAutoMLJobRequest { - export const filterSensitiveLog = (obj: ListCandidatesForAutoMLJobRequest): any => ({ +export namespace LabelingJobOutput { + export const filterSensitiveLog = (obj: LabelingJobOutput): any => ({ ...obj, }); } -export interface ListCandidatesForAutoMLJobResponse { +export enum LabelingJobStatus { + COMPLETED = "Completed", + FAILED = "Failed", + INITIALIZING = "Initializing", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", +} + +export interface DescribeLabelingJobResponse { /** - *

          Summaries about the Candidates.

          + *

          The processing status of the labeling job.

          */ - Candidates: AutoMLCandidate[] | undefined; + LabelingJobStatus: LabelingJobStatus | string | undefined; /** - *

          If the previous response was truncated, you receive this token. Use it in your next - * request to receive the next set of results.

          + *

          Provides a breakdown of the number of data objects labeled by humans, the number of + * objects labeled by machine, the number of objects than couldn't be labeled, and the + * total number of objects labeled.

          */ - NextToken?: string; -} - -export namespace ListCandidatesForAutoMLJobResponse { - export const filterSensitiveLog = (obj: ListCandidatesForAutoMLJobResponse): any => ({ - ...obj, - }); -} + LabelCounters: LabelCounters | undefined; -export interface ListCodeRepositoriesInput { /** - *

          A filter that returns only Git repositories that were created after the specified - * time.

          + *

          If the job failed, the reason that it failed.

          */ - CreationTimeAfter?: Date; + FailureReason?: string; /** - *

          A filter that returns only Git repositories that were created before the specified - * time.

          + *

          The date and time that the labeling job was created.

          */ - CreationTimeBefore?: Date; + CreationTime: Date | undefined; /** - *

          A filter that returns only Git repositories that were last modified after the - * specified time.

          + *

          The date and time that the labeling job was last updated.

          */ - LastModifiedTimeAfter?: Date; + LastModifiedTime: Date | undefined; /** - *

          A filter that returns only Git repositories that were last modified before the - * specified time.

          + *

          A unique identifier for work done as part of a labeling job.

          */ - LastModifiedTimeBefore?: Date; + JobReferenceCode: string | undefined; /** - *

          The maximum number of Git repositories to return in the response.

          + *

          The name assigned to the labeling job when it was created.

          */ - MaxResults?: number; + LabelingJobName: string | undefined; /** - *

          A string in the Git repositories name. This filter returns only repositories whose - * name contains the specified string.

          + *

          The Amazon Resource Name (ARN) of the labeling job.

          */ - NameContains?: string; + LabelingJobArn: string | undefined; /** - *

          If the result of a ListCodeRepositoriesOutput request was truncated, the - * response includes a NextToken. To get the next set of Git repositories, use - * the token in the next request.

          + *

          The attribute used as the label in the output manifest file.

          */ - NextToken?: string; + LabelAttributeName?: string; /** - *

          The field to sort results by. The default is Name.

          + *

          Input configuration information for the labeling job, such as the Amazon S3 location of the + * data objects and the location of the manifest file that describes the data + * objects.

          */ - SortBy?: CodeRepositorySortBy | string; + InputConfig: LabelingJobInputConfig | undefined; /** - *

          The sort order for results. The default is Ascending.

          + *

          The location of the job's output data and the AWS Key Management Service key ID for the key used to + * encrypt the output data, if any.

          */ - SortOrder?: CodeRepositorySortOrder | string; -} + OutputConfig: LabelingJobOutputConfig | undefined; -export namespace ListCodeRepositoriesInput { - export const filterSensitiveLog = (obj: ListCodeRepositoriesInput): any => ({ - ...obj, - }); -} + /** + *

          The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf + * during data labeling.

          + */ + RoleArn: string | undefined; -export interface ListCodeRepositoriesOutput { /** - *

          Gets a list of summaries of the Git repositories. Each summary specifies the following - * values for the repository:

          + *

          The S3 location of the JSON file that defines the categories used to label data + * objects. Please note the following label-category limits:

          *
            *
          • - *

            Name

            - *
          • - *
          • - *

            Amazon Resource Name (ARN)

            - *
          • - *
          • - *

            Creation time

            - *
          • - *
          • - *

            Last modified time

            + *

            Semantic segmentation labeling jobs using automated labeling: 20 labels

            *
          • *
          • - *

            Configuration information, including the URL location of the repository and - * the ARN of the AWS Secrets Manager secret that contains the credentials used - * to access the repository.

            + *

            Box bounding labeling jobs (all): 10 labels

            *
          • *
          + *

          The file is a JSON structure in the following format:

          + *

          + * { + *

          + *

          + * "document-version": "2018-11-28" + *

          + *

          + * "labels": [ + *

          + *

          + * { + *

          + *

          + * "label": "label 1" + *

          + *

          + * }, + *

          + *

          + * { + *

          + *

          + * "label": "label 2" + *

          + *

          + * }, + *

          + *

          + * ... + *

          + *

          + * { + *

          + *

          + * "label": "label n" + *

          + *

          + * } + *

          + *

          + * ] + *

          + *

          + * } + *

          */ - CodeRepositorySummaryList: CodeRepositorySummary[] | undefined; + LabelCategoryConfigS3Uri?: string; /** - *

          If the result of a ListCodeRepositoriesOutput request was truncated, the - * response includes a NextToken. To get the next set of Git repositories, use - * the token in the next request.

          + *

          A set of conditions for stopping a labeling job. If any of the conditions are met, the + * job is automatically stopped.

          */ - NextToken?: string; + StoppingConditions?: LabelingJobStoppingConditions; + + /** + *

          Configuration information for automated data labeling.

          + */ + LabelingJobAlgorithmsConfig?: LabelingJobAlgorithmsConfig; + + /** + *

          Configuration information required for human workers to complete a labeling + * task.

          + */ + HumanTaskConfig: HumanTaskConfig | undefined; + + /** + *

          An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

          + */ + Tags?: Tag[]; + + /** + *

          The location of the output produced by the labeling job.

          + */ + LabelingJobOutput?: LabelingJobOutput; } -export namespace ListCodeRepositoriesOutput { - export const filterSensitiveLog = (obj: ListCodeRepositoriesOutput): any => ({ +export namespace DescribeLabelingJobResponse { + export const filterSensitiveLog = (obj: DescribeLabelingJobResponse): any => ({ ...obj, }); } -export enum ListCompilationJobsSortBy { - CREATION_TIME = "CreationTime", - NAME = "Name", - STATUS = "Status", -} - -export interface ListCompilationJobsRequest { +export interface DescribeModelInput { /** - *

          If the result of the previous ListCompilationJobs request was truncated, - * the response includes a NextToken. To retrieve the next set of model - * compilation jobs, use the token in the next request.

          + *

          The name of the model.

          */ - NextToken?: string; + ModelName: string | undefined; +} - /** - *

          The maximum number of model compilation jobs to return in the response.

          - */ - MaxResults?: number; +export namespace DescribeModelInput { + export const filterSensitiveLog = (obj: DescribeModelInput): any => ({ + ...obj, + }); +} +export interface DescribeModelOutput { /** - *

          A filter that returns the model compilation jobs that were created after a specified - * time.

          + *

          Name of the Amazon SageMaker model.

          */ - CreationTimeAfter?: Date; + ModelName: string | undefined; /** - *

          A filter that returns the model compilation jobs that were created before a specified - * time.

          + *

          The location of the primary inference code, associated artifacts, and custom + * environment map that the inference code uses when it is deployed in production. + *

          */ - CreationTimeBefore?: Date; + PrimaryContainer?: ContainerDefinition; /** - *

          A filter that returns the model compilation jobs that were modified after a specified - * time.

          + *

          The containers in the inference pipeline.

          */ - LastModifiedTimeAfter?: Date; + Containers?: ContainerDefinition[]; /** - *

          A filter that returns the model compilation jobs that were modified before a specified - * time.

          + *

          The Amazon Resource Name (ARN) of the IAM role that you specified for the + * model.

          */ - LastModifiedTimeBefore?: Date; + ExecutionRoleArn: string | undefined; /** - *

          A filter that returns the model compilation jobs whose name contains a specified - * string.

          + *

          A VpcConfig object that specifies the VPC that this model has access + * to. For more information, see Protect Endpoints by Using an Amazon Virtual + * Private Cloud + *

          */ - NameContains?: string; + VpcConfig?: VpcConfig; /** - *

          A filter that retrieves model compilation jobs with a specific DescribeCompilationJobResponse$CompilationJobStatus status.

          + *

          A timestamp that shows when the model was created.

          */ - StatusEquals?: CompilationJobStatus | string; + CreationTime: Date | undefined; /** - *

          The field by which to sort results. The default is CreationTime.

          + *

          The Amazon Resource Name (ARN) of the model.

          */ - SortBy?: ListCompilationJobsSortBy | string; + ModelArn: string | undefined; /** - *

          The sort order for results. The default is Ascending.

          + *

          If True, no inbound or outbound network calls can be made to or from the + * model container.

          */ - SortOrder?: SortOrder | string; + EnableNetworkIsolation?: boolean; } -export namespace ListCompilationJobsRequest { - export const filterSensitiveLog = (obj: ListCompilationJobsRequest): any => ({ +export namespace DescribeModelOutput { + export const filterSensitiveLog = (obj: DescribeModelOutput): any => ({ ...obj, }); } -export interface ListCompilationJobsResponse { - /** - *

          An array of CompilationJobSummary objects, each describing a model - * compilation job.

          - */ - CompilationJobSummaries: CompilationJobSummary[] | undefined; - +export interface DescribeModelPackageInput { /** - *

          If the response is truncated, Amazon SageMaker returns this NextToken. To retrieve - * the next set of model compilation jobs, use this token in the next request.

          + *

          The name of the model package to describe.

          */ - NextToken?: string; + ModelPackageName: string | undefined; } -export namespace ListCompilationJobsResponse { - export const filterSensitiveLog = (obj: ListCompilationJobsResponse): any => ({ +export namespace DescribeModelPackageInput { + export const filterSensitiveLog = (obj: DescribeModelPackageInput): any => ({ ...obj, }); } -export interface ListDomainsRequest { +export enum ModelPackageStatus { + COMPLETED = "Completed", + DELETING = "Deleting", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + PENDING = "Pending", +} + +export enum DetailedModelPackageStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + NOT_STARTED = "NotStarted", +} + +/** + *

          Represents the overall status of a model package.

          + */ +export interface ModelPackageStatusItem { /** - *

          If the previous response was truncated, you will receive this token. - * Use it in your next request to receive the next set of results.

          + *

          The name of the model package for which the overall status is being reported.

          */ - NextToken?: string; + Name: string | undefined; /** - *

          Returns a list up to a specified limit.

          + *

          The current status.

          */ - MaxResults?: number; + Status: DetailedModelPackageStatus | string | undefined; + + /** + *

          if the overall status is Failed, the reason for the failure.

          + */ + FailureReason?: string; } -export namespace ListDomainsRequest { - export const filterSensitiveLog = (obj: ListDomainsRequest): any => ({ +export namespace ModelPackageStatusItem { + export const filterSensitiveLog = (obj: ModelPackageStatusItem): any => ({ ...obj, }); } -export interface ListDomainsResponse { +/** + *

          Specifies the validation and image scan statuses of the model package.

          + */ +export interface ModelPackageStatusDetails { /** - *

          The list of domains.

          + *

          The validation status of the model package.

          */ - Domains?: DomainDetails[]; + ValidationStatuses: ModelPackageStatusItem[] | undefined; /** - *

          If the previous response was truncated, you will receive this token. - * Use it in your next request to receive the next set of results.

          + *

          The status of the scan of the Docker image container for the model package.

          */ - NextToken?: string; + ImageScanStatuses?: ModelPackageStatusItem[]; } -export namespace ListDomainsResponse { - export const filterSensitiveLog = (obj: ListDomainsResponse): any => ({ +export namespace ModelPackageStatusDetails { + export const filterSensitiveLog = (obj: ModelPackageStatusDetails): any => ({ ...obj, }); } -export enum OrderKey { - Ascending = "Ascending", - Descending = "Descending", -} - -export interface ListEndpointConfigsInput { +export interface DescribeModelPackageOutput { /** - *

          The field to sort results by. The default is CreationTime.

          + *

          The name of the model package being described.

          */ - SortBy?: EndpointConfigSortKey | string; + ModelPackageName: string | undefined; /** - *

          The sort order for results. The default is Descending.

          + *

          If the model is a versioned model, the name of the model group that the versioned + * model belongs to.

          */ - SortOrder?: OrderKey | string; + ModelPackageGroupName?: string; /** - *

          If the result of the previous ListEndpointConfig request was - * truncated, the response includes a NextToken. To retrieve the next set of - * endpoint configurations, use the token in the next request.

          + *

          The version of the model package.

          */ - NextToken?: string; + ModelPackageVersion?: number; /** - *

          The maximum number of training jobs to return in the response.

          + *

          The Amazon Resource Name (ARN) of the model package.

          */ - MaxResults?: number; + ModelPackageArn: string | undefined; /** - *

          A string in the endpoint configuration name. This filter returns only endpoint - * configurations whose name contains the specified string.

          + *

          A brief summary of the model package.

          */ - NameContains?: string; + ModelPackageDescription?: string; /** - *

          A filter that returns only endpoint configurations created before the specified - * time (timestamp).

          + *

          A timestamp specifying when the model package was created.

          */ - CreationTimeBefore?: Date; + CreationTime: Date | undefined; /** - *

          A filter that returns only endpoint configurations with a creation time greater - * than or equal to the specified time (timestamp).

          + *

          Details about inference jobs that can be run with models based on this model + * package.

          */ - CreationTimeAfter?: Date; -} - -export namespace ListEndpointConfigsInput { - export const filterSensitiveLog = (obj: ListEndpointConfigsInput): any => ({ - ...obj, - }); -} + InferenceSpecification?: InferenceSpecification; -export interface ListEndpointConfigsOutput { /** - *

          An array of endpoint configurations.

          + *

          Details about the algorithm that was used to create the model package.

          */ - EndpointConfigs: EndpointConfigSummary[] | undefined; + SourceAlgorithmSpecification?: SourceAlgorithmSpecification; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * endpoint configurations, use it in the subsequent request

          + *

          Configurations for one or more transform jobs that Amazon SageMaker runs to test the model + * package.

          */ - NextToken?: string; -} - -export namespace ListEndpointConfigsOutput { - export const filterSensitiveLog = (obj: ListEndpointConfigsOutput): any => ({ - ...obj, - }); -} + ValidationSpecification?: ModelPackageValidationSpecification; -export interface ListEndpointsInput { /** - *

          Sorts the list of results. The default is CreationTime.

          + *

          The current status of the model package.

          */ - SortBy?: EndpointSortKey | string; + ModelPackageStatus: ModelPackageStatus | string | undefined; /** - *

          The sort order for results. The default is Descending.

          + *

          Details about the current status of the model package.

          */ - SortOrder?: OrderKey | string; + ModelPackageStatusDetails: ModelPackageStatusDetails | undefined; /** - *

          If the result of a ListEndpoints request was truncated, the response - * includes a NextToken. To retrieve the next set of endpoints, use the token - * in the next request.

          + *

          Whether the model package is certified for listing on AWS Marketplace.

          */ - NextToken?: string; + CertifyForMarketplace?: boolean; /** - *

          The maximum number of endpoints to return in the response.

          + *

          The approval status of the model package.

          */ - MaxResults?: number; + ModelApprovalStatus?: ModelApprovalStatus | string; /** - *

          A string in endpoint names. This filter returns only endpoints whose name contains - * the specified string.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - NameContains?: string; + CreatedBy?: UserContext; /** - *

          A filter that returns only endpoints that were created before the specified time - * (timestamp).

          + *

          Metadata properties of the tracking entity, trial, or trial component.

          */ - CreationTimeBefore?: Date; + MetadataProperties?: MetadataProperties; /** - *

          A filter that returns only endpoints with a creation time greater than or equal to - * the specified time (timestamp).

          + *

          Metrics for the model.

          */ - CreationTimeAfter?: Date; + ModelMetrics?: ModelMetrics; /** - *

          A filter that returns only endpoints that were modified before the specified - * timestamp.

          + *

          The last time the model package was modified.

          */ - LastModifiedTimeBefore?: Date; + LastModifiedTime?: Date; /** - *

          A filter that returns only endpoints that were modified after the specified - * timestamp.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - LastModifiedTimeAfter?: Date; + LastModifiedBy?: UserContext; /** - *

          A filter that returns only endpoints with the specified status.

          + *

          A description provided for the model approval.

          */ - StatusEquals?: EndpointStatus | string; + ApprovalDescription?: string; } -export namespace ListEndpointsInput { - export const filterSensitiveLog = (obj: ListEndpointsInput): any => ({ +export namespace DescribeModelPackageOutput { + export const filterSensitiveLog = (obj: DescribeModelPackageOutput): any => ({ ...obj, }); } -export interface ListEndpointsOutput { - /** - *

          An array or endpoint objects.

          - */ - Endpoints: EndpointSummary[] | undefined; - +export interface DescribeModelPackageGroupInput { /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * training jobs, use it in the subsequent request.

          + *

          The name of the model group to describe.

          */ - NextToken?: string; + ModelPackageGroupName: string | undefined; } -export namespace ListEndpointsOutput { - export const filterSensitiveLog = (obj: ListEndpointsOutput): any => ({ +export namespace DescribeModelPackageGroupInput { + export const filterSensitiveLog = (obj: DescribeModelPackageGroupInput): any => ({ ...obj, }); } -export enum SortExperimentsBy { - CREATION_TIME = "CreationTime", - NAME = "Name", +export enum ModelPackageGroupStatus { + COMPLETED = "Completed", + DELETE_FAILED = "DeleteFailed", + DELETING = "Deleting", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + PENDING = "Pending", } -export interface ListExperimentsRequest { +export interface DescribeModelPackageGroupOutput { /** - *

          A filter that returns only experiments created after the specified time.

          + *

          The name of the model group.

          */ - CreatedAfter?: Date; + ModelPackageGroupName: string | undefined; /** - *

          A filter that returns only experiments created before the specified time.

          + *

          The Amazon Resource Name (ARN) of the model group.

          */ - CreatedBefore?: Date; + ModelPackageGroupArn: string | undefined; /** - *

          The property used to sort results. The default value is CreationTime.

          + *

          A description of the model group.

          */ - SortBy?: SortExperimentsBy | string; + ModelPackageGroupDescription?: string; /** - *

          The sort order. The default value is Descending.

          + *

          The time that the model group was created.

          */ - SortOrder?: SortOrder | string; + CreationTime: Date | undefined; /** - *

          If the previous call to ListExperiments didn't return the full set of - * experiments, the call returns a token for getting the next set of experiments.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - NextToken?: string; + CreatedBy: UserContext | undefined; /** - *

          The maximum number of experiments to return in the response. The default value is - * 10.

          + *

          The status of the model group.

          */ - MaxResults?: number; + ModelPackageGroupStatus: ModelPackageGroupStatus | string | undefined; } -export namespace ListExperimentsRequest { - export const filterSensitiveLog = (obj: ListExperimentsRequest): any => ({ +export namespace DescribeModelPackageGroupOutput { + export const filterSensitiveLog = (obj: DescribeModelPackageGroupOutput): any => ({ ...obj, }); } -export interface ListExperimentsResponse { - /** - *

          A list of the summaries of your experiments.

          - */ - ExperimentSummaries?: ExperimentSummary[]; - +export interface DescribeMonitoringScheduleRequest { /** - *

          A token for getting the next set of experiments, if there are any.

          + *

          Name of a previously created monitoring schedule.

          */ - NextToken?: string; + MonitoringScheduleName: string | undefined; } -export namespace ListExperimentsResponse { - export const filterSensitiveLog = (obj: ListExperimentsResponse): any => ({ +export namespace DescribeMonitoringScheduleRequest { + export const filterSensitiveLog = (obj: DescribeMonitoringScheduleRequest): any => ({ ...obj, }); } -export interface ListFlowDefinitionsRequest { +export enum ExecutionStatus { + COMPLETED = "Completed", + COMPLETED_WITH_VIOLATIONS = "CompletedWithViolations", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + PENDING = "Pending", + STOPPED = "Stopped", + STOPPING = "Stopping", +} + +/** + *

          Summary of information about the last monitoring job to run.

          + */ +export interface MonitoringExecutionSummary { /** - *

          A filter that returns only flow definitions with a creation time greater than or equal to the specified timestamp.

          + *

          The name of the monitoring schedule.

          */ - CreationTimeAfter?: Date; + MonitoringScheduleName: string | undefined; /** - *

          A filter that returns only flow definitions that were created before the specified timestamp.

          + *

          The time the monitoring job was scheduled.

          */ - CreationTimeBefore?: Date; + ScheduledTime: Date | undefined; /** - *

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          + *

          The time at which the monitoring job was created.

          */ - SortOrder?: SortOrder | string; + CreationTime: Date | undefined; /** - *

          A token to resume pagination.

          + *

          A timestamp that indicates the last time the monitoring job was modified.

          */ - NextToken?: string; + LastModifiedTime: Date | undefined; /** - *

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          + *

          The status of the monitoring job.

          */ - MaxResults?: number; -} + MonitoringExecutionStatus: ExecutionStatus | string | undefined; -export namespace ListFlowDefinitionsRequest { - export const filterSensitiveLog = (obj: ListFlowDefinitionsRequest): any => ({ - ...obj, - }); -} + /** + *

          The Amazon Resource Name (ARN) of the monitoring job.

          + */ + ProcessingJobArn?: string; -export interface ListFlowDefinitionsResponse { /** - *

          An array of objects describing the flow definitions.

          + *

          The name of teh endpoint used to run the monitoring job.

          */ - FlowDefinitionSummaries: FlowDefinitionSummary[] | undefined; + EndpointName?: string; /** - *

          A token to resume pagination.

          + *

          Contains the reason a monitoring job failed, if it failed.

          */ - NextToken?: string; + FailureReason?: string; } -export namespace ListFlowDefinitionsResponse { - export const filterSensitiveLog = (obj: ListFlowDefinitionsResponse): any => ({ +export namespace MonitoringExecutionSummary { + export const filterSensitiveLog = (obj: MonitoringExecutionSummary): any => ({ ...obj, }); } -export interface ListHumanTaskUisRequest { +export enum ScheduleStatus { + FAILED = "Failed", + PENDING = "Pending", + SCHEDULED = "Scheduled", + STOPPED = "Stopped", +} + +export interface DescribeMonitoringScheduleResponse { + /** + *

          The Amazon Resource Name (ARN) of the monitoring schedule.

          + */ + MonitoringScheduleArn: string | undefined; + /** - *

          A filter that returns only human task user interfaces with a creation time greater than or equal to the specified timestamp.

          + *

          Name of the monitoring schedule.

          */ - CreationTimeAfter?: Date; + MonitoringScheduleName: string | undefined; /** - *

          A filter that returns only human task user interfaces that were created before the specified timestamp.

          + *

          The status of an monitoring job.

          */ - CreationTimeBefore?: Date; + MonitoringScheduleStatus: ScheduleStatus | string | undefined; /** - *

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          + *

          A string, up to one KB in size, that contains the reason a monitoring job failed, if it + * failed.

          */ - SortOrder?: SortOrder | string; + FailureReason?: string; + + /** + *

          The time at which the monitoring job was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          The time at which the monitoring job was last modified.

          + */ + LastModifiedTime: Date | undefined; + + /** + *

          The configuration object that specifies the monitoring schedule and defines the + * monitoring job.

          + */ + MonitoringScheduleConfig: MonitoringScheduleConfig | undefined; /** - *

          A token to resume pagination.

          + *

          The name of the endpoint for the monitoring job.

          */ - NextToken?: string; + EndpointName?: string; /** - *

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          + *

          Describes metadata on the last execution to run, if there was one.

          */ - MaxResults?: number; + LastMonitoringExecutionSummary?: MonitoringExecutionSummary; } -export namespace ListHumanTaskUisRequest { - export const filterSensitiveLog = (obj: ListHumanTaskUisRequest): any => ({ +export namespace DescribeMonitoringScheduleResponse { + export const filterSensitiveLog = (obj: DescribeMonitoringScheduleResponse): any => ({ ...obj, }); } -export interface ListHumanTaskUisResponse { - /** - *

          An array of objects describing the human task user interfaces.

          - */ - HumanTaskUiSummaries: HumanTaskUiSummary[] | undefined; - +export interface DescribeNotebookInstanceInput { /** - *

          A token to resume pagination.

          + *

          The name of the notebook instance that you want information about.

          */ - NextToken?: string; + NotebookInstanceName: string | undefined; } -export namespace ListHumanTaskUisResponse { - export const filterSensitiveLog = (obj: ListHumanTaskUisResponse): any => ({ +export namespace DescribeNotebookInstanceInput { + export const filterSensitiveLog = (obj: DescribeNotebookInstanceInput): any => ({ ...obj, }); } -export interface ListHyperParameterTuningJobsRequest { - /** - *

          If the result of the previous ListHyperParameterTuningJobs request was - * truncated, the response includes a NextToken. To retrieve the next set of - * tuning jobs, use the token in the next request.

          - */ - NextToken?: string; +export enum NotebookInstanceStatus { + Deleting = "Deleting", + Failed = "Failed", + InService = "InService", + Pending = "Pending", + Stopped = "Stopped", + Stopping = "Stopping", + Updating = "Updating", +} +export interface DescribeNotebookInstanceOutput { /** - *

          The - * maximum number of tuning jobs to return. The default value is - * 10.

          + *

          The Amazon Resource Name (ARN) of the notebook instance.

          */ - MaxResults?: number; + NotebookInstanceArn?: string; /** - *

          The - * field - * to sort results by. The default is Name.

          + *

          The name of the Amazon SageMaker notebook instance.

          */ - SortBy?: HyperParameterTuningJobSortByOptions | string; + NotebookInstanceName?: string; /** - *

          The sort - * order - * for results. The default is Ascending.

          + *

          The status of the notebook instance.

          */ - SortOrder?: SortOrder | string; + NotebookInstanceStatus?: NotebookInstanceStatus | string; /** - *

          A string in the tuning job name. This filter returns only tuning jobs whose name - * contains the specified string.

          + *

          If status is Failed, the reason it failed.

          */ - NameContains?: string; + FailureReason?: string; /** - *

          A filter that returns only tuning jobs that were created after the - * specified - * time.

          + *

          The URL that you use to connect to the Jupyter notebook that is running in your + * notebook instance.

          */ - CreationTimeAfter?: Date; + Url?: string; /** - *

          A filter that returns only tuning jobs that were created before the - * specified - * time.

          + *

          The type of ML compute instance running on the notebook instance.

          */ - CreationTimeBefore?: Date; + InstanceType?: _InstanceType | string; /** - *

          A filter that returns only tuning jobs that were modified after the specified - * time.

          + *

          The ID of the VPC subnet.

          */ - LastModifiedTimeAfter?: Date; + SubnetId?: string; /** - *

          A filter that returns only tuning jobs that were modified before the specified - * time.

          + *

          The IDs of the VPC security groups.

          */ - LastModifiedTimeBefore?: Date; + SecurityGroups?: string[]; /** - *

          A filter that returns only tuning jobs with the - * specified - * status.

          + *

          The Amazon Resource Name (ARN) of the IAM role associated with the instance. + *

          */ - StatusEquals?: HyperParameterTuningJobStatus | string; -} - -export namespace ListHyperParameterTuningJobsRequest { - export const filterSensitiveLog = (obj: ListHyperParameterTuningJobsRequest): any => ({ - ...obj, - }); -} + RoleArn?: string; -export interface ListHyperParameterTuningJobsResponse { /** - *

          A list of HyperParameterTuningJobSummary objects that - * describe - * the tuning jobs that the ListHyperParameterTuningJobs - * request returned.

          + *

          The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage + * volume attached to the instance.

          */ - HyperParameterTuningJobSummaries: HyperParameterTuningJobSummary[] | undefined; + KmsKeyId?: string; /** - *

          If the result of this ListHyperParameterTuningJobs request was truncated, - * the response includes a NextToken. To retrieve the next set of tuning jobs, - * use the token in the next request.

          + *

          The network interface IDs that Amazon SageMaker created at the time of creating the instance. + *

          */ - NextToken?: string; -} - -export namespace ListHyperParameterTuningJobsResponse { - export const filterSensitiveLog = (obj: ListHyperParameterTuningJobsResponse): any => ({ - ...obj, - }); -} + NetworkInterfaceId?: string; -export interface ListImagesRequest { /** - *

          A filter that returns only images created on or after the specified time.

          + *

          A timestamp. Use this parameter to retrieve the time when the notebook instance was + * last modified.

          */ - CreationTimeAfter?: Date; + LastModifiedTime?: Date; /** - *

          A filter that returns only images created on or before the specified time.

          + *

          A timestamp. Use this parameter to return the time when the notebook instance was + * created

          */ - CreationTimeBefore?: Date; + CreationTime?: Date; /** - *

          A filter that returns only images modified on or after the specified time.

          + *

          Returns the name of a notebook instance lifecycle configuration.

          + *

          For information about notebook instance lifestyle configurations, see Step + * 2.1: (Optional) Customize a Notebook Instance + *

          */ - LastModifiedTimeAfter?: Date; + NotebookInstanceLifecycleConfigName?: string; /** - *

          A filter that returns only images modified on or before the specified time.

          + *

          Describes whether Amazon SageMaker provides internet access to the notebook instance. If this + * value is set to Disabled, the notebook instance does not have + * internet access, and cannot connect to Amazon SageMaker training and endpoint services.

          + *

          For more information, see Notebook Instances Are Internet-Enabled by Default.

          */ - LastModifiedTimeBefore?: Date; + DirectInternetAccess?: DirectInternetAccess | string; /** - *

          The maximum number of images to return in the response. The default value is 10.

          + *

          The size, in GB, of the ML storage volume attached to the notebook instance.

          */ - MaxResults?: number; + VolumeSizeInGB?: number; /** - *

          A filter that returns only images whose name contains the specified string.

          + *

          A list of the Elastic Inference (EI) instance types associated with this notebook + * instance. Currently only one EI instance type can be associated with a notebook + * instance. For more information, see Using Elastic Inference in Amazon + * SageMaker.

          */ - NameContains?: string; + AcceleratorTypes?: (NotebookInstanceAcceleratorType | string)[]; /** - *

          If the previous call to ListImages didn't return the full set of images, - * the call returns a token for getting the next set of images.

          + *

          The Git repository associated with the notebook instance as its default code + * repository. This can be either the name of a Git repository stored as a resource in your + * account, or the URL of a Git repository in AWS CodeCommit or in any + * other Git repository. When you open a notebook instance, it opens in the directory that + * contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker + * Notebook Instances.

          */ - NextToken?: string; + DefaultCodeRepository?: string; /** - *

          The property used to sort results. The default value is CREATION_TIME.

          + *

          An array of up to three Git repositories associated with the notebook instance. These + * can be either the names of Git repositories stored as resources in your account, or the + * URL of Git repositories in AWS CodeCommit or in any + * other Git repository. These repositories are cloned at the same level as the default + * repository of your notebook instance. For more information, see Associating Git + * Repositories with Amazon SageMaker Notebook Instances.

          */ - SortBy?: ImageSortBy | string; + AdditionalCodeRepositories?: string[]; /** - *

          The sort order. The default value is DESCENDING.

          + *

          Whether root access is enabled or disabled for users of the notebook instance.

          + * + *

          Lifecycle configurations need root access to be able to set up a notebook + * instance. Because of this, lifecycle configurations associated with a notebook + * instance always run with root access even if you disable root access for + * users.

          + *
          */ - SortOrder?: ImageSortOrder | string; + RootAccess?: RootAccess | string; } -export namespace ListImagesRequest { - export const filterSensitiveLog = (obj: ListImagesRequest): any => ({ +export namespace DescribeNotebookInstanceOutput { + export const filterSensitiveLog = (obj: DescribeNotebookInstanceOutput): any => ({ ...obj, }); } -export interface ListImagesResponse { - /** - *

          A list of images and their properties.

          - */ - Images?: Image[]; - +export interface DescribeNotebookInstanceLifecycleConfigInput { /** - *

          A token for getting the next set of images, if there are any.

          + *

          The name of the lifecycle configuration to describe.

          */ - NextToken?: string; + NotebookInstanceLifecycleConfigName: string | undefined; } -export namespace ListImagesResponse { - export const filterSensitiveLog = (obj: ListImagesResponse): any => ({ +export namespace DescribeNotebookInstanceLifecycleConfigInput { + export const filterSensitiveLog = (obj: DescribeNotebookInstanceLifecycleConfigInput): any => ({ ...obj, }); } -export interface ListImageVersionsRequest { - /** - *

          A filter that returns only versions created on or after the specified time.

          - */ - CreationTimeAfter?: Date; - +export interface DescribeNotebookInstanceLifecycleConfigOutput { /** - *

          A filter that returns only versions created on or before the specified time.

          + *

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          */ - CreationTimeBefore?: Date; + NotebookInstanceLifecycleConfigArn?: string; /** - *

          The name of the image to list the versions of.

          + *

          The name of the lifecycle configuration.

          */ - ImageName: string | undefined; + NotebookInstanceLifecycleConfigName?: string; /** - *

          A filter that returns only versions modified on or after the specified time.

          + *

          The shell script that runs only once, when you create a notebook instance.

          */ - LastModifiedTimeAfter?: Date; + OnCreate?: NotebookInstanceLifecycleHook[]; /** - *

          A filter that returns only versions modified on or before the specified time.

          + *

          The shell script that runs every time you start a notebook instance, including when + * you create the notebook instance.

          */ - LastModifiedTimeBefore?: Date; + OnStart?: NotebookInstanceLifecycleHook[]; /** - *

          The maximum number of versions to return in the response. The default value is 10.

          + *

          A timestamp that tells when the lifecycle configuration was last modified.

          */ - MaxResults?: number; + LastModifiedTime?: Date; /** - *

          If the previous call to ListImageVersions didn't return the full set of - * versions, the call returns a token for getting the next set of versions.

          + *

          A timestamp that tells when the lifecycle configuration was created.

          */ - NextToken?: string; + CreationTime?: Date; +} - /** - *

          The property used to sort results. The default value is CREATION_TIME.

          - */ - SortBy?: ImageVersionSortBy | string; +export namespace DescribeNotebookInstanceLifecycleConfigOutput { + export const filterSensitiveLog = (obj: DescribeNotebookInstanceLifecycleConfigOutput): any => ({ + ...obj, + }); +} +export interface DescribePipelineRequest { /** - *

          The sort order. The default value is DESCENDING.

          + *

          The name of the pipeline to describe.

          */ - SortOrder?: ImageVersionSortOrder | string; + PipelineName: string | undefined; } -export namespace ListImageVersionsRequest { - export const filterSensitiveLog = (obj: ListImageVersionsRequest): any => ({ +export namespace DescribePipelineRequest { + export const filterSensitiveLog = (obj: DescribePipelineRequest): any => ({ ...obj, }); } -export interface ListImageVersionsResponse { +export enum PipelineStatus { + ACTIVE = "Active", +} + +export interface DescribePipelineResponse { /** - *

          A list of versions and their properties.

          + *

          The Amazon Resource Name (ARN) of the pipeline.

          */ - ImageVersions?: ImageVersion[]; + PipelineArn?: string; /** - *

          A token for getting the next set of versions, if there are any.

          + *

          The name of the pipeline.

          */ - NextToken?: string; -} - -export namespace ListImageVersionsResponse { - export const filterSensitiveLog = (obj: ListImageVersionsResponse): any => ({ - ...obj, - }); -} + PipelineName?: string; -export enum SortBy { - CREATION_TIME = "CreationTime", - NAME = "Name", - STATUS = "Status", -} + /** + *

          The display name of the pipeline.

          + */ + PipelineDisplayName?: string; -export interface ListLabelingJobsRequest { /** - *

          A filter that returns only labeling jobs created after the specified time - * (timestamp).

          + *

          The JSON pipeline definition.

          */ - CreationTimeAfter?: Date; + PipelineDefinition?: string; /** - *

          A filter that returns only labeling jobs created before the specified time - * (timestamp).

          + *

          The description of the pipeline.

          */ - CreationTimeBefore?: Date; + PipelineDescription?: string; /** - *

          A filter that returns only labeling jobs modified after the specified time - * (timestamp).

          + *

          The Amazon Resource Name (ARN) that the pipeline uses to execute.

          */ - LastModifiedTimeAfter?: Date; + RoleArn?: string; /** - *

          A filter that returns only labeling jobs modified before the specified time - * (timestamp).

          + *

          The status of the pipeline execution.

          */ - LastModifiedTimeBefore?: Date; + PipelineStatus?: PipelineStatus | string; /** - *

          The maximum number of labeling jobs to return in each page of the response.

          + *

          The time when the pipeline was created.

          */ - MaxResults?: number; + CreationTime?: Date; /** - *

          If the result of the previous ListLabelingJobs request was truncated, the - * response includes a NextToken. To retrieve the next set of labeling jobs, - * use the token in the next request.

          + *

          The time when the pipeline was last modified.

          */ - NextToken?: string; + LastModifiedTime?: Date; /** - *

          A string in the labeling job name. This filter returns only labeling jobs whose name - * contains the specified string.

          + *

          The time when the pipeline was last run.

          */ - NameContains?: string; + LastRunTime?: Date; /** - *

          The field to sort results by. The default is CreationTime.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - SortBy?: SortBy | string; + CreatedBy?: UserContext; /** - *

          The sort order for results. The default is Ascending.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - SortOrder?: SortOrder | string; + LastModifiedBy?: UserContext; +} +export namespace DescribePipelineResponse { + export const filterSensitiveLog = (obj: DescribePipelineResponse): any => ({ + ...obj, + }); +} + +export interface DescribePipelineDefinitionForExecutionRequest { /** - *

          A filter that retrieves only labeling jobs with a specific status.

          + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          */ - StatusEquals?: LabelingJobStatus | string; + PipelineExecutionArn: string | undefined; } -export namespace ListLabelingJobsRequest { - export const filterSensitiveLog = (obj: ListLabelingJobsRequest): any => ({ +export namespace DescribePipelineDefinitionForExecutionRequest { + export const filterSensitiveLog = (obj: DescribePipelineDefinitionForExecutionRequest): any => ({ ...obj, }); } -export interface ListLabelingJobsResponse { +export interface DescribePipelineDefinitionForExecutionResponse { /** - *

          An array of LabelingJobSummary objects, each describing a labeling - * job.

          + *

          The JSON pipeline definition.

          */ - LabelingJobSummaryList?: LabelingJobSummary[]; + PipelineDefinition?: string; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * labeling jobs, use it in the subsequent request.

          + *

          The time when the pipeline was created.

          */ - NextToken?: string; + CreationTime?: Date; } -export namespace ListLabelingJobsResponse { - export const filterSensitiveLog = (obj: ListLabelingJobsResponse): any => ({ +export namespace DescribePipelineDefinitionForExecutionResponse { + export const filterSensitiveLog = (obj: DescribePipelineDefinitionForExecutionResponse): any => ({ ...obj, }); } -export enum ListLabelingJobsForWorkteamSortByOptions { - CREATION_TIME = "CreationTime", +export interface DescribePipelineExecutionRequest { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn: string | undefined; +} + +export namespace DescribePipelineExecutionRequest { + export const filterSensitiveLog = (obj: DescribePipelineExecutionRequest): any => ({ + ...obj, + }); +} + +export enum PipelineExecutionStatus { + EXECUTING = "Executing", + FAILED = "Failed", + STOPPED = "Stopped", + STOPPING = "Stopping", + SUCCEEDED = "Succeeded", } -export interface ListLabelingJobsForWorkteamRequest { +export interface DescribePipelineExecutionResponse { /** - *

          The Amazon Resource Name (ARN) of the work team for which you want to see labeling - * jobs for.

          + *

          The Amazon Resource Name (ARN) of the pipeline.

          */ - WorkteamArn: string | undefined; + PipelineArn?: string; /** - *

          The maximum number of labeling jobs to return in each page of the response.

          + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          */ - MaxResults?: number; + PipelineExecutionArn?: string; /** - *

          If the result of the previous ListLabelingJobsForWorkteam request was - * truncated, the response includes a NextToken. To retrieve the next set of - * labeling jobs, use the token in the next request.

          + *

          The display name of the pipeline execution.

          */ - NextToken?: string; + PipelineExecutionDisplayName?: string; /** - *

          A filter that returns only labeling jobs created after the specified time - * (timestamp).

          + *

          The status of the pipeline execution.

          */ - CreationTimeAfter?: Date; + PipelineExecutionStatus?: PipelineExecutionStatus | string; /** - *

          A filter that returns only labeling jobs created before the specified time - * (timestamp).

          + *

          The description of the pipeline execution.

          */ - CreationTimeBefore?: Date; + PipelineExecutionDescription?: string; /** - *

          A filter the limits jobs to only the ones whose job reference code contains the - * specified string.

          + *

          The time when the pipeline execution was created.

          */ - JobReferenceCodeContains?: string; + CreationTime?: Date; /** - *

          The field to sort results by. The default is CreationTime.

          + *

          The time when the pipeline execution was modified last.

          */ - SortBy?: ListLabelingJobsForWorkteamSortByOptions | string; + LastModifiedTime?: Date; /** - *

          The sort order for results. The default is Ascending.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - SortOrder?: SortOrder | string; + CreatedBy?: UserContext; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + LastModifiedBy?: UserContext; } -export namespace ListLabelingJobsForWorkteamRequest { - export const filterSensitiveLog = (obj: ListLabelingJobsForWorkteamRequest): any => ({ +export namespace DescribePipelineExecutionResponse { + export const filterSensitiveLog = (obj: DescribePipelineExecutionResponse): any => ({ ...obj, }); } -export interface ListLabelingJobsForWorkteamResponse { - /** - *

          An array of LabelingJobSummary objects, each describing a labeling - * job.

          - */ - LabelingJobSummaryList: LabelingJobForWorkteamSummary[] | undefined; - +export interface DescribeProcessingJobRequest { /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * labeling jobs, use it in the subsequent request.

          + *

          The name of the processing job. The name must be unique within an AWS Region in the + * AWS account.

          */ - NextToken?: string; + ProcessingJobName: string | undefined; } -export namespace ListLabelingJobsForWorkteamResponse { - export const filterSensitiveLog = (obj: ListLabelingJobsForWorkteamResponse): any => ({ +export namespace DescribeProcessingJobRequest { + export const filterSensitiveLog = (obj: DescribeProcessingJobRequest): any => ({ ...obj, }); } -export enum ModelPackageSortBy { - CREATION_TIME = "CreationTime", - NAME = "Name", +export enum ProcessingJobStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", } -export interface ListModelPackagesInput { +export interface DescribeProcessingJobResponse { /** - *

          A filter that returns only model packages created after the specified time - * (timestamp).

          + *

          The inputs for a processing job.

          */ - CreationTimeAfter?: Date; + ProcessingInputs?: ProcessingInput[]; /** - *

          A filter that returns only model packages created before the specified time - * (timestamp).

          + *

          Output configuration for the processing job.

          */ - CreationTimeBefore?: Date; + ProcessingOutputConfig?: ProcessingOutputConfig; /** - *

          The maximum number of model packages to return in the response.

          + *

          The name of the processing job. The name must be unique within an AWS Region in the + * AWS account.

          */ - MaxResults?: number; + ProcessingJobName: string | undefined; /** - *

          A string in the model package name. This filter returns only model packages whose name - * contains the specified string.

          + *

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a + * processing job. In distributed training, you specify more than one instance.

          */ - NameContains?: string; + ProcessingResources: ProcessingResources | undefined; /** - *

          If the response to a previous ListModelPackages request was truncated, - * the response includes a NextToken. To retrieve the next set of model - * packages, use the token in the next request.

          + *

          The time limit for how long the processing job is allowed to run.

          */ - NextToken?: string; + StoppingCondition?: ProcessingStoppingCondition; /** - *

          The parameter by which to sort the results. The default is - * CreationTime.

          + *

          Configures the processing job to run a specified container image.

          */ - SortBy?: ModelPackageSortBy | string; + AppSpecification: AppSpecification | undefined; /** - *

          The sort order for the results. The default is Ascending.

          + *

          The environment variables set in the Docker container.

          */ - SortOrder?: SortOrder | string; -} - -export namespace ListModelPackagesInput { - export const filterSensitiveLog = (obj: ListModelPackagesInput): any => ({ - ...obj, - }); -} + Environment?: { [key: string]: string }; -/** - *

          Provides summary information about a model package.

          - */ -export interface ModelPackageSummary { /** - *

          The name of the model package.

          + *

          Networking options for a processing job.

          */ - ModelPackageName: string | undefined; + NetworkConfig?: NetworkConfig; /** - *

          The Amazon Resource Name (ARN) of the model package.

          + *

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on + * your behalf.

          */ - ModelPackageArn: string | undefined; + RoleArn?: string; /** - *

          A brief description of the model package.

          + *

          The configuration information used to create an experiment.

          */ - ModelPackageDescription?: string; + ExperimentConfig?: ExperimentConfig; /** - *

          A timestamp that shows when the model package was created.

          + *

          The Amazon Resource Name (ARN) of the processing job.

          */ - CreationTime: Date | undefined; + ProcessingJobArn: string | undefined; /** - *

          The overall status of the model package.

          + *

          Provides the status of a processing job.

          */ - ModelPackageStatus: ModelPackageStatus | string | undefined; -} - -export namespace ModelPackageSummary { - export const filterSensitiveLog = (obj: ModelPackageSummary): any => ({ - ...obj, - }); -} + ProcessingJobStatus: ProcessingJobStatus | string | undefined; -export interface ListModelPackagesOutput { /** - *

          An array of ModelPackageSummary objects, each of which lists a model - * package.

          + *

          An optional string, up to one KB in size, that contains metadata from the processing + * container when the processing job exits.

          */ - ModelPackageSummaryList: ModelPackageSummary[] | undefined; + ExitMessage?: string; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * model packages, use it in the subsequent request.

          + *

          A string, up to one KB in size, that contains the reason a processing job failed, if + * it failed.

          */ - NextToken?: string; -} - -export namespace ListModelPackagesOutput { - export const filterSensitiveLog = (obj: ListModelPackagesOutput): any => ({ - ...obj, - }); -} - -export enum ModelSortKey { - CreationTime = "CreationTime", - Name = "Name", -} + FailureReason?: string; -export interface ListModelsInput { /** - *

          Sorts the list of results. The default is CreationTime.

          + *

          The time at which the processing job completed.

          */ - SortBy?: ModelSortKey | string; + ProcessingEndTime?: Date; /** - *

          The sort order for results. The default is Descending.

          + *

          The time at which the processing job started.

          */ - SortOrder?: OrderKey | string; + ProcessingStartTime?: Date; /** - *

          If the response to a previous ListModels request was truncated, the - * response includes a NextToken. To retrieve the next set of models, use the - * token in the next request.

          + *

          The time at which the processing job was last modified.

          */ - NextToken?: string; + LastModifiedTime?: Date; /** - *

          The maximum number of models to return in the response.

          + *

          The time at which the processing job was created.

          */ - MaxResults?: number; + CreationTime: Date | undefined; /** - *

          A string in the training job name. This filter returns only models in the training - * job whose name contains the specified string.

          + *

          The ARN of a monitoring schedule for an endpoint associated with this processing + * job.

          */ - NameContains?: string; + MonitoringScheduleArn?: string; /** - *

          A filter that returns only models created before the specified time - * (timestamp).

          + *

          The ARN of an AutoML job associated with this processing job.

          */ - CreationTimeBefore?: Date; + AutoMLJobArn?: string; /** - *

          A filter that returns only models with a creation time greater than or equal to the - * specified time (timestamp).

          + *

          The ARN of a training job associated with this processing job.

          */ - CreationTimeAfter?: Date; + TrainingJobArn?: string; } -export namespace ListModelsInput { - export const filterSensitiveLog = (obj: ListModelsInput): any => ({ +export namespace DescribeProcessingJobResponse { + export const filterSensitiveLog = (obj: DescribeProcessingJobResponse): any => ({ ...obj, }); } -/** - *

          Provides summary information about a model.

          - */ -export interface ModelSummary { - /** - *

          The name of the model that you want a summary for.

          - */ - ModelName: string | undefined; - - /** - *

          The Amazon Resource Name (ARN) of the model.

          - */ - ModelArn: string | undefined; - +export interface DescribeProjectInput { /** - *

          A timestamp that indicates when the model was created.

          + *

          The name of the project to describe.

          */ - CreationTime: Date | undefined; + ProjectName: string | undefined; } -export namespace ModelSummary { - export const filterSensitiveLog = (obj: ModelSummary): any => ({ +export namespace DescribeProjectInput { + export const filterSensitiveLog = (obj: DescribeProjectInput): any => ({ ...obj, }); } -export interface ListModelsOutput { +export enum ProjectStatus { + CREATE_COMPLETED = "CreateCompleted", + CREATE_FAILED = "CreateFailed", + CREATE_IN_PROGRESS = "CreateInProgress", + DELETE_COMPLETED = "DeleteCompleted", + DELETE_FAILED = "DeleteFailed", + DELETE_IN_PROGRESS = "DeleteInProgress", + PENDING = "Pending", +} + +/** + *

          Details of a provisioned service catalog product. For information about service catalog, + * see What is AWS Service + * Catalog.

          + */ +export interface ServiceCatalogProvisionedProductDetails { /** - *

          An array of ModelSummary objects, each of which lists a - * model.

          + *

          The ID of the provisioned product.

          */ - Models: ModelSummary[] | undefined; + ProvisionedProductId?: string; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * models, use it in the subsequent request.

          + *

          The current status of the product.

          + *
            + *
          • + *

            + * AVAILABLE - Stable state, ready to perform any operation. The most recent operation succeeded and completed.

            + *
          • + *
          • + *

            + * UNDER_CHANGE - Transitive state. Operations performed might not have valid results. Wait for an AVAILABLE status before performing operations.

            + *
          • + *
          • + *

            + * TAINTED - Stable state, ready to perform any operation. The stack has completed the requested operation but is not exactly what was requested. For example, a request to update to a new version failed and the stack rolled back to the current version.

            + *
          • + *
          • + *

            + * ERROR - An unexpected error occurred. The provisioned product exists but the stack is not running. For example, CloudFormation received a parameter value that was not valid and could not launch the stack.

            + *
          • + *
          • + *

            + * PLAN_IN_PROGRESS - Transitive state. The plan operations were performed to provision a new product, but resources have not yet been created. After reviewing the list of resources to be created, execute the plan. Wait for an AVAILABLE status before performing operations.

            + *
          • + *
          */ - NextToken?: string; + ProvisionedProductStatusMessage?: string; } -export namespace ListModelsOutput { - export const filterSensitiveLog = (obj: ListModelsOutput): any => ({ +export namespace ServiceCatalogProvisionedProductDetails { + export const filterSensitiveLog = (obj: ServiceCatalogProvisionedProductDetails): any => ({ ...obj, }); } -export enum MonitoringExecutionSortKey { - CREATION_TIME = "CreationTime", - SCHEDULED_TIME = "ScheduledTime", - STATUS = "Status", -} +export interface DescribeProjectOutput { + /** + *

          The Amazon Resource Name (ARN) of the project.

          + */ + ProjectArn: string | undefined; -export interface ListMonitoringExecutionsRequest { /** - *

          Name of a specific schedule to fetch jobs for.

          + *

          The name of the project.

          */ - MonitoringScheduleName?: string; + ProjectName: string | undefined; /** - *

          Name of a specific endpoint to fetch jobs for.

          + *

          The ID of the project.

          */ - EndpointName?: string; + ProjectId: string | undefined; /** - *

          Whether to sort results by Status, CreationTime, - * ScheduledTime field. The default is CreationTime.

          + *

          The description of the project.

          */ - SortBy?: MonitoringExecutionSortKey | string; + ProjectDescription?: string; /** - *

          Whether to sort the results in Ascending or Descending order. - * The default is Descending.

          + *

          Information used to provision a service catalog product. For information, see What is AWS Service + * Catalog.

          */ - SortOrder?: SortOrder | string; + ServiceCatalogProvisioningDetails: ServiceCatalogProvisioningDetails | undefined; /** - *

          The token returned if the response is truncated. To retrieve the next set of job - * executions, use it in the next request.

          + *

          Information about a provisioned service catalog product.

          */ - NextToken?: string; + ServiceCatalogProvisionedProductDetails?: ServiceCatalogProvisionedProductDetails; /** - *

          The maximum number of jobs to return in the response. The default value is 10.

          + *

          The status of the project.

          */ - MaxResults?: number; + ProjectStatus: ProjectStatus | string | undefined; /** - *

          Filter for jobs scheduled before a specified time.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - ScheduledTimeBefore?: Date; + CreatedBy?: UserContext; /** - *

          Filter for jobs scheduled after a specified time.

          + *

          The time when the project was created.

          */ - ScheduledTimeAfter?: Date; + CreationTime: Date | undefined; +} +export namespace DescribeProjectOutput { + export const filterSensitiveLog = (obj: DescribeProjectOutput): any => ({ + ...obj, + }); +} + +export interface DescribeSubscribedWorkteamRequest { /** - *

          A filter that returns only jobs created before a specified time.

          + *

          The Amazon Resource Name (ARN) of the subscribed work team to describe.

          */ - CreationTimeBefore?: Date; + WorkteamArn: string | undefined; +} + +export namespace DescribeSubscribedWorkteamRequest { + export const filterSensitiveLog = (obj: DescribeSubscribedWorkteamRequest): any => ({ + ...obj, + }); +} +/** + *

          Describes a work team of a vendor that does the a labelling job.

          + */ +export interface SubscribedWorkteam { /** - *

          A filter that returns only jobs created after a specified time.

          + *

          The Amazon Resource Name (ARN) of the vendor that you have subscribed.

          */ - CreationTimeAfter?: Date; + WorkteamArn: string | undefined; + + /** + *

          The title of the service provided by the vendor in the Amazon Marketplace.

          + */ + MarketplaceTitle?: string; /** - *

          A filter that returns only jobs modified after a specified time.

          + *

          The name of the vendor in the Amazon Marketplace.

          */ - LastModifiedTimeBefore?: Date; + SellerName?: string; /** - *

          A filter that returns only jobs modified before a specified time.

          + *

          The description of the vendor from the Amazon Marketplace.

          */ - LastModifiedTimeAfter?: Date; + MarketplaceDescription?: string; /** - *

          A filter that retrieves only jobs with a specific status.

          + *

          Marketplace product listing ID.

          */ - StatusEquals?: ExecutionStatus | string; + ListingId?: string; } -export namespace ListMonitoringExecutionsRequest { - export const filterSensitiveLog = (obj: ListMonitoringExecutionsRequest): any => ({ +export namespace SubscribedWorkteam { + export const filterSensitiveLog = (obj: SubscribedWorkteam): any => ({ ...obj, }); } -export interface ListMonitoringExecutionsResponse { - /** - *

          A JSON array in which each element is a summary for a monitoring execution.

          - */ - MonitoringExecutionSummaries: MonitoringExecutionSummary[] | undefined; - +export interface DescribeSubscribedWorkteamResponse { /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs, - * use it in the subsequent reques

          + *

          A Workteam instance that contains information about the work team.

          */ - NextToken?: string; + SubscribedWorkteam: SubscribedWorkteam | undefined; } -export namespace ListMonitoringExecutionsResponse { - export const filterSensitiveLog = (obj: ListMonitoringExecutionsResponse): any => ({ +export namespace DescribeSubscribedWorkteamResponse { + export const filterSensitiveLog = (obj: DescribeSubscribedWorkteamResponse): any => ({ ...obj, }); } -export enum MonitoringScheduleSortKey { - CREATION_TIME = "CreationTime", - NAME = "Name", - STATUS = "Status", -} - -export interface ListMonitoringSchedulesRequest { +export interface DescribeTrainingJobRequest { /** - *

          Name of a specific endpoint to fetch schedules for.

          + *

          The name of the training job.

          */ - EndpointName?: string; + TrainingJobName: string | undefined; +} - /** - *

          Whether to sort results by Status, CreationTime, - * ScheduledTime field. The default is CreationTime.

          - */ - SortBy?: MonitoringScheduleSortKey | string; +export namespace DescribeTrainingJobRequest { + export const filterSensitiveLog = (obj: DescribeTrainingJobRequest): any => ({ + ...obj, + }); +} +/** + *

          The name, value, and date and time of a metric that was emitted to Amazon CloudWatch.

          + */ +export interface MetricData { /** - *

          Whether to sort the results in Ascending or Descending order. - * The default is Descending.

          + *

          The name of the metric.

          */ - SortOrder?: SortOrder | string; + MetricName?: string; /** - *

          The token returned if the response is truncated. To retrieve the next set of job - * executions, use it in the next request.

          + *

          The value of the metric.

          */ - NextToken?: string; + Value?: number; /** - *

          The maximum number of jobs to return in the response. The default value is 10.

          + *

          The date and time that the algorithm emitted the metric.

          */ - MaxResults?: number; + Timestamp?: Date; +} - /** - *

          Filter for monitoring schedules whose name contains a specified string.

          - */ - NameContains?: string; +export namespace MetricData { + export const filterSensitiveLog = (obj: MetricData): any => ({ + ...obj, + }); +} - /** - *

          A filter that returns only monitoring schedules created before a specified time.

          - */ - CreationTimeBefore?: Date; +export enum SecondaryStatus { + COMPLETED = "Completed", + DOWNLOADING = "Downloading", + DOWNLOADING_TRAINING_IMAGE = "DownloadingTrainingImage", + FAILED = "Failed", + INTERRUPTED = "Interrupted", + LAUNCHING_ML_INSTANCES = "LaunchingMLInstances", + MAX_RUNTIME_EXCEEDED = "MaxRuntimeExceeded", + MAX_WAIT_TIME_EXCEEDED = "MaxWaitTimeExceeded", + PREPARING_TRAINING_STACK = "PreparingTrainingStack", + STARTING = "Starting", + STOPPED = "Stopped", + STOPPING = "Stopping", + TRAINING = "Training", + UPDATING = "Updating", + UPLOADING = "Uploading", +} +/** + *

          An array element of DescribeTrainingJobResponse$SecondaryStatusTransitions. It provides + * additional details about a status that the training job has transitioned through. A + * training job can be in one of several states, for example, starting, downloading, + * training, or uploading. Within each state, there are a number of intermediate states. + * For example, within the starting state, Amazon SageMaker could be starting the training job or + * launching the ML instances. These transitional states are referred to as the job's + * secondary + * status. + *

          + *

          + */ +export interface SecondaryStatusTransition { /** - *

          A filter that returns only monitoring schedules created after a specified time.

          + *

          Contains a secondary status information from a training + * job.

          + *

          Status might be one of the following secondary statuses:

          + *
          + *
          InProgress
          + *
          + *
            + *
          • + *

            + * Starting + * - Starting the training job.

            + *
          • + *
          • + *

            + * Downloading - An optional stage for algorithms that + * support File training input mode. It indicates that + * data is being downloaded to the ML storage volumes.

            + *
          • + *
          • + *

            + * Training - Training is in progress.

            + *
          • + *
          • + *

            + * Uploading - Training is complete and the model + * artifacts are being uploaded to the S3 location.

            + *
          • + *
          + *
          + *
          Completed
          + *
          + *
            + *
          • + *

            + * Completed - The training job has completed.

            + *
          • + *
          + *
          + *
          Failed
          + *
          + *
            + *
          • + *

            + * Failed - The training job has failed. The reason for + * the failure is returned in the FailureReason field of + * DescribeTrainingJobResponse.

            + *
          • + *
          + *
          + *
          Stopped
          + *
          + *
            + *
          • + *

            + * MaxRuntimeExceeded - The job stopped because it + * exceeded the maximum allowed runtime.

            + *
          • + *
          • + *

            + * Stopped - The training job has stopped.

            + *
          • + *
          + *
          + *
          Stopping
          + *
          + *
            + *
          • + *

            + * Stopping - Stopping the training job.

            + *
          • + *
          + *
          + *
          + *

          We no longer support the following secondary statuses:

          + *
            + *
          • + *

            + * LaunchingMLInstances + *

            + *
          • + *
          • + *

            + * PreparingTrainingStack + *

            + *
          • + *
          • + *

            + * DownloadingTrainingImage + *

            + *
          • + *
          */ - CreationTimeAfter?: Date; + Status: SecondaryStatus | string | undefined; /** - *

          A filter that returns only monitoring schedules modified before a specified time.

          + *

          A timestamp that shows when the training job transitioned to the current secondary + * status state.

          */ - LastModifiedTimeBefore?: Date; + StartTime: Date | undefined; /** - *

          A filter that returns only monitoring schedules modified after a specified time.

          + *

          A timestamp that shows when the training job transitioned out of this secondary status + * state into another secondary status state or when the training job has ended.

          */ - LastModifiedTimeAfter?: Date; + EndTime?: Date; /** - *

          A filter that returns only monitoring schedules modified before a specified time.

          + *

          A detailed description of the progress within a secondary status. + *

          + *

          Amazon SageMaker provides secondary statuses and status messages that apply to each of + * them:

          + *
          + *
          Starting
          + *
          + *
            + *
          • + *

            Starting the training job.

            + *
          • + *
          • + *

            Launching + * requested ML instances.

            + *
          • + *
          • + *

            Insufficient + * capacity error from EC2 while launching instances, + * retrying!

            + *
          • + *
          • + *

            Launched + * instance was unhealthy, replacing it!

            + *
          • + *
          • + *

            Preparing the instances for training.

            + *
          • + *
          + *
          + *
          Training
          + *
          + *
            + *
          • + *

            Downloading the training image.

            + *
          • + *
          • + *

            Training + * image download completed. Training in + * progress.

            + *
          • + *
          + *
          + *
          + * + *

          Status messages are subject to change. Therefore, we recommend not including them + * in code that programmatically initiates actions. For examples, don't use status + * messages in if statements.

          + *
          + *

          To have an overview of your training job's progress, view + * TrainingJobStatus and SecondaryStatus in DescribeTrainingJob, and StatusMessage together. For + * example, at the start of a training job, you might see the following:

          + *
            + *
          • + *

            + * TrainingJobStatus - InProgress

            + *
          • + *
          • + *

            + * SecondaryStatus - Training

            + *
          • + *
          • + *

            + * StatusMessage - Downloading the training image

            + *
          • + *
          */ - StatusEquals?: ScheduleStatus | string; + StatusMessage?: string; } -export namespace ListMonitoringSchedulesRequest { - export const filterSensitiveLog = (obj: ListMonitoringSchedulesRequest): any => ({ +export namespace SecondaryStatusTransition { + export const filterSensitiveLog = (obj: SecondaryStatusTransition): any => ({ ...obj, }); } -/** - *

          Summarizes the monitoring schedule.

          - */ -export interface MonitoringScheduleSummary { - /** - *

          The name of the monitoring schedule.

          - */ - MonitoringScheduleName: string | undefined; - +export interface DescribeTrainingJobResponse { /** - *

          The Amazon Resource Name (ARN) of the monitoring schedule.

          + *

          Name of the model training job.

          */ - MonitoringScheduleArn: string | undefined; + TrainingJobName: string | undefined; /** - *

          The creation time of the monitoring schedule.

          + *

          The Amazon Resource Name (ARN) of the training job.

          */ - CreationTime: Date | undefined; + TrainingJobArn: string | undefined; /** - *

          The last time the monitoring schedule was modified.

          + *

          The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the + * training job was launched by a hyperparameter tuning job.

          */ - LastModifiedTime: Date | undefined; + TuningJobArn?: string; /** - *

          The status of the monitoring schedule.

          + *

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the + * transform or training job.

          */ - MonitoringScheduleStatus: ScheduleStatus | string | undefined; + LabelingJobArn?: string; /** - *

          The name of the endpoint using the monitoring schedule.

          + *

          The Amazon Resource Name (ARN) of an AutoML job.

          */ - EndpointName?: string; -} - -export namespace MonitoringScheduleSummary { - export const filterSensitiveLog = (obj: MonitoringScheduleSummary): any => ({ - ...obj, - }); -} + AutoMLJobArn?: string; -export interface ListMonitoringSchedulesResponse { /** - *

          A JSON array in which each element is a summary for a monitoring schedule.

          + *

          Information about the Amazon S3 location that is configured for storing model artifacts. + *

          */ - MonitoringScheduleSummaries: MonitoringScheduleSummary[] | undefined; + ModelArtifacts: ModelArtifacts | undefined; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs, - * use it in the subsequent reques

          - */ - NextToken?: string; -} - -export namespace ListMonitoringSchedulesResponse { - export const filterSensitiveLog = (obj: ListMonitoringSchedulesResponse): any => ({ - ...obj, - }); -} - -export enum NotebookInstanceLifecycleConfigSortKey { - CREATION_TIME = "CreationTime", - LAST_MODIFIED_TIME = "LastModifiedTime", - NAME = "Name", -} - -export enum NotebookInstanceLifecycleConfigSortOrder { - ASCENDING = "Ascending", - DESCENDING = "Descending", -} + *

          The status of the + * training + * job.

          + *

          Amazon SageMaker provides the following training job statuses:

          + *
            + *
          • + *

            + * InProgress - The training is in progress.

            + *
          • + *
          • + *

            + * Completed - The training job has completed.

            + *
          • + *
          • + *

            + * Failed - The training job has failed. To see the reason for the + * failure, see the FailureReason field in the response to a + * DescribeTrainingJobResponse call.

            + *
          • + *
          • + *

            + * Stopping - The training job is stopping.

            + *
          • + *
          • + *

            + * Stopped - The training job has stopped.

            + *
          • + *
          + *

          For + * more detailed information, see SecondaryStatus.

          + */ + TrainingJobStatus: TrainingJobStatus | string | undefined; -export interface ListNotebookInstanceLifecycleConfigsInput { /** - *

          If the result of a ListNotebookInstanceLifecycleConfigs request was - * truncated, the response includes a NextToken. To get the next set of - * lifecycle configurations, use the token in the next request.

          + *

          Provides detailed information about the state of the training job. For detailed + * information on the secondary status of the training job, see StatusMessage + * under SecondaryStatusTransition.

          + *

          Amazon SageMaker provides primary statuses and secondary statuses that apply to each of + * them:

          + *
          + *
          InProgress
          + *
          + *
            + *
          • + *

            + * Starting + * - Starting the training job.

            + *
          • + *
          • + *

            + * Downloading - An optional stage for algorithms that + * support File training input mode. It indicates that + * data is being downloaded to the ML storage volumes.

            + *
          • + *
          • + *

            + * Training - Training is in progress.

            + *
          • + *
          • + *

            + * Interrupted - The job stopped because the managed + * spot training instances were interrupted.

            + *
          • + *
          • + *

            + * Uploading - Training is complete and the model + * artifacts are being uploaded to the S3 location.

            + *
          • + *
          + *
          + *
          Completed
          + *
          + *
            + *
          • + *

            + * Completed - The training job has completed.

            + *
          • + *
          + *
          + *
          Failed
          + *
          + *
            + *
          • + *

            + * Failed - The training job has failed. The reason for + * the failure is returned in the FailureReason field of + * DescribeTrainingJobResponse.

            + *
          • + *
          + *
          + *
          Stopped
          + *
          + *
            + *
          • + *

            + * MaxRuntimeExceeded - The job stopped because it + * exceeded the maximum allowed runtime.

            + *
          • + *
          • + *

            + * MaxWaitTimeExceeded - The job stopped because it + * exceeded the maximum allowed wait time.

            + *
          • + *
          • + *

            + * Stopped - The training job has stopped.

            + *
          • + *
          + *
          + *
          Stopping
          + *
          + *
            + *
          • + *

            + * Stopping - Stopping the training job.

            + *
          • + *
          + *
          + *
          + * + * + *

          Valid values for SecondaryStatus are subject to change.

          + *
          + *

          We no longer support the following secondary statuses:

          + *
            + *
          • + *

            + * LaunchingMLInstances + *

            + *
          • + *
          • + *

            + * PreparingTrainingStack + *

            + *
          • + *
          • + *

            + * DownloadingTrainingImage + *

            + *
          • + *
          */ - NextToken?: string; + SecondaryStatus: SecondaryStatus | string | undefined; /** - *

          The maximum number of lifecycle configurations to return in the response.

          + *

          If the training job failed, the reason it failed.

          */ - MaxResults?: number; + FailureReason?: string; /** - *

          Sorts the list of results. The default is CreationTime.

          + *

          Algorithm-specific parameters.

          */ - SortBy?: NotebookInstanceLifecycleConfigSortKey | string; + HyperParameters?: { [key: string]: string }; /** - *

          The sort order for results.

          + *

          Information about the algorithm used for training, and algorithm metadata. + *

          */ - SortOrder?: NotebookInstanceLifecycleConfigSortOrder | string; + AlgorithmSpecification: AlgorithmSpecification | undefined; /** - *

          A string in the lifecycle configuration name. This filter returns only lifecycle - * configurations whose name contains the specified string.

          + *

          The AWS Identity and Access Management (IAM) role configured for the training job.

          */ - NameContains?: string; + RoleArn?: string; /** - *

          A filter that returns only lifecycle configurations that were created before the - * specified time (timestamp).

          + *

          An array of Channel objects that describes each data input channel. + *

          */ - CreationTimeBefore?: Date; + InputDataConfig?: Channel[]; /** - *

          A filter that returns only lifecycle configurations that were created after the - * specified time (timestamp).

          + *

          The S3 path where model artifacts that you configured when creating the job are + * stored. Amazon SageMaker creates subfolders for model artifacts.

          */ - CreationTimeAfter?: Date; + OutputDataConfig?: OutputDataConfig; /** - *

          A filter that returns only lifecycle configurations that were modified before the - * specified time (timestamp).

          + *

          Resources, including ML compute instances and ML storage volumes, that are + * configured for model training.

          */ - LastModifiedTimeBefore?: Date; + ResourceConfig: ResourceConfig | undefined; /** - *

          A filter that returns only lifecycle configurations that were modified after the - * specified time (timestamp).

          + *

          A VpcConfig object that specifies the VPC that this training job has + * access to. For more information, see Protect Training Jobs by Using an Amazon + * Virtual Private Cloud.

          */ - LastModifiedTimeAfter?: Date; -} - -export namespace ListNotebookInstanceLifecycleConfigsInput { - export const filterSensitiveLog = (obj: ListNotebookInstanceLifecycleConfigsInput): any => ({ - ...obj, - }); -} + VpcConfig?: VpcConfig; -/** - *

          Provides a summary of a notebook instance lifecycle configuration.

          - */ -export interface NotebookInstanceLifecycleConfigSummary { /** - *

          The name of the lifecycle configuration.

          + *

          Specifies a limit to how long a model training job can run. It also specifies the + * maximum time to wait for a spot instance. When the job reaches the time limit, Amazon SageMaker ends + * the training job. Use this API to cap model training costs.

          + *

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays + * job termination for 120 seconds. Algorithms can use this 120-second window to save the + * model artifacts, so the results of training are not lost.

          */ - NotebookInstanceLifecycleConfigName: string | undefined; + StoppingCondition: StoppingCondition | undefined; /** - *

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          + *

          A timestamp that indicates when the training job was created.

          */ - NotebookInstanceLifecycleConfigArn: string | undefined; + CreationTime: Date | undefined; /** - *

          A timestamp that tells when the lifecycle configuration was created.

          + *

          Indicates the time when the training job starts on training instances. You are + * billed for the time interval between this time and the value of + * TrainingEndTime. The start time in CloudWatch Logs might be later than this time. + * The difference is due to the time it takes to download the training data and to the size + * of the training container.

          */ - CreationTime?: Date; + TrainingStartTime?: Date; /** - *

          A timestamp that tells when the lifecycle configuration was last modified.

          + *

          Indicates the time when the training job ends on training instances. You are billed + * for the time interval between the value of TrainingStartTime and this time. + * For successful jobs and stopped jobs, this is the time after model artifacts are + * uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          */ - LastModifiedTime?: Date; -} - -export namespace NotebookInstanceLifecycleConfigSummary { - export const filterSensitiveLog = (obj: NotebookInstanceLifecycleConfigSummary): any => ({ - ...obj, - }); -} + TrainingEndTime?: Date; -export interface ListNotebookInstanceLifecycleConfigsOutput { /** - *

          If the response is truncated, Amazon SageMaker returns this token. To get the next set of - * lifecycle configurations, use it in the next request.

          + *

          A timestamp that indicates when the status of the training job was last + * modified.

          */ - NextToken?: string; + LastModifiedTime?: Date; /** - *

          An array of NotebookInstanceLifecycleConfiguration objects, each listing - * a lifecycle configuration.

          + *

          A history of all of the secondary statuses that the training job has transitioned + * through.

          */ - NotebookInstanceLifecycleConfigs?: NotebookInstanceLifecycleConfigSummary[]; -} - -export namespace ListNotebookInstanceLifecycleConfigsOutput { - export const filterSensitiveLog = (obj: ListNotebookInstanceLifecycleConfigsOutput): any => ({ - ...obj, - }); -} - -export enum NotebookInstanceSortKey { - CREATION_TIME = "CreationTime", - NAME = "Name", - STATUS = "Status", -} - -export enum NotebookInstanceSortOrder { - ASCENDING = "Ascending", - DESCENDING = "Descending", -} + SecondaryStatusTransitions?: SecondaryStatusTransition[]; -export interface ListNotebookInstancesInput { /** - *

          If the previous call to the ListNotebookInstances is truncated, the - * response includes a NextToken. You can use this token in your subsequent - * ListNotebookInstances request to fetch the next set of notebook - * instances.

          - * - *

          You might specify a filter or a sort order in your request. When response is - * truncated, you must use the same values for the filer and sort order in the next - * request.

          - *
          + *

          A collection of MetricData objects that specify the names, values, and + * dates and times that the training algorithm emitted to Amazon CloudWatch.

          */ - NextToken?: string; + FinalMetricDataList?: MetricData[]; /** - *

          The maximum number of notebook instances to return.

          + *

          If you want to allow inbound or outbound network calls, except for calls between peers + * within a training cluster for distributed training, choose True. If you + * enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker + * downloads and uploads customer data and model artifacts through the specified VPC, but + * the training container does not have network access.

          */ - MaxResults?: number; + EnableNetworkIsolation?: boolean; /** - *

          The field to sort results by. The default is Name.

          + *

          To encrypt all communications between ML compute instances in distributed training, + * choose True. Encryption provides greater security for distributed training, + * but training might take longer. How long it takes depends on the amount of communication + * between compute instances, especially if you use a deep learning algorithms in + * distributed training.

          */ - SortBy?: NotebookInstanceSortKey | string; + EnableInterContainerTrafficEncryption?: boolean; /** - *

          The sort order for results.

          + *

          A Boolean indicating whether managed spot training is enabled (True) or + * not (False).

          */ - SortOrder?: NotebookInstanceSortOrder | string; + EnableManagedSpotTraining?: boolean; /** - *

          A string in the notebook instances' name. This filter returns only notebook - * instances whose name contains the specified string.

          + *

          Contains information about the output location for managed spot training checkpoint + * data.

          */ - NameContains?: string; + CheckpointConfig?: CheckpointConfig; /** - *

          A filter that returns only notebook instances that were created before the - * specified time (timestamp).

          + *

          The training time in seconds.

          */ - CreationTimeBefore?: Date; + TrainingTimeInSeconds?: number; /** - *

          A filter that returns only notebook instances that were created after the specified - * time (timestamp).

          + *

          The billable time in seconds.

          + *

          You can calculate the savings from using managed spot training using the formula + * (1 - BillableTimeInSeconds / TrainingTimeInSeconds) * 100. For example, + * if BillableTimeInSeconds is 100 and TrainingTimeInSeconds is + * 500, the savings is 80%.

          */ - CreationTimeAfter?: Date; + BillableTimeInSeconds?: number; /** - *

          A filter that returns only notebook instances that were modified before the - * specified time (timestamp).

          + *

          Configuration information for the debug hook parameters, collection configuration, and + * storage paths.

          */ - LastModifiedTimeBefore?: Date; + DebugHookConfig?: DebugHookConfig; /** - *

          A filter that returns only notebook instances that were modified after the - * specified time (timestamp).

          + *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

          + * */ - LastModifiedTimeAfter?: Date; + ExperimentConfig?: ExperimentConfig; /** - *

          A filter that returns only notebook instances with the specified status.

          + *

          Configuration information for debugging rules.

          */ - StatusEquals?: NotebookInstanceStatus | string; + DebugRuleConfigurations?: DebugRuleConfiguration[]; /** - *

          A string in the name of a notebook instances lifecycle configuration associated with - * this notebook instance. This filter returns only notebook instances associated with a - * lifecycle configuration with a name that contains the specified string.

          + *

          Configuration of storage locations for TensorBoard output.

          */ - NotebookInstanceLifecycleConfigNameContains?: string; + TensorBoardOutputConfig?: TensorBoardOutputConfig; /** - *

          A string in the name or URL of a Git repository associated with this notebook - * instance. This filter returns only notebook instances associated with a git repository - * with a name that contains the specified string.

          + *

          Status about the debug rule evaluation.

          */ - DefaultCodeRepositoryContains?: string; + DebugRuleEvaluationStatuses?: DebugRuleEvaluationStatus[]; +} +export namespace DescribeTrainingJobResponse { + export const filterSensitiveLog = (obj: DescribeTrainingJobResponse): any => ({ + ...obj, + }); +} + +export interface DescribeTransformJobRequest { /** - *

          A filter that returns only notebook instances with associated with the specified git - * repository.

          + *

          The name of the transform job that you want to view details of.

          */ - AdditionalCodeRepositoryEquals?: string; + TransformJobName: string | undefined; } -export namespace ListNotebookInstancesInput { - export const filterSensitiveLog = (obj: ListNotebookInstancesInput): any => ({ +export namespace DescribeTransformJobRequest { + export const filterSensitiveLog = (obj: DescribeTransformJobRequest): any => ({ ...obj, }); } -/** - *

          Provides summary information for an Amazon SageMaker notebook instance.

          - */ -export interface NotebookInstanceSummary { - /** - *

          The name of the notebook instance that you want a summary for.

          - */ - NotebookInstanceName: string | undefined; +export enum TransformJobStatus { + COMPLETED = "Completed", + FAILED = "Failed", + IN_PROGRESS = "InProgress", + STOPPED = "Stopped", + STOPPING = "Stopping", +} +export interface DescribeTransformJobResponse { /** - *

          The Amazon Resource Name (ARN) of the notebook instance.

          + *

          The name of the transform job.

          */ - NotebookInstanceArn: string | undefined; + TransformJobName: string | undefined; /** - *

          The status of the notebook instance.

          + *

          The Amazon Resource Name (ARN) of the transform job.

          */ - NotebookInstanceStatus?: NotebookInstanceStatus | string; + TransformJobArn: string | undefined; /** *

          The - * URL that you use to connect to the Jupyter instance running in your notebook instance. - *

          + * status of the transform job. If the transform job failed, the reason + * is returned in the FailureReason field.

          */ - Url?: string; + TransformJobStatus: TransformJobStatus | string | undefined; /** - *

          The type of ML compute instance that the notebook instance is running on.

          + *

          If the transform job failed, FailureReason describes + * why + * it failed. A transform job creates a log file, which includes error + * messages, and stores it + * as + * an Amazon S3 object. For more information, see Log Amazon SageMaker Events with + * Amazon CloudWatch.

          */ - InstanceType?: _InstanceType | string; + FailureReason?: string; /** - *

          A timestamp that shows when the notebook instance was created.

          + *

          The name of the model used in the transform job.

          */ - CreationTime?: Date; + ModelName: string | undefined; /** - *

          A timestamp that shows when the notebook instance was last modified.

          + *

          The + * maximum number + * of + * parallel requests on each instance node + * that can be launched in a transform job. The default value is 1.

          */ - LastModifiedTime?: Date; + MaxConcurrentTransforms?: number; /** - *

          The name of a notebook instance lifecycle configuration associated with this notebook - * instance.

          - *

          For information about notebook instance lifestyle configurations, see Step - * 2.1: (Optional) Customize a Notebook Instance.

          + *

          The timeout and maximum number of retries for processing a transform job + * invocation.

          */ - NotebookInstanceLifecycleConfigName?: string; + ModelClientConfig?: ModelClientConfig; /** - *

          The Git repository associated with the notebook instance as its default code - * repository. This can be either the name of a Git repository stored as a resource in your - * account, or the URL of a Git repository in AWS CodeCommit or in any - * other Git repository. When you open a notebook instance, it opens in the directory that - * contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker - * Notebook Instances.

          + *

          The + * maximum + * payload size, in MB, used in the + * transform job.

          */ - DefaultCodeRepository?: string; + MaxPayloadInMB?: number; /** - *

          An array of up to three Git repositories associated with the notebook instance. These - * can be either the names of Git repositories stored as resources in your account, or the - * URL of Git repositories in AWS CodeCommit or in any - * other Git repository. These repositories are cloned at the same level as the default - * repository of your notebook instance. For more information, see Associating Git - * Repositories with Amazon SageMaker Notebook Instances.

          + *

          Specifies the number of records to include in a mini-batch for an HTTP inference + * request. + * A record + * is a single unit of input data that inference + * can be made on. For example, a single line in a CSV file is a record.

          + *

          To enable the batch strategy, you must set SplitType + * to + * Line, RecordIO, or + * TFRecord.

          */ - AdditionalCodeRepositories?: string[]; -} - -export namespace NotebookInstanceSummary { - export const filterSensitiveLog = (obj: NotebookInstanceSummary): any => ({ - ...obj, - }); -} + BatchStrategy?: BatchStrategy | string; -export interface ListNotebookInstancesOutput { /** - *

          If the response to the previous ListNotebookInstances request was - * truncated, Amazon SageMaker returns this token. To retrieve the next set of notebook instances, use - * the token in the next request.

          + *

          The + * environment variables to set in the Docker container. We support up to 16 key and values + * entries in the map.

          */ - NextToken?: string; + Environment?: { [key: string]: string }; /** - *

          An array of NotebookInstanceSummary objects, one for each notebook - * instance.

          + *

          Describes the dataset to be transformed and the Amazon S3 location where it is + * stored.

          */ - NotebookInstances?: NotebookInstanceSummary[]; -} - -export namespace ListNotebookInstancesOutput { - export const filterSensitiveLog = (obj: ListNotebookInstancesOutput): any => ({ - ...obj, - }); -} + TransformInput: TransformInput | undefined; -export interface ListProcessingJobsRequest { /** - *

          A filter that returns only processing jobs created after the specified time.

          + *

          Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the + * transform job.

          */ - CreationTimeAfter?: Date; + TransformOutput?: TransformOutput; /** - *

          A filter that returns only processing jobs created after the specified time.

          + *

          Describes + * the resources, including ML instance types and ML instance count, to + * use for the transform job.

          */ - CreationTimeBefore?: Date; + TransformResources: TransformResources | undefined; /** - *

          A filter that returns only processing jobs modified after the specified time.

          + *

          A timestamp that shows when the transform Job was created.

          */ - LastModifiedTimeAfter?: Date; + CreationTime: Date | undefined; /** - *

          A filter that returns only processing jobs modified before the specified time.

          + *

          Indicates when the transform job starts + * on + * ML instances. You are billed for the time interval between this time + * and the value of TransformEndTime.

          */ - LastModifiedTimeBefore?: Date; + TransformStartTime?: Date; /** - *

          A string in the processing job name. This filter returns only processing jobs whose - * name contains the specified string.

          + *

          Indicates when the transform job has been + * + * completed, or has stopped or failed. You are billed for the time + * interval between this time and the value of TransformStartTime.

          */ - NameContains?: string; + TransformEndTime?: Date; /** - *

          A filter that retrieves only processing jobs with a specific status.

          + *

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the + * transform or training job.

          */ - StatusEquals?: ProcessingJobStatus | string; + LabelingJobArn?: string; /** - *

          The field to sort results by. The default is CreationTime.

          + *

          The Amazon Resource Name (ARN) of the AutoML transform job.

          */ - SortBy?: SortBy | string; + AutoMLJobArn?: string; /** - *

          The sort order for results. The default is Ascending.

          + *

          The data structure used to specify the data to be used for inference in a batch + * transform job and to associate the data that is relevant to the prediction results in + * the output. The input filter provided allows you to exclude input data that is not + * needed for inference in a batch transform job. The output filter provided allows you to + * include input data relevant to interpreting the predictions in the output from the job. + * For more information, see Associate Prediction + * Results with their Corresponding Input Records.

          */ - SortOrder?: SortOrder | string; + DataProcessing?: DataProcessing; /** - *

          If the result of the previous ListProcessingJobs request was truncated, - * the response includes a NextToken. To retrieve the next set of processing - * jobs, use the token in the next request.

          + *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

          + * */ - NextToken?: string; + ExperimentConfig?: ExperimentConfig; +} + +export namespace DescribeTransformJobResponse { + export const filterSensitiveLog = (obj: DescribeTransformJobResponse): any => ({ + ...obj, + }); +} +export interface DescribeTrialRequest { /** - *

          The maximum number of processing jobs to return in the response.

          + *

          The name of the trial to describe.

          */ - MaxResults?: number; + TrialName: string | undefined; } -export namespace ListProcessingJobsRequest { - export const filterSensitiveLog = (obj: ListProcessingJobsRequest): any => ({ +export namespace DescribeTrialRequest { + export const filterSensitiveLog = (obj: DescribeTrialRequest): any => ({ ...obj, }); } /** - *

          Summary of information about a processing job.

          + *

          The source of the trial.

          */ -export interface ProcessingJobSummary { +export interface TrialSource { /** - *

          The name of the processing job.

          + *

          The Amazon Resource Name (ARN) of the source.

          */ - ProcessingJobName: string | undefined; + SourceArn: string | undefined; + + /** + *

          The source job type.

          + */ + SourceType?: string; +} + +export namespace TrialSource { + export const filterSensitiveLog = (obj: TrialSource): any => ({ + ...obj, + }); +} +export interface DescribeTrialResponse { /** - *

          The Amazon Resource Name (ARN) of the processing job..

          + *

          The name of the trial.

          */ - ProcessingJobArn: string | undefined; + TrialName?: string; /** - *

          The time at which the processing job was created.

          + *

          The Amazon Resource Name (ARN) of the trial.

          */ - CreationTime: Date | undefined; + TrialArn?: string; /** - *

          The time at which the processing job completed.

          + *

          The name of the trial as displayed. If DisplayName isn't specified, + * TrialName is displayed.

          */ - ProcessingEndTime?: Date; + DisplayName?: string; /** - *

          A timestamp that indicates the last time the processing job was modified.

          + *

          The name of the experiment the trial is part of.

          */ - LastModifiedTime?: Date; + ExperimentName?: string; /** - *

          The status of the processing job.

          + *

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          */ - ProcessingJobStatus: ProcessingJobStatus | string | undefined; + Source?: TrialSource; /** - *

          A string, up to one KB in size, that contains the reason a processing job failed, if - * it failed.

          + *

          When the trial was created.

          */ - FailureReason?: string; + CreationTime?: Date; /** - *

          An optional string, up to one KB in size, that contains metadata from the processing - * container when the processing job exits.

          + *

          Who created the trial.

          */ - ExitMessage?: string; + CreatedBy?: UserContext; + + /** + *

          When the trial was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          Who last modified the trial.

          + */ + LastModifiedBy?: UserContext; + + /** + *

          Metadata properties of the tracking entity, trial, or trial component.

          + */ + MetadataProperties?: MetadataProperties; } -export namespace ProcessingJobSummary { - export const filterSensitiveLog = (obj: ProcessingJobSummary): any => ({ +export namespace DescribeTrialResponse { + export const filterSensitiveLog = (obj: DescribeTrialResponse): any => ({ ...obj, }); } -export interface ListProcessingJobsResponse { - /** - *

          An array of ProcessingJobSummary objects, each listing a processing - * job.

          - */ - ProcessingJobSummaries: ProcessingJobSummary[] | undefined; - +export interface DescribeTrialComponentRequest { /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * processing jobs, use it in the subsequent request.

          + *

          The name of the trial component to describe.

          */ - NextToken?: string; + TrialComponentName: string | undefined; } -export namespace ListProcessingJobsResponse { - export const filterSensitiveLog = (obj: ListProcessingJobsResponse): any => ({ +export namespace DescribeTrialComponentRequest { + export const filterSensitiveLog = (obj: DescribeTrialComponentRequest): any => ({ ...obj, }); } -export interface ListSubscribedWorkteamsRequest { +/** + *

          A summary of the metrics of a trial component.

          + */ +export interface TrialComponentMetricSummary { /** - *

          A string in the work team name. This filter returns only work teams whose name - * contains the specified string.

          + *

          The name of the metric.

          */ - NameContains?: string; + MetricName?: string; /** - *

          If the result of the previous ListSubscribedWorkteams request was - * truncated, the response includes a NextToken. To retrieve the next set of - * labeling jobs, use the token in the next request.

          + *

          The Amazon Resource Name (ARN) of the source.

          */ - NextToken?: string; + SourceArn?: string; /** - *

          The maximum number of work teams to return in each page of the response.

          + *

          When the metric was last updated.

          */ - MaxResults?: number; -} - -export namespace ListSubscribedWorkteamsRequest { - export const filterSensitiveLog = (obj: ListSubscribedWorkteamsRequest): any => ({ - ...obj, - }); -} + TimeStamp?: Date; -export interface ListSubscribedWorkteamsResponse { /** - *

          An array of Workteam objects, each describing a work team.

          + *

          The maximum value of the metric.

          */ - SubscribedWorkteams: SubscribedWorkteam[] | undefined; + Max?: number; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * work teams, use it in the subsequent request.

          + *

          The minimum value of the metric.

          */ - NextToken?: string; -} + Min?: number; -export namespace ListSubscribedWorkteamsResponse { - export const filterSensitiveLog = (obj: ListSubscribedWorkteamsResponse): any => ({ - ...obj, - }); -} + /** + *

          The most recent value of the metric.

          + */ + Last?: number; -export interface ListTagsInput { /** - *

          The Amazon Resource Name (ARN) of the resource whose tags you want to - * retrieve.

          + *

          The number of samples used to generate the metric.

          */ - ResourceArn: string | undefined; + Count?: number; /** - *

          If the response to the previous ListTags request is truncated, Amazon SageMaker - * returns this token. To retrieve the next set of tags, use it in the subsequent request. - *

          + *

          The average value of the metric.

          */ - NextToken?: string; + Avg?: number; /** - *

          Maximum number of tags to return.

          + *

          The standard deviation of the metric.

          */ - MaxResults?: number; + StdDev?: number; } -export namespace ListTagsInput { - export const filterSensitiveLog = (obj: ListTagsInput): any => ({ +export namespace TrialComponentMetricSummary { + export const filterSensitiveLog = (obj: TrialComponentMetricSummary): any => ({ ...obj, }); } -export interface ListTagsOutput { +/** + *

          The Amazon Resource Name (ARN) and job type of the source of a trial component.

          + */ +export interface TrialComponentSource { /** - *

          An array of Tag objects, each with a tag key and a value.

          + *

          The source ARN.

          */ - Tags?: Tag[]; + SourceArn: string | undefined; /** - *

          If response is truncated, Amazon SageMaker includes a token in the response. You can use this - * token in your subsequent request to fetch next set of tokens.

          + *

          The source job type.

          */ - NextToken?: string; + SourceType?: string; } -export namespace ListTagsOutput { - export const filterSensitiveLog = (obj: ListTagsOutput): any => ({ +export namespace TrialComponentSource { + export const filterSensitiveLog = (obj: TrialComponentSource): any => ({ ...obj, }); } -export interface ListTrainingJobsRequest { +export interface DescribeTrialComponentResponse { /** - *

          If the result of the previous ListTrainingJobs request was truncated, - * the response includes a NextToken. To retrieve the next set of training - * jobs, use the token in the next request.

          + *

          The name of the trial component.

          */ - NextToken?: string; + TrialComponentName?: string; /** - *

          The maximum number of training jobs to return in the response.

          + *

          The Amazon Resource Name (ARN) of the trial component.

          */ - MaxResults?: number; + TrialComponentArn?: string; /** - *

          A filter that returns only training jobs created after the specified time - * (timestamp).

          + *

          The name of the component as displayed. If DisplayName isn't specified, + * TrialComponentName is displayed.

          */ - CreationTimeAfter?: Date; + DisplayName?: string; /** - *

          A filter that returns only training jobs created before the specified time - * (timestamp).

          + *

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          */ - CreationTimeBefore?: Date; + Source?: TrialComponentSource; /** - *

          A filter that returns only training jobs modified after the specified time - * (timestamp).

          + *

          The status of the component. States include:

          + *
            + *
          • + *

            InProgress

            + *
          • + *
          • + *

            Completed

            + *
          • + *
          • + *

            Failed

            + *
          • + *
          */ - LastModifiedTimeAfter?: Date; + Status?: TrialComponentStatus; /** - *

          A filter that returns only training jobs modified before the specified time - * (timestamp).

          + *

          When the component started.

          */ - LastModifiedTimeBefore?: Date; + StartTime?: Date; /** - *

          A string in the training job name. This filter returns only training jobs whose - * name contains the specified string.

          + *

          When the component ended.

          */ - NameContains?: string; + EndTime?: Date; /** - *

          A filter that retrieves only training jobs with a specific status.

          + *

          When the component was created.

          */ - StatusEquals?: TrainingJobStatus | string; + CreationTime?: Date; /** - *

          The field to sort results by. The default is CreationTime.

          + *

          Who created the component.

          */ - SortBy?: SortBy | string; + CreatedBy?: UserContext; /** - *

          The sort order for results. The default is Ascending.

          + *

          When the component was last modified.

          */ - SortOrder?: SortOrder | string; -} - -export namespace ListTrainingJobsRequest { - export const filterSensitiveLog = (obj: ListTrainingJobsRequest): any => ({ - ...obj, - }); -} + LastModifiedTime?: Date; -/** - *

          Provides summary information about a training job.

          - */ -export interface TrainingJobSummary { /** - *

          The name of the training job that you want a summary for.

          + *

          Who last modified the component.

          */ - TrainingJobName: string | undefined; + LastModifiedBy?: UserContext; /** - *

          The Amazon Resource Name (ARN) of the training job.

          + *

          The hyperparameters of the component.

          */ - TrainingJobArn: string | undefined; + Parameters?: { [key: string]: TrialComponentParameterValue }; /** - *

          A timestamp that shows when the training job was created.

          + *

          The input artifacts of the component.

          */ - CreationTime: Date | undefined; + InputArtifacts?: { [key: string]: TrialComponentArtifact }; /** - *

          A timestamp that shows when the training job ended. This field is set only if the - * training job has one of the terminal statuses (Completed, - * Failed, or Stopped).

          + *

          The output artifacts of the component.

          */ - TrainingEndTime?: Date; + OutputArtifacts?: { [key: string]: TrialComponentArtifact }; /** - *

          Timestamp when the training job was last modified.

          + *

          Metadata properties of the tracking entity, trial, or trial component.

          */ - LastModifiedTime?: Date; + MetadataProperties?: MetadataProperties; /** - *

          The status of the training job.

          + *

          The metrics for the component.

          */ - TrainingJobStatus: TrainingJobStatus | string | undefined; + Metrics?: TrialComponentMetricSummary[]; } -export namespace TrainingJobSummary { - export const filterSensitiveLog = (obj: TrainingJobSummary): any => ({ +export namespace DescribeTrialComponentResponse { + export const filterSensitiveLog = (obj: DescribeTrialComponentResponse): any => ({ ...obj, + ...(obj.Parameters && { + Parameters: Object.entries(obj.Parameters).reduce( + (acc: any, [key, value]: [string, TrialComponentParameterValue]) => ({ + ...acc, + [key]: TrialComponentParameterValue.filterSensitiveLog(value), + }), + {} + ), + }), }); } -export interface ListTrainingJobsResponse { +export interface DescribeUserProfileRequest { /** - *

          An array of TrainingJobSummary objects, each listing a training - * job.

          + *

          The domain ID.

          */ - TrainingJobSummaries: TrainingJobSummary[] | undefined; + DomainId: string | undefined; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * training jobs, use it in the subsequent request.

          + *

          The user profile name.

          */ - NextToken?: string; + UserProfileName: string | undefined; } -export namespace ListTrainingJobsResponse { - export const filterSensitiveLog = (obj: ListTrainingJobsResponse): any => ({ +export namespace DescribeUserProfileRequest { + export const filterSensitiveLog = (obj: DescribeUserProfileRequest): any => ({ ...obj, }); } -export enum TrainingJobSortByOptions { - CreationTime = "CreationTime", - FinalObjectiveMetricValue = "FinalObjectiveMetricValue", - Name = "Name", - Status = "Status", +export enum UserProfileStatus { + Delete_Failed = "Delete_Failed", + Deleting = "Deleting", + Failed = "Failed", + InService = "InService", + Pending = "Pending", + Update_Failed = "Update_Failed", + Updating = "Updating", } -export interface ListTrainingJobsForHyperParameterTuningJobRequest { +export interface DescribeUserProfileResponse { /** - *

          The name of the tuning job whose training jobs you want to list.

          + *

          The ID of the domain that contains the profile.

          */ - HyperParameterTuningJobName: string | undefined; + DomainId?: string; /** - *

          If the result of the previous ListTrainingJobsForHyperParameterTuningJob - * request was truncated, the response includes a NextToken. To retrieve the - * next set of training jobs, use the token in the next request.

          + *

          The user profile Amazon Resource Name (ARN).

          */ - NextToken?: string; + UserProfileArn?: string; /** - *

          The maximum number of training jobs to return. The default value is 10.

          + *

          The user profile name.

          */ - MaxResults?: number; + UserProfileName?: string; /** - *

          A filter that returns only training jobs with the - * specified - * status.

          + *

          The ID of the user's profile in the Amazon Elastic File System (EFS) volume.

          */ - StatusEquals?: TrainingJobStatus | string; + HomeEfsFileSystemUid?: string; /** - *

          The field to sort - * results - * by. The default is Name.

          - *

          If the value of this field is FinalObjectiveMetricValue, any training - * jobs that did not return an objective metric are not listed.

          + *

          The status.

          */ - SortBy?: TrainingJobSortByOptions | string; + Status?: UserProfileStatus | string; /** - *

          The sort order - * for - * results. The default is Ascending.

          + *

          The last modified time.

          */ - SortOrder?: SortOrder | string; -} - -export namespace ListTrainingJobsForHyperParameterTuningJobRequest { - export const filterSensitiveLog = (obj: ListTrainingJobsForHyperParameterTuningJobRequest): any => ({ - ...obj, - }); -} + LastModifiedTime?: Date; -export interface ListTrainingJobsForHyperParameterTuningJobResponse { /** - *

          A list of TrainingJobSummary objects that - * describe - * the training jobs that the - * ListTrainingJobsForHyperParameterTuningJob request returned.

          + *

          The creation time.

          */ - TrainingJobSummaries: HyperParameterTrainingJobSummary[] | undefined; + CreationTime?: Date; /** - *

          If the result of this ListTrainingJobsForHyperParameterTuningJob request - * was truncated, the response includes a NextToken. To retrieve the next set - * of training jobs, use the token in the next request.

          + *

          The failure reason.

          */ - NextToken?: string; -} + FailureReason?: string; -export namespace ListTrainingJobsForHyperParameterTuningJobResponse { - export const filterSensitiveLog = (obj: ListTrainingJobsForHyperParameterTuningJobResponse): any => ({ - ...obj, - }); -} + /** + *

          The SSO user identifier.

          + */ + SingleSignOnUserIdentifier?: string; -export interface ListTransformJobsRequest { /** - *

          A filter that returns only transform jobs created after the specified time.

          + *

          The SSO user value.

          */ - CreationTimeAfter?: Date; + SingleSignOnUserValue?: string; /** - *

          A filter that returns only transform jobs created before the specified time.

          + *

          A collection of settings.

          */ - CreationTimeBefore?: Date; + UserSettings?: UserSettings; +} +export namespace DescribeUserProfileResponse { + export const filterSensitiveLog = (obj: DescribeUserProfileResponse): any => ({ + ...obj, + }); +} + +export interface DescribeWorkforceRequest { /** - *

          A filter that returns only transform jobs modified after the specified time.

          + *

          The name of the private workforce whose access you want to restrict. + * WorkforceName is automatically set to default when a + * workforce is created and cannot be modified.

          */ - LastModifiedTimeAfter?: Date; + WorkforceName: string | undefined; +} + +export namespace DescribeWorkforceRequest { + export const filterSensitiveLog = (obj: DescribeWorkforceRequest): any => ({ + ...obj, + }); +} +/** + *

          Your OIDC IdP workforce configuration.

          + */ +export interface OidcConfigForResponse { /** - *

          A filter that returns only transform jobs modified before the specified time.

          + *

          The OIDC IdP client ID used to configure your private workforce.

          */ - LastModifiedTimeBefore?: Date; + ClientId?: string; /** - *

          A string in the transform job name. This filter returns only transform jobs whose name - * contains the specified string.

          + *

          The OIDC IdP issuer used to configure your private workforce.

          */ - NameContains?: string; + Issuer?: string; /** - *

          A filter that retrieves only transform jobs with a specific status.

          + *

          The OIDC IdP authorization endpoint used to configure your private workforce.

          */ - StatusEquals?: TransformJobStatus | string; + AuthorizationEndpoint?: string; /** - *

          The field to sort results by. The default is CreationTime.

          + *

          The OIDC IdP token endpoint used to configure your private workforce.

          */ - SortBy?: SortBy | string; + TokenEndpoint?: string; /** - *

          The sort order for results. The default is Descending.

          + *

          The OIDC IdP user information endpoint used to configure your private workforce.

          */ - SortOrder?: SortOrder | string; + UserInfoEndpoint?: string; /** - *

          If the result of the previous ListTransformJobs request was truncated, - * the response includes a NextToken. To retrieve the next set of transform - * jobs, use the token in the next request.

          + *

          The OIDC IdP logout endpoint used to configure your private workforce.

          */ - NextToken?: string; + LogoutEndpoint?: string; /** - *

          The maximum number of - * transform - * jobs to return in the response. The default value is - * 10.

          + *

          The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

          */ - MaxResults?: number; + JwksUri?: string; } -export namespace ListTransformJobsRequest { - export const filterSensitiveLog = (obj: ListTransformJobsRequest): any => ({ +export namespace OidcConfigForResponse { + export const filterSensitiveLog = (obj: OidcConfigForResponse): any => ({ ...obj, }); } /** - *

          Provides a - * summary - * of a transform job. Multiple TransformJobSummary objects are returned as a - * list after in response to a ListTransformJobs call.

          + *

          A single private workforce, which is automatically created when you create your first + * private work team. You can create one private work force in each AWS Region. By default, + * any workforce-related API operation used in a specific region will apply to the + * workforce created in that region. To learn how to create a private workforce, see Create a Private Workforce.

          */ -export interface TransformJobSummary { +export interface Workforce { /** - *

          The name of the transform job.

          + *

          The name of the private workforce.

          */ - TransformJobName: string | undefined; + WorkforceName: string | undefined; /** - *

          The Amazon Resource Name (ARN) of the transform job.

          + *

          The Amazon Resource Name (ARN) of the private workforce.

          */ - TransformJobArn: string | undefined; + WorkforceArn: string | undefined; /** - *

          A timestamp that shows when the transform Job was created.

          + *

          The most recent date that was used to + * successfully add one or more IP address ranges (CIDRs) to a private workforce's + * allow list.

          + */ + LastUpdatedDate?: Date; + + /** + *

          A list of one to ten IP address ranges (CIDRs) to be added to the + * workforce allow list. By default, a workforce isn't restricted to specific IP addresses.

          */ - CreationTime: Date | undefined; + SourceIpConfig?: SourceIpConfig; /** - *

          Indicates when the transform - * job - * ends on compute instances. For successful jobs and stopped jobs, this - * is the exact time - * recorded - * after the results are uploaded. For failed jobs, this is when Amazon SageMaker - * detected that the job failed.

          + *

          The subdomain for your OIDC Identity Provider.

          */ - TransformEndTime?: Date; + SubDomain?: string; /** - *

          Indicates when the transform job was last modified.

          + *

          The configuration of an Amazon Cognito workforce. + * A single Cognito workforce is created using and corresponds to a single + * + * Amazon Cognito user pool.

          */ - LastModifiedTime?: Date; + CognitoConfig?: CognitoConfig; /** - *

          The status of the transform job.

          + *

          The configuration of an OIDC Identity Provider (IdP) private workforce.

          */ - TransformJobStatus: TransformJobStatus | string | undefined; + OidcConfig?: OidcConfigForResponse; /** - *

          If the transform job failed, - * the - * reason it failed.

          + *

          The date that the workforce is created.

          */ - FailureReason?: string; + CreateDate?: Date; } -export namespace TransformJobSummary { - export const filterSensitiveLog = (obj: TransformJobSummary): any => ({ +export namespace Workforce { + export const filterSensitiveLog = (obj: Workforce): any => ({ ...obj, }); } -export interface ListTransformJobsResponse { +export interface DescribeWorkforceResponse { /** - *

          An array of - * TransformJobSummary - * objects.

          + *

          A single private workforce, which is automatically created when you create your first + * private work team. You can create one private work force in each AWS Region. By default, + * any workforce-related API operation used in a specific region will apply to the + * workforce created in that region. To learn how to create a private workforce, see Create a Private Workforce.

          */ - TransformJobSummaries: TransformJobSummary[] | undefined; + Workforce: Workforce | undefined; +} + +export namespace DescribeWorkforceResponse { + export const filterSensitiveLog = (obj: DescribeWorkforceResponse): any => ({ + ...obj, + }); +} +export interface DescribeWorkteamRequest { /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * transform jobs, use it in the next request.

          + *

          The name of the work team to return a description of.

          */ - NextToken?: string; + WorkteamName: string | undefined; } -export namespace ListTransformJobsResponse { - export const filterSensitiveLog = (obj: ListTransformJobsResponse): any => ({ +export namespace DescribeWorkteamRequest { + export const filterSensitiveLog = (obj: DescribeWorkteamRequest): any => ({ ...obj, }); } -export enum SortTrialComponentsBy { - CREATION_TIME = "CreationTime", - NAME = "Name", -} +/** + *

          Provides details about a labeling work team.

          + */ +export interface Workteam { + /** + *

          The name of the work team.

          + */ + WorkteamName: string | undefined; -export interface ListTrialComponentsRequest { /** - *

          A filter that returns only components that are part of the specified experiment. If you - * specify ExperimentName, you can't filter by SourceArn or - * TrialName.

          + *

          A list of MemberDefinition objects that contains objects that identify + * the workers that make up the work team.

          + *

          Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). + * For private workforces created using Amazon Cognito use + * CognitoMemberDefinition. For workforces created using your own OIDC identity + * provider (IdP) use OidcMemberDefinition.

          */ - ExperimentName?: string; + MemberDefinitions: MemberDefinition[] | undefined; /** - *

          A filter that returns only components that are part of the specified trial. If you specify - * TrialName, you can't filter by ExperimentName or - * SourceArn.

          + *

          The Amazon Resource Name (ARN) that identifies the work team.

          */ - TrialName?: string; + WorkteamArn: string | undefined; /** - *

          A filter that returns only components that have the specified source Amazon Resource Name - * (ARN). If you specify SourceArn, you can't filter by ExperimentName - * or TrialName.

          + *

          The Amazon Resource Name (ARN) of the workforce.

          */ - SourceArn?: string; + WorkforceArn?: string; /** - *

          A filter that returns only components created after the specified time.

          + *

          The Amazon Marketplace identifier for a vendor's work team.

          */ - CreatedAfter?: Date; + ProductListingIds?: string[]; /** - *

          A filter that returns only components created before the specified time.

          + *

          A description of the work team.

          */ - CreatedBefore?: Date; + Description: string | undefined; /** - *

          The property used to sort results. The default value is CreationTime.

          + *

          The URI of the labeling job's user interface. Workers open this URI to start labeling + * your data objects.

          */ - SortBy?: SortTrialComponentsBy | string; + SubDomain?: string; /** - *

          The sort order. The default value is Descending.

          + *

          The date and time that the work team was created (timestamp).

          */ - SortOrder?: SortOrder | string; + CreateDate?: Date; /** - *

          The maximum number of components to return in the response. The default value is - * 10.

          + *

          The date and time that the work team was last updated (timestamp).

          */ - MaxResults?: number; + LastUpdatedDate?: Date; /** - *

          If the previous call to ListTrialComponents didn't return the full set of - * components, the call returns a token for getting the next set of components.

          + *

          Configures SNS notifications of available or expiring work items for work + * teams.

          */ - NextToken?: string; + NotificationConfiguration?: NotificationConfiguration; } -export namespace ListTrialComponentsRequest { - export const filterSensitiveLog = (obj: ListTrialComponentsRequest): any => ({ +export namespace Workteam { + export const filterSensitiveLog = (obj: Workteam): any => ({ ...obj, }); } -/** - *

          A summary of the properties of a trial component. To get all the properties, call the - * DescribeTrialComponent API and provide the - * TrialComponentName.

          - */ -export interface TrialComponentSummary { +export interface DescribeWorkteamResponse { /** - *

          The name of the trial component.

          + *

          A Workteam instance that contains information about the work team. + *

          */ - TrialComponentName?: string; + Workteam: Workteam | undefined; +} - /** - *

          The ARN of the trial component.

          - */ - TrialComponentArn?: string; +export namespace DescribeWorkteamResponse { + export const filterSensitiveLog = (obj: DescribeWorkteamResponse): any => ({ + ...obj, + }); +} +/** + *

          Specifies weight and capacity values for a production variant.

          + */ +export interface DesiredWeightAndCapacity { /** - *

          The name of the component as displayed. If DisplayName isn't specified, - * TrialComponentName is displayed.

          + *

          The name of the + * variant + * to update.

          */ - DisplayName?: string; + VariantName: string | undefined; /** - *

          The Amazon Resource Name (ARN) and job type of the source of a trial component.

          + *

          The variant's weight.

          */ - TrialComponentSource?: TrialComponentSource; + DesiredWeight?: number; /** - *

          The status of the component. States include:

          - *
            - *
          • - *

            InProgress

            - *
          • - *
          • - *

            Completed

            - *
          • - *
          • - *

            Failed

            - *
          • - *
          + *

          The variant's capacity.

          */ - Status?: TrialComponentStatus; + DesiredInstanceCount?: number; +} - /** - *

          When the component started.

          - */ - StartTime?: Date; +export namespace DesiredWeightAndCapacity { + export const filterSensitiveLog = (obj: DesiredWeightAndCapacity): any => ({ + ...obj, + }); +} - /** - *

          When the component ended.

          - */ - EndTime?: Date; +export interface DisableSagemakerServicecatalogPortfolioInput {} - /** - *

          When the component was created.

          - */ - CreationTime?: Date; +export namespace DisableSagemakerServicecatalogPortfolioInput { + export const filterSensitiveLog = (obj: DisableSagemakerServicecatalogPortfolioInput): any => ({ + ...obj, + }); +} - /** - *

          Who created the component.

          - */ - CreatedBy?: UserContext; +export interface DisableSagemakerServicecatalogPortfolioOutput {} + +export namespace DisableSagemakerServicecatalogPortfolioOutput { + export const filterSensitiveLog = (obj: DisableSagemakerServicecatalogPortfolioOutput): any => ({ + ...obj, + }); +} +export interface DisassociateTrialComponentRequest { /** - *

          When the component was last modified.

          + *

          The name of the component to disassociate from the trial.

          */ - LastModifiedTime?: Date; + TrialComponentName: string | undefined; /** - *

          Who last modified the component.

          + *

          The name of the trial to disassociate from.

          */ - LastModifiedBy?: UserContext; + TrialName: string | undefined; } -export namespace TrialComponentSummary { - export const filterSensitiveLog = (obj: TrialComponentSummary): any => ({ +export namespace DisassociateTrialComponentRequest { + export const filterSensitiveLog = (obj: DisassociateTrialComponentRequest): any => ({ ...obj, }); } -export interface ListTrialComponentsResponse { +export interface DisassociateTrialComponentResponse { /** - *

          A list of the summaries of your trial components.

          + *

          The ARN of the trial component.

          */ - TrialComponentSummaries?: TrialComponentSummary[]; + TrialComponentArn?: string; /** - *

          A token for getting the next set of components, if there are any.

          + *

          The Amazon Resource Name (ARN) of the trial.

          */ - NextToken?: string; + TrialArn?: string; } -export namespace ListTrialComponentsResponse { - export const filterSensitiveLog = (obj: ListTrialComponentsResponse): any => ({ +export namespace DisassociateTrialComponentResponse { + export const filterSensitiveLog = (obj: DisassociateTrialComponentResponse): any => ({ ...obj, }); } -export enum SortTrialsBy { - CREATION_TIME = "CreationTime", - NAME = "Name", -} - -export interface ListTrialsRequest { +/** + *

          The domain's details.

          + */ +export interface DomainDetails { /** - *

          A filter that returns only trials that are part of the specified experiment.

          + *

          The domain's Amazon Resource Name (ARN).

          */ - ExperimentName?: string; + DomainArn?: string; /** - *

          A filter that returns only trials that are associated with the specified trial - * component.

          + *

          The domain ID.

          */ - TrialComponentName?: string; + DomainId?: string; /** - *

          A filter that returns only trials created after the specified time.

          + *

          The domain name.

          */ - CreatedAfter?: Date; + DomainName?: string; /** - *

          A filter that returns only trials created before the specified time.

          + *

          The status.

          */ - CreatedBefore?: Date; + Status?: DomainStatus | string; /** - *

          The property used to sort results. The default value is CreationTime.

          + *

          The creation time.

          */ - SortBy?: SortTrialsBy | string; + CreationTime?: Date; /** - *

          The sort order. The default value is Descending.

          + *

          The last modified time.

          */ - SortOrder?: SortOrder | string; + LastModifiedTime?: Date; /** - *

          The maximum number of trials to return in the response. The default value is 10.

          + *

          The domain's URL.

          */ - MaxResults?: number; + Url?: string; +} - /** - *

          If the previous call to ListTrials didn't return the full set of trials, the - * call returns a token for getting the next set of trials.

          - */ - NextToken?: string; +export namespace DomainDetails { + export const filterSensitiveLog = (obj: DomainDetails): any => ({ + ...obj, + }); } -export namespace ListTrialsRequest { - export const filterSensitiveLog = (obj: ListTrialsRequest): any => ({ +export interface EnableSagemakerServicecatalogPortfolioInput {} + +export namespace EnableSagemakerServicecatalogPortfolioInput { + export const filterSensitiveLog = (obj: EnableSagemakerServicecatalogPortfolioInput): any => ({ + ...obj, + }); +} + +export interface EnableSagemakerServicecatalogPortfolioOutput {} + +export namespace EnableSagemakerServicecatalogPortfolioOutput { + export const filterSensitiveLog = (obj: EnableSagemakerServicecatalogPortfolioOutput): any => ({ ...obj, }); } /** - *

          A summary of the properties of a trial. To get the complete set of properties, call the - * DescribeTrial API and provide the TrialName.

          + *

          A schedule for a model monitoring job. For information about model monitor, see + * Amazon SageMaker Model + * Monitor.

          */ -export interface TrialSummary { +export interface MonitoringSchedule { /** - *

          The Amazon Resource Name (ARN) of the trial.

          + *

          The Amazon Resource Name (ARN) of the monitoring schedule.

          */ - TrialArn?: string; + MonitoringScheduleArn?: string; /** - *

          The name of the trial.

          + *

          The name of the monitoring schedule.

          */ - TrialName?: string; + MonitoringScheduleName?: string; /** - *

          The name of the trial as displayed. If DisplayName isn't specified, - * TrialName is displayed.

          + *

          The status of the monitoring schedule. This can be one of the following values.

          + *
            + *
          • + *

            + * PENDING - The schedule is pending being created.

            + *
          • + *
          • + *

            + * FAILED - The schedule failed.

            + *
          • + *
          • + *

            + * SCHEDULED - The schedule was successfully created.

            + *
          • + *
          • + *

            + * STOPPED - The schedule was stopped.

            + *
          • + *
          */ - DisplayName?: string; + MonitoringScheduleStatus?: ScheduleStatus | string; /** - *

          The source of the trial.

          + *

          If the monitoring schedule failed, the reason it failed.

          */ - TrialSource?: TrialSource; + FailureReason?: string; /** - *

          When the trial was created.

          + *

          The time that the monitoring schedule was created.

          */ CreationTime?: Date; /** - *

          When the trial was last modified.

          + *

          The last time the monitoring schedule was changed.

          */ LastModifiedTime?: Date; -} -export namespace TrialSummary { - export const filterSensitiveLog = (obj: TrialSummary): any => ({ - ...obj, - }); -} + /** + *

          Configures the monitoring schedule and defines the monitoring job.

          + */ + MonitoringScheduleConfig?: MonitoringScheduleConfig; -export interface ListTrialsResponse { /** - *

          A list of the summaries of your trials.

          + *

          The endpoint that hosts the model being monitored.

          */ - TrialSummaries?: TrialSummary[]; + EndpointName?: string; /** - *

          A token for getting the next set of trials, if there are any.

          + *

          Summary of information about the last monitoring job to run.

          */ - NextToken?: string; + LastMonitoringExecutionSummary?: MonitoringExecutionSummary; + + /** + *

          A list of the tags associated with the monitoring schedlue. For more information, see Tagging AWS + * resources in the AWS General Reference Guide.

          + */ + Tags?: Tag[]; } -export namespace ListTrialsResponse { - export const filterSensitiveLog = (obj: ListTrialsResponse): any => ({ +export namespace MonitoringSchedule { + export const filterSensitiveLog = (obj: MonitoringSchedule): any => ({ ...obj, }); } -export enum UserProfileSortKey { - CreationTime = "CreationTime", - LastModifiedTime = "LastModifiedTime", -} - -export interface ListUserProfilesRequest { +/** + *

          A hosted endpoint for real-time inference.

          + */ +export interface Endpoint { /** - *

          If the previous response was truncated, you will receive this token. - * Use it in your next request to receive the next set of results.

          + *

          The name of the endpoint.

          */ - NextToken?: string; + EndpointName: string | undefined; /** - *

          Returns a list up to a specified limit.

          + *

          The Amazon Resource Name (ARN) of the endpoint.

          */ - MaxResults?: number; + EndpointArn: string | undefined; /** - *

          The sort order for the results. The default is Ascending.

          + *

          The endpoint configuration associated with the endpoint.

          */ - SortOrder?: SortOrder | string; + EndpointConfigName: string | undefined; /** - *

          The parameter by which to sort the results. The default is CreationTime.

          + *

          A list of the production variants hosted on the endpoint. Each production variant is a + * model.

          */ - SortBy?: UserProfileSortKey | string; + ProductionVariants?: ProductionVariantSummary[]; /** - *

          A parameter by which to filter the results.

          + *

          */ - DomainIdEquals?: string; + DataCaptureConfig?: DataCaptureConfigSummary; /** - *

          A parameter by which to filter the results.

          + *

          The status of the endpoint.

          */ - UserProfileNameContains?: string; -} - -export namespace ListUserProfilesRequest { - export const filterSensitiveLog = (obj: ListUserProfilesRequest): any => ({ - ...obj, - }); -} + EndpointStatus: EndpointStatus | string | undefined; -/** - *

          The user profile details.

          - */ -export interface UserProfileDetails { /** - *

          The domain ID.

          + *

          If the endpoint failed, the reason it failed.

          */ - DomainId?: string; + FailureReason?: string; /** - *

          The user profile name.

          + *

          The time that the endpoint was created.

          */ - UserProfileName?: string; + CreationTime: Date | undefined; /** - *

          The status.

          + *

          The last time the endpoint was modified.

          */ - Status?: UserProfileStatus | string; + LastModifiedTime: Date | undefined; /** - *

          The creation time.

          + *

          A list of monitoring schedules for the endpoint. For information about model + * monitoring, see Amazon SageMaker Model Monitor.

          */ - CreationTime?: Date; + MonitoringSchedules?: MonitoringSchedule[]; /** - *

          The last modified time.

          + *

          A list of the tags associated with the endpoint. For more information, see Tagging AWS + * resources in the AWS General Reference Guide.

          */ - LastModifiedTime?: Date; + Tags?: Tag[]; } -export namespace UserProfileDetails { - export const filterSensitiveLog = (obj: UserProfileDetails): any => ({ +export namespace Endpoint { + export const filterSensitiveLog = (obj: Endpoint): any => ({ ...obj, }); } -export interface ListUserProfilesResponse { +export enum EndpointConfigSortKey { + CreationTime = "CreationTime", + Name = "Name", +} + +/** + *

          Provides summary information for an endpoint configuration.

          + */ +export interface EndpointConfigSummary { /** - *

          The list of user profiles.

          + *

          The name of the endpoint configuration.

          */ - UserProfiles?: UserProfileDetails[]; + EndpointConfigName: string | undefined; /** - *

          If the previous response was truncated, you will receive this token. - * Use it in your next request to receive the next set of results.

          + *

          The Amazon Resource Name (ARN) of the endpoint configuration.

          */ - NextToken?: string; + EndpointConfigArn: string | undefined; + + /** + *

          A timestamp that shows when the endpoint configuration was created.

          + */ + CreationTime: Date | undefined; } -export namespace ListUserProfilesResponse { - export const filterSensitiveLog = (obj: ListUserProfilesResponse): any => ({ +export namespace EndpointConfigSummary { + export const filterSensitiveLog = (obj: EndpointConfigSummary): any => ({ ...obj, }); } -export enum ListWorkforcesSortByOptions { - CreateDate = "CreateDate", +export enum EndpointSortKey { + CreationTime = "CreationTime", Name = "Name", + Status = "Status", } -export interface ListWorkforcesRequest { +/** + *

          Provides summary information for an endpoint.

          + */ +export interface EndpointSummary { /** - *

          Sort workforces using the workforce name or creation date.

          + *

          The name of the endpoint.

          */ - SortBy?: ListWorkforcesSortByOptions | string; + EndpointName: string | undefined; /** - *

          Sort workforces in ascending or descending order.

          + *

          The Amazon Resource Name (ARN) of the endpoint.

          */ - SortOrder?: SortOrder | string; + EndpointArn: string | undefined; /** - *

          A filter you can use to search for workforces using part of the workforce name.

          + *

          A timestamp that shows when the endpoint was created.

          */ - NameContains?: string; + CreationTime: Date | undefined; /** - *

          A token to resume pagination.

          + *

          A timestamp that shows when the endpoint was last modified.

          */ - NextToken?: string; + LastModifiedTime: Date | undefined; /** - *

          The maximum number of workforces returned in the response.

          + *

          The status of the endpoint.

          + *
            + *
          • + *

            + * OutOfService: Endpoint is not available to take incoming + * requests.

            + *
          • + *
          • + *

            + * Creating: CreateEndpoint is executing.

            + *
          • + *
          • + *

            + * Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            + *
          • + *
          • + *

            + * SystemUpdating: Endpoint is undergoing maintenance and cannot be + * updated or deleted or re-scaled until it has completed. This maintenance + * operation does not change any customer-specified values such as VPC config, KMS + * encryption, model, instance type, or instance count.

            + *
          • + *
          • + *

            + * RollingBack: Endpoint fails to scale up or down or change its + * variant weight and is in the process of rolling back to its previous + * configuration. Once the rollback completes, endpoint returns to an + * InService status. This transitional status only applies to an + * endpoint that has autoscaling enabled and is undergoing variant weight or + * capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called + * explicitly.

            + *
          • + *
          • + *

            + * InService: Endpoint is available to process incoming + * requests.

            + *
          • + *
          • + *

            + * Deleting: DeleteEndpoint is executing.

            + *
          • + *
          • + *

            + * Failed: Endpoint could not be created, updated, or re-scaled. Use + * DescribeEndpointOutput$FailureReason for information about + * the failure. DeleteEndpoint is the only operation that can be + * performed on a failed endpoint.

            + *
          • + *
          + *

          To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

          */ - MaxResults?: number; + EndpointStatus: EndpointStatus | string | undefined; } -export namespace ListWorkforcesRequest { - export const filterSensitiveLog = (obj: ListWorkforcesRequest): any => ({ +export namespace EndpointSummary { + export const filterSensitiveLog = (obj: EndpointSummary): any => ({ ...obj, }); } -export interface ListWorkforcesResponse { +/** + *

          The properties of an experiment as returned by the Search API.

          + */ +export interface Experiment { /** - *

          A list containing information about your workforce.

          + *

          The name of the experiment.

          */ - Workforces: Workforce[] | undefined; + ExperimentName?: string; /** - *

          A token to resume pagination.

          + *

          The Amazon Resource Name (ARN) of the experiment.

          */ - NextToken?: string; -} - -export namespace ListWorkforcesResponse { - export const filterSensitiveLog = (obj: ListWorkforcesResponse): any => ({ - ...obj, - }); -} - -export enum ListWorkteamsSortByOptions { - CreateDate = "CreateDate", - Name = "Name", -} + ExperimentArn?: string; -export interface ListWorkteamsRequest { /** - *

          The field to sort results by. The default is CreationTime.

          + *

          The name of the experiment as displayed. If DisplayName isn't specified, + * ExperimentName is displayed.

          */ - SortBy?: ListWorkteamsSortByOptions | string; + DisplayName?: string; /** - *

          The sort order for results. The default is Ascending.

          + *

          The source of the experiment.

          */ - SortOrder?: SortOrder | string; + Source?: ExperimentSource; /** - *

          A string in the work team's name. This filter returns only work teams whose name - * contains the specified string.

          + *

          The description of the experiment.

          */ - NameContains?: string; + Description?: string; /** - *

          If the result of the previous ListWorkteams request was truncated, the - * response includes a NextToken. To retrieve the next set of labeling jobs, - * use the token in the next request.

          + *

          When the experiment was created.

          */ - NextToken?: string; + CreationTime?: Date; /** - *

          The maximum number of work teams to return in each page of the response.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - MaxResults?: number; -} + CreatedBy?: UserContext; -export namespace ListWorkteamsRequest { - export const filterSensitiveLog = (obj: ListWorkteamsRequest): any => ({ - ...obj, - }); -} + /** + *

          When the experiment was last modified.

          + */ + LastModifiedTime?: Date; -export interface ListWorkteamsResponse { /** - *

          An array of Workteam objects, each describing a work team.

          + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          */ - Workteams: Workteam[] | undefined; + LastModifiedBy?: UserContext; /** - *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of - * work teams, use it in the subsequent request.

          + *

          The list of tags that are associated with the experiment. You can use Search API to search on the tags.

          */ - NextToken?: string; + Tags?: Tag[]; } -export namespace ListWorkteamsResponse { - export const filterSensitiveLog = (obj: ListWorkteamsResponse): any => ({ +export namespace Experiment { + export const filterSensitiveLog = (obj: Experiment): any => ({ ...obj, }); } /** - *

          A list of nested Filter objects. A resource must satisfy the conditions - * of all filters to be included in the results returned from the Search API.

          - *

          For example, to filter on a training job's InputDataConfig property with a - * specific channel name and S3Uri prefix, define the following filters:

          - *
            - *
          • - *

            - * '{Name:"InputDataConfig.ChannelName", "Operator":"Equals", "Value":"train"}', - *

            - *
          • - *
          • - *

            - * '{Name:"InputDataConfig.DataSource.S3DataSource.S3Uri", "Operator":"Contains", - * "Value":"mybucket/catdata"}' - *

            - *
          • - *
          + *

          A summary of the properties of an experiment. To get the complete set of properties, call + * the DescribeExperiment API and provide the + * ExperimentName.

          */ -export interface NestedFilters { +export interface ExperimentSummary { + /** + *

          The Amazon Resource Name (ARN) of the experiment.

          + */ + ExperimentArn?: string; + + /** + *

          The name of the experiment.

          + */ + ExperimentName?: string; + + /** + *

          The name of the experiment as displayed. If DisplayName isn't specified, + * ExperimentName is displayed.

          + */ + DisplayName?: string; + + /** + *

          The source of the experiment.

          + */ + ExperimentSource?: ExperimentSource; + /** - *

          The name of the property to use in the nested filters. The value must match a listed property name, - * such as InputDataConfig.

          + *

          When the experiment was created.

          */ - NestedPropertyName: string | undefined; + CreationTime?: Date; /** - *

          A list of filters. Each filter acts on a property. Filters must contain at least one - * Filters value. For example, a NestedFilters call might - * include a filter on the PropertyName parameter of the - * InputDataConfig property: - * InputDataConfig.DataSource.S3DataSource.S3Uri.

          + *

          When the experiment was last modified.

          */ - Filters: Filter[] | undefined; + LastModifiedTime?: Date; } -export namespace NestedFilters { - export const filterSensitiveLog = (obj: NestedFilters): any => ({ +export namespace ExperimentSummary { + export const filterSensitiveLog = (obj: ExperimentSummary): any => ({ ...obj, }); } /** - *

          The trial that a trial component is associated with and the experiment the trial is part - * of. A component might not be associated with a trial. A component can be associated with - * multiple trials.

          + *

          Amazon SageMaker Feature Store stores features in a collection called Feature Group. + * A Feature Group can be visualized as a table which has rows, + * with a unique identifier for each row where each column in the table is a feature. + * In principle, a Feature Group is composed of features and values per features.

          */ -export interface Parent { +export interface FeatureGroup { + /** + *

          The Amazon Resource Name (ARN) of a FeatureGroup.

          + */ + FeatureGroupArn?: string; + + /** + *

          The name of the FeatureGroup.

          + */ + FeatureGroupName?: string; + + /** + *

          The name of the Feature whose value uniquely identifies a + * Record defined in the FeatureGroup + * FeatureDefinitions.

          + */ + RecordIdentifierFeatureName?: string; + /** - *

          The name of the trial.

          + *

          The name of the feature that stores the EventTime of a Record in a + * FeatureGroup.

          + *

          A EventTime is point in time when a new event + * occurs that corresponds to the creation or update of a Record in + * FeatureGroup. All Records in the FeatureGroup + * must have a corresponding EventTime.

          */ - TrialName?: string; + EventTimeFeatureName?: string; /** - *

          The name of the experiment.

          + *

          A list of Features. Each Feature must include a + * FeatureName and a FeatureType.

          + *

          Valid FeatureTypes are Integral, Fractional and + * String.

          + *

          + * FeatureNames cannot be any of the following: is_deleted, + * write_time, api_invocation_time.

          + *

          You can create up to 2,500 FeatureDefinitions per + * FeatureGroup.

          */ - ExperimentName?: string; -} + FeatureDefinitions?: FeatureDefinition[]; -export namespace Parent { - export const filterSensitiveLog = (obj: Parent): any => ({ - ...obj, - }); -} + /** + *

          The time a FeatureGroup was created.

          + */ + CreationTime?: Date; -/** - *

          An Amazon SageMaker processing job that is used to analyze data and evaluate models. For more information, - * see Process - * Data and Evaluate Models.

          - */ -export interface ProcessingJob { /** - *

          For each input, data is downloaded from S3 into the processing container before the - * processing job begins running if "S3InputMode" is set to File.

          + *

          Use this to specify the AWS Key Management Service (KMS) Key ID, or + * KMSKeyId, for at rest data encryption. You can turn + * OnlineStore on or off by specifying the EnableOnlineStore flag + * at General Assembly; the default value is False.

          */ - ProcessingInputs?: ProcessingInput[]; + OnlineStoreConfig?: OnlineStoreConfig; /** - *

          The output configuration for the processing job.

          + *

          The configuration of an OfflineStore.

          + *

          Provide an OfflineStoreConfig in a request to + * CreateFeatureGroup to create an OfflineStore.

          + *

          To encrypt an OfflineStore using at rest data encryption, specify AWS Key + * Management Service (KMS) key ID, or KMSKeyId, in + * S3StorageConfig.

          */ - ProcessingOutputConfig?: ProcessingOutputConfig; + OfflineStoreConfig?: OfflineStoreConfig; /** - *

          The name of the processing job.

          + *

          The Amazon Resource Name (ARN) of the IAM execution role used to create the feature + * group.

          */ - ProcessingJobName?: string; + RoleArn?: string; /** - *

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a - * processing job. In distributed training, you specify more than one instance.

          + *

          A FeatureGroup status.

          */ - ProcessingResources?: ProcessingResources; + FeatureGroupStatus?: FeatureGroupStatus | string; /** - *

          Specifies a time limit for how long the processing job is allowed to run.

          + *

          The status of OfflineStore.

          */ - StoppingCondition?: ProcessingStoppingCondition; + OfflineStoreStatus?: OfflineStoreStatus; /** - *

          Configuration to run a processing job in a specified container image.

          + *

          The reason that the FeatureGroup failed to + * be replicated in the OfflineStore. This is + * failure may be due to a failure to create a FeatureGroup in + * or delete a FeatureGroup from the OfflineStore.

          */ - AppSpecification?: AppSpecification; + FailureReason?: string; /** - *

          Sets the environment variables in the Docker container.

          + *

          A free form description of a FeatureGroup.

          */ - Environment?: { [key: string]: string }; + Description?: string; /** - *

          Networking options for a job, such as network traffic encryption between containers, - * whether to allow inbound and outbound network calls to and from containers, and the VPC - * subnets and security groups to use for VPC-enabled jobs.

          + *

          Tags used to define a FeatureGroup.

          */ - NetworkConfig?: NetworkConfig; + Tags?: Tag[]; +} + +export namespace FeatureGroup { + export const filterSensitiveLog = (obj: FeatureGroup): any => ({ + ...obj, + }); +} + +export enum FeatureGroupSortBy { + CREATION_TIME = "CreationTime", + FEATURE_GROUP_STATUS = "FeatureGroupStatus", + NAME = "Name", + OFFLINE_STORE_STATUS = "OfflineStoreStatus", +} + +export enum FeatureGroupSortOrder { + ASCENDING = "Ascending", + DESCENDING = "Descending", +} +/** + *

          The name, Arn, CreationTime, FeatureGroup values, + * LastUpdatedTime and EnableOnlineStorage status of a + * FeatureGroup.

          + */ +export interface FeatureGroupSummary { /** - *

          The ARN of the role used to create the processing job.

          + *

          The name of FeatureGroup.

          */ - RoleArn?: string; + FeatureGroupName: string | undefined; /** - *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

          - * + *

          Unique identifier for the FeatureGroup.

          */ - ExperimentConfig?: ExperimentConfig; + FeatureGroupArn: string | undefined; /** - *

          The ARN of the processing job.

          + *

          A timestamp indicating the time of creation time of the FeatureGroup.

          */ - ProcessingJobArn?: string; + CreationTime: Date | undefined; /** - *

          The status of the processing job.

          + *

          The status of a FeatureGroup. The status can be any of the following: + * Creating, Created, CreateFail, + * Deleting or DetailFail.

          */ - ProcessingJobStatus?: ProcessingJobStatus | string; + FeatureGroupStatus?: FeatureGroupStatus | string; /** - *

          A string, up to one KB in size, that contains metadata from the processing - * container when the processing job exits.

          + *

          Notifies you if replicating data into the OfflineStore has failed. Returns + * either: Active or Blocked.

          */ - ExitMessage?: string; + OfflineStoreStatus?: OfflineStoreStatus; +} + +export namespace FeatureGroupSummary { + export const filterSensitiveLog = (obj: FeatureGroupSummary): any => ({ + ...obj, + }); +} + +export enum Operator { + CONTAINS = "Contains", + EQUALS = "Equals", + EXISTS = "Exists", + GREATER_THAN = "GreaterThan", + GREATER_THAN_OR_EQUAL_TO = "GreaterThanOrEqualTo", + IN = "In", + LESS_THAN = "LessThan", + LESS_THAN_OR_EQUAL_TO = "LessThanOrEqualTo", + NOT_EQUALS = "NotEquals", + NOT_EXISTS = "NotExists", +} +/** + *

          A conditional statement for a search expression that includes a resource property, a + * Boolean operator, and a value. Resources that match the statement are returned in the + * results from the Search API.

          + * + *

          If you specify a Value, but not an Operator, Amazon SageMaker uses the + * equals operator.

          + *

          In search, there are several property types:

          + *
          + *
          Metrics
          + *
          + *

          To define a metric filter, enter a value using the form + * "Metrics.", where is + * a metric name. For example, the following filter searches for training jobs + * with an "accuracy" metric greater than + * "0.9":

          + *

          + * { + *

          + *

          + * "Name": "Metrics.accuracy", + *

          + *

          + * "Operator": "GreaterThan", + *

          + *

          + * "Value": "0.9" + *

          + *

          + * } + *

          + *
          + *
          HyperParameters
          + *
          + *

          To define a hyperparameter filter, enter a value with the form + * "HyperParameters.". Decimal hyperparameter + * values are treated as a decimal in a comparison if the specified + * Value is also a decimal value. If the specified + * Value is an integer, the decimal hyperparameter values are + * treated as integers. For example, the following filter is satisfied by + * training jobs with a "learning_rate" hyperparameter that is + * less than "0.5":

          + *

          + * { + *

          + *

          + * "Name": "HyperParameters.learning_rate", + *

          + *

          + * "Operator": "LessThan", + *

          + *

          + * "Value": "0.5" + *

          + *

          + * } + *

          + *
          + *
          Tags
          + *
          + *

          To define a tag filter, enter a value with the form + * Tags..

          + *
          + *
          + */ +export interface Filter { /** - *

          A string, up to one KB in size, that contains the reason a processing job failed, if - * it failed.

          + *

          A resource property name. For example, TrainingJobName. For + * valid property names, see SearchRecord. + * You must specify a valid property for the resource.

          */ - FailureReason?: string; + Name: string | undefined; /** - *

          The time that the processing job ended.

          + *

          A Boolean binary operator that is used to evaluate the filter. The operator field + * contains one of the following values:

          + *
          + *
          Equals
          + *
          + *

          The value of Name equals Value.

          + *
          + *
          NotEquals
          + *
          + *

          The value of Name doesn't equal Value.

          + *
          + *
          Exists
          + *
          + *

          The Name property exists.

          + *
          + *
          NotExists
          + *
          + *

          The Name property does not exist.

          + *
          + *
          GreaterThan
          + *
          + *

          The value of Name is greater than Value. + * Not supported for text properties.

          + *
          + *
          GreaterThanOrEqualTo
          + *
          + *

          The value of Name is greater than or equal to Value. + * Not supported for text properties.

          + *
          + *
          LessThan
          + *
          + *

          The value of Name is less than Value. + * Not supported for text properties.

          + *
          + *
          LessThanOrEqualTo
          + *
          + *

          The value of Name is less than or equal to Value. + * Not supported for text properties.

          + *
          + *
          In
          + *
          + *

          The value of Name is one of the comma delimited strings in + * Value. Only supported for text properties.

          + *
          + *
          Contains
          + *
          + *

          The value of Name contains the string Value. + * Only supported for text properties.

          + *

          A SearchExpression can include the Contains operator + * multiple times when the value of Name is one of the following:

          + *
            + *
          • + *

            + * Experiment.DisplayName + *

            + *
          • + *
          • + *

            + * Experiment.ExperimentName + *

            + *
          • + *
          • + *

            + * Experiment.Tags + *

            + *
          • + *
          • + *

            + * Trial.DisplayName + *

            + *
          • + *
          • + *

            + * Trial.TrialName + *

            + *
          • + *
          • + *

            + * Trial.Tags + *

            + *
          • + *
          • + *

            + * TrialComponent.DisplayName + *

            + *
          • + *
          • + *

            + * TrialComponent.TrialComponentName + *

            + *
          • + *
          • + *

            + * TrialComponent.Tags + *

            + *
          • + *
          • + *

            + * TrialComponent.InputArtifacts + *

            + *
          • + *
          • + *

            + * TrialComponent.OutputArtifacts + *

            + *
          • + *
          + *

          A SearchExpression can include only one Contains operator + * for all other values of Name. In these cases, if you include multiple + * Contains operators in the SearchExpression, the result is + * the following error message: "'CONTAINS' operator usage limit of 1 + * exceeded."

          + *
          + *
          */ - ProcessingEndTime?: Date; + Operator?: Operator | string; /** - *

          The time that the processing job started.

          + *

          A value used with Name and Operator to determine which + * resources satisfy the filter's condition. For numerical properties, Value + * must be an integer or floating-point decimal. For timestamp properties, + * Value must be an ISO 8601 date-time string of the following format: + * YYYY-mm-dd'T'HH:MM:SS.

          */ - ProcessingStartTime?: Date; + Value?: string; +} - /** - *

          The time the processing job was last modified.

          - */ - LastModifiedTime?: Date; +export namespace Filter { + export const filterSensitiveLog = (obj: Filter): any => ({ + ...obj, + }); +} +/** + *

          Contains summary information about the flow definition.

          + */ +export interface FlowDefinitionSummary { /** - *

          The time the processing job was created.

          + *

          The name of the flow definition.

          */ - CreationTime?: Date; + FlowDefinitionName: string | undefined; /** - *

          The ARN of a monitoring schedule for an endpoint associated with this processing - * job.

          + *

          The Amazon Resource Name (ARN) of the flow definition.

          */ - MonitoringScheduleArn?: string; + FlowDefinitionArn: string | undefined; /** - *

          The Amazon Resource Name (ARN) of the AutoML job associated with this processing job.

          + *

          The status of the flow definition. Valid values:

          */ - AutoMLJobArn?: string; + FlowDefinitionStatus: FlowDefinitionStatus | string | undefined; /** - *

          The ARN of the training job associated with this processing job.

          + *

          The timestamp when SageMaker created the flow definition.

          */ - TrainingJobArn?: string; + CreationTime: Date | undefined; /** - *

          An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management - * User Guide.

          + *

          The reason why the flow definition creation failed. A failure reason is returned only when the flow definition status is Failed.

          */ - Tags?: Tag[]; + FailureReason?: string; } -export namespace ProcessingJob { - export const filterSensitiveLog = (obj: ProcessingJob): any => ({ +export namespace FlowDefinitionSummary { + export const filterSensitiveLog = (obj: FlowDefinitionSummary): any => ({ ...obj, }); } -/** - *

          Contains input values for a task.

          - */ -export interface RenderableTask { +export interface GetModelPackageGroupPolicyInput { /** - *

          A JSON object that contains values for the variables defined in the template. It is - * made available to the template under the substitution variable task.input. - * For example, if you define a variable task.input.text in your template, you - * can supply the variable in the JSON object as "text": "sample text".

          + *

          The name of the model group for which to get the resource policy.

          */ - Input: string | undefined; + ModelPackageGroupName: string | undefined; } -export namespace RenderableTask { - export const filterSensitiveLog = (obj: RenderableTask): any => ({ +export namespace GetModelPackageGroupPolicyInput { + export const filterSensitiveLog = (obj: GetModelPackageGroupPolicyInput): any => ({ ...obj, }); } -/** - *

          A description of an error that occurred while rendering the template.

          - */ -export interface RenderingError { - /** - *

          A unique identifier for a specific class of errors.

          - */ - Code: string | undefined; - +export interface GetModelPackageGroupPolicyOutput { /** - *

          A human-readable message describing the error.

          + *

          The resource policy for the model group.

          */ - Message: string | undefined; + ResourcePolicy: string | undefined; } -export namespace RenderingError { - export const filterSensitiveLog = (obj: RenderingError): any => ({ +export namespace GetModelPackageGroupPolicyOutput { + export const filterSensitiveLog = (obj: GetModelPackageGroupPolicyOutput): any => ({ ...obj, }); } -export interface RenderUiTemplateRequest { - /** - *

          A Template object containing the worker UI template to render.

          - */ - UiTemplate?: UiTemplate; - - /** - *

          A RenderableTask object containing a representative task to - * render.

          - */ - Task: RenderableTask | undefined; - - /** - *

          The Amazon Resource Name (ARN) that has access to the S3 objects that are used by the - * template.

          - */ - RoleArn: string | undefined; - - /** - *

          The HumanTaskUiArn of the worker UI that you want to render. Do not - * provide a HumanTaskUiArn if you use the UiTemplate - * parameter.

          - *

          See a list of available Human Ui Amazon Resource Names (ARNs) in UiConfig.

          - */ - HumanTaskUiArn?: string; -} +export interface GetSagemakerServicecatalogPortfolioStatusInput {} -export namespace RenderUiTemplateRequest { - export const filterSensitiveLog = (obj: RenderUiTemplateRequest): any => ({ +export namespace GetSagemakerServicecatalogPortfolioStatusInput { + export const filterSensitiveLog = (obj: GetSagemakerServicecatalogPortfolioStatusInput): any => ({ ...obj, }); } -export interface RenderUiTemplateResponse { - /** - *

          A Liquid template that renders the HTML for the worker UI.

          - */ - RenderedContent: string | undefined; +export enum SagemakerServicecatalogStatus { + DISABLED = "Disabled", + ENABLED = "Enabled", +} +export interface GetSagemakerServicecatalogPortfolioStatusOutput { /** - *

          A list of one or more RenderingError objects if any were encountered - * while rendering the template. If there were no errors, the list is empty.

          + *

          Whether Service Catalog is enabled or disabled in SageMaker.

          */ - Errors: RenderingError[] | undefined; + Status?: SagemakerServicecatalogStatus | string; } -export namespace RenderUiTemplateResponse { - export const filterSensitiveLog = (obj: RenderUiTemplateResponse): any => ({ +export namespace GetSagemakerServicecatalogPortfolioStatusOutput { + export const filterSensitiveLog = (obj: GetSagemakerServicecatalogPortfolioStatusOutput): any => ({ ...obj, }); } -export enum SearchSortOrder { - ASCENDING = "Ascending", - DESCENDING = "Descending", +export enum ResourceType { + ENDPOINT = "Endpoint", + EXPERIMENT = "Experiment", + EXPERIMENT_TRIAL = "ExperimentTrial", + EXPERIMENT_TRIAL_COMPONENT = "ExperimentTrialComponent", + FEATURE_GROUP = "FeatureGroup", + MODEL_PACKAGE = "ModelPackage", + MODEL_PACKAGE_GROUP = "ModelPackageGroup", + PIPELINE = "Pipeline", + PIPELINE_EXECUTION = "PipelineExecution", + TRAINING_JOB = "TrainingJob", } /** - *

          Contains information about a training job.

          + *

          Part of the SuggestionQuery type. Specifies a hint for retrieving property + * names that begin with the specified text.

          */ -export interface TrainingJob { - /** - *

          The name of the training job.

          - */ - TrainingJobName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the training job.

          - */ - TrainingJobArn?: string; - - /** - *

          The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the - * training job was launched by a hyperparameter tuning job.

          - */ - TuningJobArn?: string; - - /** - *

          The Amazon Resource Name (ARN) of the labeling job.

          - */ - LabelingJobArn?: string; - - /** - *

          The Amazon Resource Name (ARN) of the job.

          - */ - AutoMLJobArn?: string; - +export interface PropertyNameQuery { /** - *

          Information about the Amazon S3 location that is configured for storing model - * artifacts.

          + *

          Text that begins a property's name.

          */ - ModelArtifacts?: ModelArtifacts; + PropertyNameHint: string | undefined; +} - /** - *

          The status of the - * training - * job.

          - *

          Training job statuses are:

          - *
            - *
          • - *

            - * InProgress - The training is in progress.

            - *
          • - *
          • - *

            - * Completed - The training job has completed.

            - *
          • - *
          • - *

            - * Failed - The training job has failed. To see the reason for the - * failure, see the FailureReason field in the response to a - * DescribeTrainingJobResponse call.

            - *
          • - *
          • - *

            - * Stopping - The training job is stopping.

            - *
          • - *
          • - *

            - * Stopped - The training job has stopped.

            - *
          • - *
          - *

          For - * more detailed information, see SecondaryStatus.

          - */ - TrainingJobStatus?: TrainingJobStatus | string; +export namespace PropertyNameQuery { + export const filterSensitiveLog = (obj: PropertyNameQuery): any => ({ + ...obj, + }); +} +/** + *

          Specified in the GetSearchSuggestions request. + * Limits the property names that are included in the response.

          + */ +export interface SuggestionQuery { /** - *

          Provides detailed information about the state of the training job. For detailed - * information about the secondary status of the training job, see - * StatusMessage under SecondaryStatusTransition.

          - *

          Amazon SageMaker provides primary statuses and secondary statuses that apply to each of - * them:

          - *
          - *
          InProgress
          - *
          - *
            - *
          • - *

            - * Starting - * - Starting the training job.

            - *
          • - *
          • - *

            - * Downloading - An optional stage for algorithms that - * support File training input mode. It indicates that - * data is being downloaded to the ML storage volumes.

            - *
          • - *
          • - *

            - * Training - Training is in progress.

            - *
          • - *
          • - *

            - * Uploading - Training is complete and the model - * artifacts are being uploaded to the S3 location.

            - *
          • - *
          - *
          - *
          Completed
          - *
          - *
            - *
          • - *

            - * Completed - The training job has completed.

            - *
          • - *
          - *
          - *
          Failed
          - *
          - *
            - *
          • - *

            - * Failed - The training job has failed. The reason for - * the failure is returned in the FailureReason field of - * DescribeTrainingJobResponse.

            - *
          • - *
          - *
          - *
          Stopped
          - *
          - *
            - *
          • - *

            - * MaxRuntimeExceeded - The job stopped because it - * exceeded the maximum allowed runtime.

            - *
          • - *
          • - *

            - * Stopped - The training job has stopped.

            - *
          • - *
          - *
          - *
          Stopping
          - *
          - *
            - *
          • - *

            - * Stopping - Stopping the training job.

            - *
          • - *
          - *
          - *
          - * - *

          Valid values for SecondaryStatus are subject to change.

          - *
          - *

          We no longer support the following secondary statuses:

          - *
            - *
          • - *

            - * LaunchingMLInstances - *

            - *
          • - *
          • - *

            - * PreparingTrainingStack - *

            - *
          • - *
          • - *

            - * DownloadingTrainingImage - *

            - *
          • - *
          + *

          Defines a property name hint. Only property + * names that begin with the specified hint are included in the response.

          */ - SecondaryStatus?: SecondaryStatus | string; + PropertyNameQuery?: PropertyNameQuery; +} - /** - *

          If the training job failed, the reason it failed.

          - */ - FailureReason?: string; +export namespace SuggestionQuery { + export const filterSensitiveLog = (obj: SuggestionQuery): any => ({ + ...obj, + }); +} +export interface GetSearchSuggestionsRequest { /** - *

          Algorithm-specific parameters.

          + *

          The name of the Amazon SageMaker resource to search for.

          */ - HyperParameters?: { [key: string]: string }; + Resource: ResourceType | string | undefined; /** - *

          Information about the algorithm used for training, and algorithm metadata.

          + *

          Limits the property names that are included in the response.

          */ - AlgorithmSpecification?: AlgorithmSpecification; + SuggestionQuery?: SuggestionQuery; +} + +export namespace GetSearchSuggestionsRequest { + export const filterSensitiveLog = (obj: GetSearchSuggestionsRequest): any => ({ + ...obj, + }); +} +/** + *

          A property name returned from a GetSearchSuggestions call that specifies + * a value in the PropertyNameQuery field.

          + */ +export interface PropertyNameSuggestion { /** - *

          The AWS Identity and Access Management (IAM) role configured for the training job.

          + *

          A suggested property name based on what you entered in the search textbox in the Amazon SageMaker + * console.

          */ - RoleArn?: string; + PropertyName?: string; +} + +export namespace PropertyNameSuggestion { + export const filterSensitiveLog = (obj: PropertyNameSuggestion): any => ({ + ...obj, + }); +} +export interface GetSearchSuggestionsResponse { /** - *

          An array of Channel objects that describes each data input - * channel.

          + *

          A list of property names for a Resource that match a + * SuggestionQuery.

          */ - InputDataConfig?: Channel[]; + PropertyNameSuggestions?: PropertyNameSuggestion[]; +} + +export namespace GetSearchSuggestionsResponse { + export const filterSensitiveLog = (obj: GetSearchSuggestionsResponse): any => ({ + ...obj, + }); +} +/** + *

          Specifies configuration details for a Git repository when the repository is + * updated.

          + */ +export interface GitConfigForUpdate { /** - *

          The S3 path where model artifacts that you configured when creating the job are - * stored. Amazon SageMaker creates subfolders for model artifacts.

          + *

          The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the + * credentials used to access the git repository. The secret must have a staging label of + * AWSCURRENT and must be in the following format:

          + *

          + * {"username": UserName, "password": + * Password} + *

          */ - OutputDataConfig?: OutputDataConfig; + SecretArn?: string; +} + +export namespace GitConfigForUpdate { + export const filterSensitiveLog = (obj: GitConfigForUpdate): any => ({ + ...obj, + }); +} +/** + *

          Container for human task user interface information.

          + */ +export interface HumanTaskUiSummary { /** - *

          Resources, including ML compute instances and ML storage volumes, that are configured - * for model training.

          + *

          The name of the human task user interface.

          */ - ResourceConfig?: ResourceConfig; + HumanTaskUiName: string | undefined; /** - *

          A VpcConfig object that specifies the VPC that this training job has - * access to. For more information, see Protect Training Jobs by Using an Amazon - * Virtual Private Cloud.

          + *

          The Amazon Resource Name (ARN) of the human task user interface.

          */ - VpcConfig?: VpcConfig; + HumanTaskUiArn: string | undefined; /** - *

          Specifies a limit to how long a model training job can run. When the job reaches the - * time limit, Amazon SageMaker ends the training job. Use this API to cap model training costs.

          - *

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays - * job termination for 120 seconds. Algorithms can use this 120-second window to save the - * model artifacts, so the results of training are not lost.

          + *

          A timestamp when SageMaker created the human task user interface.

          */ - StoppingCondition?: StoppingCondition; + CreationTime: Date | undefined; +} + +export namespace HumanTaskUiSummary { + export const filterSensitiveLog = (obj: HumanTaskUiSummary): any => ({ + ...obj, + }); +} + +export enum HyperParameterTuningJobSortByOptions { + CreationTime = "CreationTime", + Name = "Name", + Status = "Status", +} +/** + *

          Provides summary information about a hyperparameter tuning job.

          + */ +export interface HyperParameterTuningJobSummary { /** - *

          A timestamp that indicates when the training job was created.

          + *

          The name of the tuning job.

          */ - CreationTime?: Date; + HyperParameterTuningJobName: string | undefined; /** - *

          Indicates the time when the training job starts on training instances. You are billed - * for the time interval between this time and the value of TrainingEndTime. - * The start time in CloudWatch Logs might be later than this time. The difference is due to the time - * it takes to download the training data and to the size of the training container.

          + *

          The + * Amazon + * Resource Name (ARN) of the tuning job.

          */ - TrainingStartTime?: Date; + HyperParameterTuningJobArn: string | undefined; /** - *

          Indicates the time when the training job ends on training instances. You are billed - * for the time interval between the value of TrainingStartTime and this time. - * For successful jobs and stopped jobs, this is the time after model artifacts are - * uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          + *

          The status of the + * tuning + * job.

          */ - TrainingEndTime?: Date; + HyperParameterTuningJobStatus: HyperParameterTuningJobStatus | string | undefined; /** - *

          A timestamp that indicates when the status of the training job was last - * modified.

          + *

          Specifies the search strategy hyperparameter tuning uses to choose which + * hyperparameters to + * use + * for each iteration. Currently, the only valid value is + * Bayesian.

          */ - LastModifiedTime?: Date; + Strategy: HyperParameterTuningJobStrategyType | string | undefined; /** - *

          A history of all of the secondary statuses that the training job has transitioned - * through.

          + *

          The date and time that the tuning job was created.

          */ - SecondaryStatusTransitions?: SecondaryStatusTransition[]; + CreationTime: Date | undefined; /** - *

          A list of final metric values that are set when the training job completes. Used only - * if the training job was configured to use metrics.

          + *

          The date and time that the tuning job ended.

          */ - FinalMetricDataList?: MetricData[]; + HyperParameterTuningEndTime?: Date; /** - *

          If the TrainingJob was created with network isolation, the value is set - * to true. If network isolation is enabled, nodes can't communicate beyond - * the VPC they run in.

          + *

          The date and time that the tuning job was + * modified.

          */ - EnableNetworkIsolation?: boolean; + LastModifiedTime?: Date; /** - *

          To encrypt all communications between ML compute instances in distributed training, - * choose True. Encryption provides greater security for distributed training, - * but training might take longer. How long it takes depends on the amount of communication - * between compute instances, especially if you use a deep learning algorithm in - * distributed training.

          + *

          The TrainingJobStatusCounters object that specifies the numbers of + * training jobs, categorized by status, that this tuning job launched.

          */ - EnableInterContainerTrafficEncryption?: boolean; + TrainingJobStatusCounters: TrainingJobStatusCounters | undefined; /** - *

          When true, enables managed spot training using Amazon EC2 Spot instances to run - * training jobs instead of on-demand instances. For more information, see Managed Spot Training.

          + *

          The ObjectiveStatusCounters object that specifies the numbers of + * training jobs, categorized by objective metric status, that this tuning job + * launched.

          */ - EnableManagedSpotTraining?: boolean; + ObjectiveStatusCounters: ObjectiveStatusCounters | undefined; /** - *

          Contains information about the output location for managed spot training checkpoint - * data.

          + *

          The ResourceLimits object that specifies the maximum number of + * training jobs and parallel training jobs allowed for this tuning job.

          */ - CheckpointConfig?: CheckpointConfig; + ResourceLimits?: ResourceLimits; +} + +export namespace HyperParameterTuningJobSummary { + export const filterSensitiveLog = (obj: HyperParameterTuningJobSummary): any => ({ + ...obj, + }); +} +/** + *

          A SageMaker image. A SageMaker image represents a set of container images that are derived from + * a common base container image. Each of these container images is represented by a SageMaker + * ImageVersion.

          + */ +export interface Image { /** - *

          The training time in seconds.

          + *

          When the image was created.

          */ - TrainingTimeInSeconds?: number; + CreationTime: Date | undefined; /** - *

          The billable time in seconds.

          + *

          The description of the image.

          */ - BillableTimeInSeconds?: number; + Description?: string; /** - *

          Configuration information for the debug hook parameters, collection configuration, and - * storage paths.

          + *

          The name of the image as displayed.

          */ - DebugHookConfig?: DebugHookConfig; + DisplayName?: string; /** - *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

          - * + *

          When a create, update, or delete operation fails, the reason for the failure.

          */ - ExperimentConfig?: ExperimentConfig; + FailureReason?: string; /** - *

          Information about the debug rule configuration.

          + *

          The Amazon Resource Name (ARN) of the image.

          */ - DebugRuleConfigurations?: DebugRuleConfiguration[]; + ImageArn: string | undefined; /** - *

          Configuration of storage locations for TensorBoard output.

          + *

          The name of the image.

          */ - TensorBoardOutputConfig?: TensorBoardOutputConfig; + ImageName: string | undefined; /** - *

          Information about the evaluation status of the rules for the training job.

          + *

          The status of the image.

          */ - DebugRuleEvaluationStatuses?: DebugRuleEvaluationStatus[]; + ImageStatus: ImageStatus | string | undefined; /** - *

          An array of key-value pairs. For more information, see Using - * Cost Allocation Tags in the AWS Billing and Cost Management User - * Guide.

          + *

          When the image was last modified.

          */ - Tags?: Tag[]; + LastModifiedTime: Date | undefined; } -export namespace TrainingJob { - export const filterSensitiveLog = (obj: TrainingJob): any => ({ +export namespace Image { + export const filterSensitiveLog = (obj: Image): any => ({ ...obj, }); } -/** - *

          A short summary of a trial component.

          - */ -export interface TrialComponentSimpleSummary { +export enum ImageSortBy { + CREATION_TIME = "CREATION_TIME", + IMAGE_NAME = "IMAGE_NAME", + LAST_MODIFIED_TIME = "LAST_MODIFIED_TIME", +} + +export enum ImageSortOrder { + ASCENDING = "ASCENDING", + DESCENDING = "DESCENDING", +} + +/** + *

          A version of a SageMaker Image. A version represents an existing container + * image.

          + */ +export interface ImageVersion { + /** + *

          When the version was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          When a create or delete operation fails, the reason for the failure.

          + */ + FailureReason?: string; + /** - *

          The name of the trial component.

          + *

          The Amazon Resource Name (ARN) of the image the version is based on.

          */ - TrialComponentName?: string; + ImageArn: string | undefined; /** - *

          The Amazon Resource Name (ARN) of the trial component.

          + *

          The ARN of the version.

          */ - TrialComponentArn?: string; + ImageVersionArn: string | undefined; /** - *

          The Amazon Resource Name (ARN) and job type of the source of a trial component.

          + *

          The status of the version.

          */ - TrialComponentSource?: TrialComponentSource; + ImageVersionStatus: ImageVersionStatus | string | undefined; /** - *

          When the component was created.

          + *

          When the version was last modified.

          */ - CreationTime?: Date; + LastModifiedTime: Date | undefined; /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          + *

          The version number.

          */ - CreatedBy?: UserContext; + Version: number | undefined; } -export namespace TrialComponentSimpleSummary { - export const filterSensitiveLog = (obj: TrialComponentSimpleSummary): any => ({ +export namespace ImageVersion { + export const filterSensitiveLog = (obj: ImageVersion): any => ({ ...obj, }); } +export enum ImageVersionSortBy { + CREATION_TIME = "CREATION_TIME", + LAST_MODIFIED_TIME = "LAST_MODIFIED_TIME", + VERSION = "VERSION", +} + +export enum ImageVersionSortOrder { + ASCENDING = "ASCENDING", + DESCENDING = "DESCENDING", +} + /** - *

          The properties of a trial as returned by the Search API.

          + *

          Provides counts for human-labeled tasks in the labeling job.

          */ -export interface Trial { - /** - *

          The name of the trial.

          - */ - TrialName?: string; - +export interface LabelCountersForWorkteam { /** - *

          The Amazon Resource Name (ARN) of the trial.

          + *

          The total number of data objects labeled by a human worker.

          */ - TrialArn?: string; + HumanLabeled?: number; /** - *

          The name of the trial as displayed. If DisplayName isn't specified, - * TrialName is displayed.

          + *

          The total number of data objects that need to be labeled by a human worker.

          */ - DisplayName?: string; + PendingHuman?: number; /** - *

          The name of the experiment the trial is part of.

          + *

          The total number of tasks in the labeling job.

          */ - ExperimentName?: string; + Total?: number; +} - /** - *

          The source of the trial.

          - */ - Source?: TrialSource; +export namespace LabelCountersForWorkteam { + export const filterSensitiveLog = (obj: LabelCountersForWorkteam): any => ({ + ...obj, + }); +} +/** + *

          Provides summary information for a work team.

          + */ +export interface LabelingJobForWorkteamSummary { /** - *

          When the trial was created.

          + *

          The name of the labeling job that the work team is assigned to.

          */ - CreationTime?: Date; + LabelingJobName?: string; /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          + *

          A unique identifier for a labeling job. You can use this to refer to a specific + * labeling job.

          */ - CreatedBy?: UserContext; + JobReferenceCode: string | undefined; /** - *

          Who last modified the trial.

          + *

          */ - LastModifiedTime?: Date; + WorkRequesterAccountId: string | undefined; /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          + *

          The date and time that the labeling job was created.

          */ - LastModifiedBy?: UserContext; + CreationTime: Date | undefined; /** - *

          The list of tags that are associated with the trial. You can use Search - * API to search on the tags.

          + *

          Provides information about the progress of a labeling job.

          */ - Tags?: Tag[]; + LabelCounters?: LabelCountersForWorkteam; /** - *

          A list of the components associated with the trial. For each component, a summary of the - * component's properties is included.

          + *

          The configured number of workers per data object.

          */ - TrialComponentSummaries?: TrialComponentSimpleSummary[]; + NumberOfHumanWorkersPerDataObject?: number; } -export namespace Trial { - export const filterSensitiveLog = (obj: Trial): any => ({ +export namespace LabelingJobForWorkteamSummary { + export const filterSensitiveLog = (obj: LabelingJobForWorkteamSummary): any => ({ ...obj, }); } /** - *

          A batch transform job. For information about SageMaker batch transform, see Use Batch - * Transform.

          + *

          Provides summary information about a labeling job.

          */ -export interface TransformJob { +export interface LabelingJobSummary { /** - *

          The name of the transform job.

          + *

          The name of the labeling job.

          */ - TransformJobName?: string; + LabelingJobName: string | undefined; /** - *

          The Amazon Resource Name (ARN) of the transform job.

          + *

          The Amazon Resource Name (ARN) assigned to the labeling job when it was + * created.

          */ - TransformJobArn?: string; + LabelingJobArn: string | undefined; /** - *

          The status of the transform job.

          - *

          Transform job statuses are:

          - *
            - *
          • - *

            - * InProgress - The job is in progress.

            - *
          • - *
          • - *

            - * Completed - The job has completed.

            - *
          • - *
          • - *

            - * Failed - The transform job has failed. To see the reason for the failure, - * see the FailureReason field in the response to a - * DescribeTransformJob call.

            - *
          • - *
          • - *

            - * Stopping - The transform job is stopping.

            - *
          • - *
          • - *

            - * Stopped - The transform job has stopped.

            - *
          • - *
          + *

          The date and time that the job was created (timestamp).

          */ - TransformJobStatus?: TransformJobStatus | string; + CreationTime: Date | undefined; /** - *

          If the transform job failed, the reason it failed.

          + *

          The date and time that the job was last modified (timestamp).

          */ - FailureReason?: string; + LastModifiedTime: Date | undefined; /** - *

          The name of the model associated with the transform job.

          + *

          The current status of the labeling job.

          */ - ModelName?: string; + LabelingJobStatus: LabelingJobStatus | string | undefined; /** - *

          The maximum number of parallel requests that can be sent to each instance in a transform - * job. If MaxConcurrentTransforms is set to 0 or left unset, SageMaker checks the - * optional execution-parameters to determine the settings for your chosen algorithm. If the - * execution-parameters endpoint is not enabled, the default value is 1. For built-in algorithms, - * you don't need to set a value for MaxConcurrentTransforms.

          + *

          Counts showing the progress of the labeling job.

          */ - MaxConcurrentTransforms?: number; + LabelCounters: LabelCounters | undefined; /** - *

          Configures the timeout and maximum number of retries for processing a transform job - * invocation.

          + *

          The Amazon Resource Name (ARN) of the work team assigned to the job.

          */ - ModelClientConfig?: ModelClientConfig; + WorkteamArn: string | undefined; /** - *

          The maximum allowed size of the payload, in MB. A payload is the data portion of a record - * (without metadata). The value in MaxPayloadInMB must be greater than, or equal - * to, the size of a single record. To estimate the size of a record in MB, divide the size of - * your dataset by the number of records. To ensure that the records fit within the maximum - * payload size, we recommend using a slightly larger value. The default value is 6 MB. For cases - * where the payload might be arbitrarily large and is transmitted using HTTP chunked encoding, - * set the value to 0. This feature works only in supported algorithms. Currently, SageMaker built-in - * algorithms do not support HTTP chunked encoding.

          + *

          The Amazon Resource Name (ARN) of a Lambda function. The function is run before each + * data object is sent to a worker.

          */ - MaxPayloadInMB?: number; + PreHumanTaskLambdaArn: string | undefined; /** - *

          Specifies the number of records to include in a mini-batch for an HTTP inference request. - * A record is a single unit of input data that inference can be made on. For example, a single - * line in a CSV file is a record.

          + *

          The Amazon Resource Name (ARN) of the Lambda function used to consolidate the + * annotations from individual workers into a label for a data object. For more + * information, see Annotation + * Consolidation.

          */ - BatchStrategy?: BatchStrategy | string; + AnnotationConsolidationLambdaArn?: string; /** - *

          The environment variables to set in the Docker container. We support up to 16 key and - * values entries in the map.

          + *

          If the LabelingJobStatus field is Failed, this field + * contains a description of the error.

          */ - Environment?: { [key: string]: string }; + FailureReason?: string; /** - *

          Describes the input source of a transform job and the way the transform job consumes - * it.

          + *

          The location of the output produced by the labeling job.

          */ - TransformInput?: TransformInput; + LabelingJobOutput?: LabelingJobOutput; /** - *

          Describes the results of a transform job.

          + *

          Input configuration for the labeling job.

          */ - TransformOutput?: TransformOutput; + InputConfig?: LabelingJobInputConfig; +} - /** - *

          Describes the resources, including ML instance types and ML instance count, to use for - * transform job.

          - */ - TransformResources?: TransformResources; +export namespace LabelingJobSummary { + export const filterSensitiveLog = (obj: LabelingJobSummary): any => ({ + ...obj, + }); +} + +export enum SortActionsBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} +export enum SortOrder { + ASCENDING = "Ascending", + DESCENDING = "Descending", +} + +export interface ListActionsRequest { /** - *

          A timestamp that shows when the transform Job was created.

          + *

          A filter that returns only actions with the specified source URI.

          */ - CreationTime?: Date; + SourceUri?: string; /** - *

          Indicates when the transform job starts on ML instances. You are billed for the time - * interval between this time and the value of TransformEndTime.

          + *

          A filter that returns only actions of the specified type.

          */ - TransformStartTime?: Date; + ActionType?: string; /** - *

          Indicates when the transform job has been completed, or has stopped or failed. You are - * billed for the time interval between this time and the value of - * TransformStartTime.

          + *

          A filter that returns only actions created on or after the specified time.

          */ - TransformEndTime?: Date; + CreatedAfter?: Date; /** - *

          The Amazon Resource Name (ARN) of the labeling job that created the transform job.

          + *

          A filter that returns only actions created on or before the specified time.

          */ - LabelingJobArn?: string; + CreatedBefore?: Date; /** - *

          The Amazon Resource Name (ARN) of the AutoML job that created the transform job.

          + *

          The property used to sort results. The default value is CreationTime.

          */ - AutoMLJobArn?: string; + SortBy?: SortActionsBy | string; /** - *

          The data structure used to specify the data to be used for inference in a batch - * transform job and to associate the data that is relevant to the prediction results in - * the output. The input filter provided allows you to exclude input data that is not - * needed for inference in a batch transform job. The output filter provided allows you to - * include input data relevant to interpreting the predictions in the output from the job. - * For more information, see Associate Prediction - * Results with their Corresponding Input Records.

          + *

          The sort order. The default value is Descending.

          */ - DataProcessing?: DataProcessing; + SortOrder?: SortOrder | string; /** - *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when - * you call the following APIs:

          - * + *

          If the previous call to ListActions didn't return the full set of actions, + * the call returns a token for getting the next set of actions.

          */ - ExperimentConfig?: ExperimentConfig; + NextToken?: string; /** - *

          A list of tags associated with the transform job.

          + *

          The maximum number of actions to return in the response. The default value is 10.

          */ - Tags?: Tag[]; + MaxResults?: number; } -export namespace TransformJob { - export const filterSensitiveLog = (obj: TransformJob): any => ({ +export namespace ListActionsRequest { + export const filterSensitiveLog = (obj: ListActionsRequest): any => ({ ...obj, }); } -/** - *

          Detailed information about the source of a trial component. Either - * ProcessingJob or TrainingJob is returned.

          - */ -export interface TrialComponentSourceDetail { - /** - *

          The Amazon Resource Name (ARN) of the source.

          - */ - SourceArn?: string; - - /** - *

          Information about a training job that's the source of a trial component.

          - */ - TrainingJob?: TrainingJob; - +export interface ListActionsResponse { /** - *

          Information about a processing job that's the source of a trial component.

          + *

          A list of actions and their properties.

          */ - ProcessingJob?: ProcessingJob; + ActionSummaries?: ActionSummary[]; /** - *

          Information about a transform job that's the source of a trial component.

          + *

          A token for getting the next set of actions, if there are any.

          */ - TransformJob?: TransformJob; + NextToken?: string; } -export namespace TrialComponentSourceDetail { - export const filterSensitiveLog = (obj: TrialComponentSourceDetail): any => ({ +export namespace ListActionsResponse { + export const filterSensitiveLog = (obj: ListActionsResponse): any => ({ ...obj, }); } -/** - *

          The properties of a trial component as returned by the Search - * API.

          - */ -export interface TrialComponent { - /** - *

          The name of the trial component.

          - */ - TrialComponentName?: string; - - /** - *

          The name of the component as displayed. If DisplayName isn't specified, - * TrialComponentName is displayed.

          - */ - DisplayName?: string; - - /** - *

          The Amazon Resource Name (ARN) of the trial component.

          - */ - TrialComponentArn?: string; - - /** - *

          The Amazon Resource Name (ARN) and job type of the source of the component.

          - */ - Source?: TrialComponentSource; - +export interface ListAlgorithmsInput { /** - *

          The status of the trial component.

          + *

          A filter that returns only algorithms created after the specified time + * (timestamp).

          */ - Status?: TrialComponentStatus; + CreationTimeAfter?: Date; /** - *

          When the component started.

          + *

          A filter that returns only algorithms created before the specified time + * (timestamp).

          */ - StartTime?: Date; + CreationTimeBefore?: Date; /** - *

          When the component ended.

          + *

          The maximum number of algorithms to return in the response.

          */ - EndTime?: Date; + MaxResults?: number; /** - *

          When the component was created.

          + *

          A string in the algorithm name. This filter returns only algorithms whose name + * contains the specified string.

          */ - CreationTime?: Date; + NameContains?: string; /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          + *

          If the response to a previous ListAlgorithms request was truncated, the + * response includes a NextToken. To retrieve the next set of algorithms, use + * the token in the next request.

          */ - CreatedBy?: UserContext; + NextToken?: string; /** - *

          When the component was last modified.

          + *

          The parameter by which to sort the results. The default is + * CreationTime.

          */ - LastModifiedTime?: Date; + SortBy?: AlgorithmSortBy | string; /** - *

          Information about the user who created or modified an experiment, trial, or trial - * component.

          + *

          The sort order for the results. The default is Ascending.

          */ - LastModifiedBy?: UserContext; + SortOrder?: SortOrder | string; +} + +export namespace ListAlgorithmsInput { + export const filterSensitiveLog = (obj: ListAlgorithmsInput): any => ({ + ...obj, + }); +} +export interface ListAlgorithmsOutput { /** - *

          The hyperparameters of the component.

          + *

          >An array of AlgorithmSummary objects, each of which lists an + * algorithm.

          */ - Parameters?: { [key: string]: TrialComponentParameterValue }; + AlgorithmSummaryList: AlgorithmSummary[] | undefined; /** - *

          The input artifacts of the component.

          + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * algorithms, use it in the subsequent request.

          */ - InputArtifacts?: { [key: string]: TrialComponentArtifact }; + NextToken?: string; +} +export namespace ListAlgorithmsOutput { + export const filterSensitiveLog = (obj: ListAlgorithmsOutput): any => ({ + ...obj, + }); +} + +export interface ListAppImageConfigsRequest { /** - *

          The output artifacts of the component.

          + *

          The maximum number of AppImageConfigs to return in the response. The default value is + * 10.

          */ - OutputArtifacts?: { [key: string]: TrialComponentArtifact }; + MaxResults?: number; /** - *

          The metrics for the component.

          + *

          If the previous call to ListImages didn't return the full set of + * AppImageConfigs, the call returns a token for getting the next set of AppImageConfigs.

          */ - Metrics?: TrialComponentMetricSummary[]; + NextToken?: string; /** - *

          Details of the source of the component.

          + *

          A filter that returns only AppImageConfigs whose name contains the specified string.

          */ - SourceDetail?: TrialComponentSourceDetail; + NameContains?: string; /** - *

          The list of tags that are associated with the component. You can use Search API to search on the tags.

          + *

          A filter that returns only AppImageConfigs created on or before the specified time.

          */ - Tags?: Tag[]; + CreationTimeBefore?: Date; /** - *

          An array of the parents of the component. A parent is a trial the component is associated - * with and the experiment the trial is part of. A component might not have any parents.

          + *

          A filter that returns only AppImageConfigs created on or after the specified time.

          */ - Parents?: Parent[]; -} - -export namespace TrialComponent { - export const filterSensitiveLog = (obj: TrialComponent): any => ({ - ...obj, - }); -} + CreationTimeAfter?: Date; -/** - *

          A single resource returned as part of the Search API response.

          - */ -export interface SearchRecord { /** - *

          The properties of a training job.

          + *

          A filter that returns only AppImageConfigs modified on or before the specified time.

          */ - TrainingJob?: TrainingJob; + ModifiedTimeBefore?: Date; /** - *

          The properties of an experiment.

          + *

          A filter that returns only AppImageConfigs modified on or after the specified time.

          */ - Experiment?: Experiment; + ModifiedTimeAfter?: Date; /** - *

          The properties of a trial.

          + *

          The property used to sort results. The default value is CreationTime.

          */ - Trial?: Trial; + SortBy?: AppImageConfigSortKey | string; /** - *

          The properties of a trial component.

          + *

          The sort order. The default value is Descending.

          */ - TrialComponent?: TrialComponent; + SortOrder?: SortOrder | string; } -export namespace SearchRecord { - export const filterSensitiveLog = (obj: SearchRecord): any => ({ +export namespace ListAppImageConfigsRequest { + export const filterSensitiveLog = (obj: ListAppImageConfigsRequest): any => ({ ...obj, }); } -export interface SearchResponse { +export interface ListAppImageConfigsResponse { /** - *

          A list of SearchRecord objects.

          + *

          A token for getting the next set of AppImageConfigs, if there are any.

          */ - Results?: SearchRecord[]; + NextToken?: string; /** - *

          If the result of the previous Search request was truncated, the response - * includes a NextToken. To retrieve the next set of results, use the token in the next - * request.

          + *

          A list of AppImageConfigs and their properties.

          */ - NextToken?: string; + AppImageConfigs?: AppImageConfigDetails[]; } -export namespace SearchResponse { - export const filterSensitiveLog = (obj: SearchResponse): any => ({ +export namespace ListAppImageConfigsResponse { + export const filterSensitiveLog = (obj: ListAppImageConfigsResponse): any => ({ ...obj, }); } -export interface StartMonitoringScheduleRequest { +export interface ListAppsRequest { /** - *

          The name of the schedule to start.

          + *

          If the previous response was truncated, you will receive this token. + * Use it in your next request to receive the next set of results.

          */ - MonitoringScheduleName: string | undefined; -} - -export namespace StartMonitoringScheduleRequest { - export const filterSensitiveLog = (obj: StartMonitoringScheduleRequest): any => ({ - ...obj, - }); -} + NextToken?: string; -export interface StartNotebookInstanceInput { /** - *

          The name of the notebook instance to start.

          + *

          Returns a list up to a specified limit.

          */ - NotebookInstanceName: string | undefined; -} + MaxResults?: number; -export namespace StartNotebookInstanceInput { - export const filterSensitiveLog = (obj: StartNotebookInstanceInput): any => ({ - ...obj, - }); -} + /** + *

          The sort order for the results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; -export interface StopAutoMLJobRequest { /** - *

          The name of the object you are requesting.

          + *

          The parameter by which to sort the results. The default is CreationTime.

          */ - AutoMLJobName: string | undefined; -} + SortBy?: AppSortKey | string; -export namespace StopAutoMLJobRequest { - export const filterSensitiveLog = (obj: StopAutoMLJobRequest): any => ({ - ...obj, - }); -} + /** + *

          A parameter to search for the domain ID.

          + */ + DomainIdEquals?: string; -export interface StopCompilationJobRequest { /** - *

          The name of the model compilation job to stop.

          + *

          A parameter to search by user profile name.

          */ - CompilationJobName: string | undefined; + UserProfileNameEquals?: string; } -export namespace StopCompilationJobRequest { - export const filterSensitiveLog = (obj: StopCompilationJobRequest): any => ({ +export namespace ListAppsRequest { + export const filterSensitiveLog = (obj: ListAppsRequest): any => ({ ...obj, }); } -export interface StopHyperParameterTuningJobRequest { +export interface ListAppsResponse { /** - *

          The name of the tuning job to stop.

          + *

          The list of apps.

          */ - HyperParameterTuningJobName: string | undefined; -} - -export namespace StopHyperParameterTuningJobRequest { - export const filterSensitiveLog = (obj: StopHyperParameterTuningJobRequest): any => ({ - ...obj, - }); -} + Apps?: AppDetails[]; -export interface StopLabelingJobRequest { /** - *

          The name of the labeling job to stop.

          + *

          If the previous response was truncated, you will receive this token. + * Use it in your next request to receive the next set of results.

          */ - LabelingJobName: string | undefined; + NextToken?: string; } -export namespace StopLabelingJobRequest { - export const filterSensitiveLog = (obj: StopLabelingJobRequest): any => ({ +export namespace ListAppsResponse { + export const filterSensitiveLog = (obj: ListAppsResponse): any => ({ ...obj, }); } -export interface StopMonitoringScheduleRequest { - /** - *

          The name of the schedule to stop.

          - */ - MonitoringScheduleName: string | undefined; +export enum SortArtifactsBy { + CREATION_TIME = "CreationTime", } -export namespace StopMonitoringScheduleRequest { - export const filterSensitiveLog = (obj: StopMonitoringScheduleRequest): any => ({ - ...obj, - }); -} +export interface ListArtifactsRequest { + /** + *

          A filter that returns only artifacts with the specified source URI.

          + */ + SourceUri?: string; -export interface StopNotebookInstanceInput { /** - *

          The name of the notebook instance to terminate.

          + *

          A filter that returns only artifacts of the specified type.

          */ - NotebookInstanceName: string | undefined; -} + ArtifactType?: string; -export namespace StopNotebookInstanceInput { - export const filterSensitiveLog = (obj: StopNotebookInstanceInput): any => ({ - ...obj, - }); -} + /** + *

          A filter that returns only artifacts created on or after the specified time.

          + */ + CreatedAfter?: Date; -export interface StopProcessingJobRequest { /** - *

          The name of the processing job to stop.

          + *

          A filter that returns only artifacts created on or before the specified time.

          */ - ProcessingJobName: string | undefined; -} + CreatedBefore?: Date; -export namespace StopProcessingJobRequest { - export const filterSensitiveLog = (obj: StopProcessingJobRequest): any => ({ - ...obj, - }); -} + /** + *

          The property used to sort results. The default value is CreationTime.

          + */ + SortBy?: SortArtifactsBy | string; -export interface StopTrainingJobRequest { /** - *

          The name of the training job to stop.

          + *

          The sort order. The default value is Descending.

          */ - TrainingJobName: string | undefined; -} + SortOrder?: SortOrder | string; -export namespace StopTrainingJobRequest { - export const filterSensitiveLog = (obj: StopTrainingJobRequest): any => ({ - ...obj, - }); -} + /** + *

          If the previous call to ListArtifacts didn't return the full set of artifacts, + * the call returns a token for getting the next set of artifacts.

          + */ + NextToken?: string; -export interface StopTransformJobRequest { /** - *

          The name of the transform job to stop.

          + *

          The maximum number of artifacts to return in the response. The default value is 10.

          */ - TransformJobName: string | undefined; + MaxResults?: number; } -export namespace StopTransformJobRequest { - export const filterSensitiveLog = (obj: StopTransformJobRequest): any => ({ +export namespace ListArtifactsRequest { + export const filterSensitiveLog = (obj: ListArtifactsRequest): any => ({ ...obj, }); } -export interface UpdateAppImageConfigRequest { +export interface ListArtifactsResponse { /** - *

          The name of the AppImageConfig to update.

          + *

          A list of artifacts and their properties.

          */ - AppImageConfigName: string | undefined; + ArtifactSummaries?: ArtifactSummary[]; /** - *

          The new KernelGateway app to run on the image.

          + *

          A token for getting the next set of artifacts, if there are any.

          */ - KernelGatewayImageConfig?: KernelGatewayImageConfig; + NextToken?: string; } -export namespace UpdateAppImageConfigRequest { - export const filterSensitiveLog = (obj: UpdateAppImageConfigRequest): any => ({ +export namespace ListArtifactsResponse { + export const filterSensitiveLog = (obj: ListArtifactsResponse): any => ({ ...obj, }); } -export interface UpdateAppImageConfigResponse { - /** - *

          The Amazon Resource Name (ARN) for the AppImageConfig.

          - */ - AppImageConfigArn?: string; -} - -export namespace UpdateAppImageConfigResponse { - export const filterSensitiveLog = (obj: UpdateAppImageConfigResponse): any => ({ - ...obj, - }); +export enum SortAssociationsBy { + CREATION_TIME = "CreationTime", + DESTINATION_ARN = "DestinationArn", + DESTINATION_TYPE = "DestinationType", + SOURCE_ARN = "SourceArn", + SOURCE_TYPE = "SourceType", } -export interface UpdateCodeRepositoryInput { +export interface ListAssociationsRequest { /** - *

          The name of the Git repository to update.

          + *

          A filter that returns only associations with the specified source ARN.

          */ - CodeRepositoryName: string | undefined; + SourceArn?: string; /** - *

          The configuration of the git repository, including the URL and the Amazon Resource - * Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to - * access the repository. The secret must have a staging label of AWSCURRENT - * and must be in the following format:

          - *

          - * {"username": UserName, "password": - * Password} - *

          + *

          A filter that returns only associations with the specified destination Amazon Resource Name (ARN).

          */ - GitConfig?: GitConfigForUpdate; -} - -export namespace UpdateCodeRepositoryInput { - export const filterSensitiveLog = (obj: UpdateCodeRepositoryInput): any => ({ - ...obj, - }); -} + DestinationArn?: string; -export interface UpdateCodeRepositoryOutput { /** - *

          The ARN of the Git repository.

          + *

          A filter that returns only associations with the specified source type.

          */ - CodeRepositoryArn: string | undefined; -} + SourceType?: string; -export namespace UpdateCodeRepositoryOutput { - export const filterSensitiveLog = (obj: UpdateCodeRepositoryOutput): any => ({ - ...obj, - }); -} + /** + *

          A filter that returns only associations with the specified destination type.

          + */ + DestinationType?: string; -export interface UpdateDomainRequest { /** - *

          The ID of the domain to be updated.

          + *

          A filter that returns only associations of the specified type.

          */ - DomainId: string | undefined; + AssociationType?: AssociationEdgeType | string; /** - *

          A collection of settings.

          + *

          A filter that returns only associations created on or after the specified time.

          */ - DefaultUserSettings?: UserSettings; -} + CreatedAfter?: Date; -export namespace UpdateDomainRequest { - export const filterSensitiveLog = (obj: UpdateDomainRequest): any => ({ - ...obj, - }); -} + /** + *

          A filter that returns only associations created on or before the specified time.

          + */ + CreatedBefore?: Date; -export interface UpdateDomainResponse { /** - *

          The Amazon Resource Name (ARN) of the domain.

          + *

          The property used to sort results. The default value is CreationTime.

          */ - DomainArn?: string; -} + SortBy?: SortAssociationsBy | string; -export namespace UpdateDomainResponse { - export const filterSensitiveLog = (obj: UpdateDomainResponse): any => ({ - ...obj, - }); -} + /** + *

          The sort order. The default value is Descending.

          + */ + SortOrder?: SortOrder | string; -export enum VariantPropertyType { - DataCaptureConfig = "DataCaptureConfig", - DesiredInstanceCount = "DesiredInstanceCount", - DesiredWeight = "DesiredWeight", -} + /** + *

          If the previous call to ListAssociations didn't return the full set of associations, + * the call returns a token for getting the next set of associations.

          + */ + NextToken?: string; -/** - *

          Specifies a production variant property type for an Endpoint.

          - *

          If you are updating an endpoint with the UpdateEndpointInput$RetainAllVariantProperties option set to - * true, the VariantProperty objects listed in UpdateEndpointInput$ExcludeRetainedVariantProperties override the - * existing variant properties of the endpoint.

          - */ -export interface VariantProperty { /** - *

          The type of variant property. The supported values are:

          - * + *

          The maximum number of associations to return in the response. The default value is 10.

          */ - VariantPropertyType: VariantPropertyType | string | undefined; + MaxResults?: number; } -export namespace VariantProperty { - export const filterSensitiveLog = (obj: VariantProperty): any => ({ +export namespace ListAssociationsRequest { + export const filterSensitiveLog = (obj: ListAssociationsRequest): any => ({ ...obj, }); } diff --git a/clients/client-sagemaker/models/models_2.ts b/clients/client-sagemaker/models/models_2.ts index d4da232cdd0d..65ae77ae76fb 100644 --- a/clients/client-sagemaker/models/models_2.ts +++ b/clients/client-sagemaker/models/models_2.ts @@ -1,31 +1,6139 @@ import { + ActionStatus, + AlgorithmSpecification, + AppSpecification, + AssociationSummary, + AutoMLCandidate, + AutoMLJobStatus, + AutoMLJobSummary, + AutoMLSortBy, + AutoMLSortOrder, + BatchStrategy, BooleanOperator, - MemberDefinition, + CacheHitResult, + CandidateSortBy, + CandidateStatus, + Channel, + CheckpointConfig, + CodeRepositorySortBy, + CodeRepositorySortOrder, + CodeRepositorySummary, + CompilationJobStatus, + CompilationJobSummary, + ConditionStepMetadata, + ContextSummary, + InferenceSpecification, + KernelGatewayImageConfig, + MetadataProperties, + ModelApprovalStatus, + ModelMetrics, + ModelPackageValidationSpecification, MonitoringScheduleConfig, + NetworkConfig, NotebookInstanceAcceleratorType, NotebookInstanceLifecycleHook, - NotificationConfiguration, - OidcConfig, + OutputDataConfig, + ResourceConfig, RootAccess, - SourceIpConfig, - TrialComponentArtifact, - TrialComponentParameterValue, - TrialComponentStatus, + SourceAlgorithmSpecification, + StoppingCondition, + Tag, + TransformInput, + TransformOutput, + TransformResources, + UiTemplate, + UserContext, UserSettings, + VpcConfig, _InstanceType, } from "./models_0"; import { + DataProcessing, + DebugHookConfig, + DebugRuleConfiguration, + DebugRuleEvaluationStatus, + DeploymentConfig, DesiredWeightAndCapacity, + DomainDetails, + Endpoint, + EndpointConfigSortKey, + EndpointConfigSummary, + 
EndpointSortKey, + EndpointStatus, + EndpointSummary, + ExecutionStatus, + Experiment, + ExperimentConfig, + ExperimentSummary, + FeatureGroup, + FeatureGroupSortBy, + FeatureGroupSortOrder, + FeatureGroupStatus, + FeatureGroupSummary, Filter, - NestedFilters, + FlowDefinitionSummary, + GitConfigForUpdate, + HumanTaskUiSummary, + HyperParameterTrainingJobSummary, + HyperParameterTuningJobSortByOptions, + HyperParameterTuningJobStatus, + HyperParameterTuningJobSummary, + Image, + ImageSortBy, + ImageSortOrder, + ImageVersion, + ImageVersionSortBy, + ImageVersionSortOrder, + LabelingJobForWorkteamSummary, + LabelingJobStatus, + LabelingJobSummary, + MemberDefinition, + MetricData, + ModelArtifacts, + ModelClientConfig, + ModelPackageGroupStatus, + ModelPackageStatus, + ModelPackageStatusDetails, + MonitoringExecutionSummary, + NotebookInstanceStatus, + NotificationConfiguration, + OfflineStoreStatusValue, + OidcConfig, + PipelineExecutionStatus, + PipelineStatus, + ProcessingInput, + ProcessingJobStatus, + ProcessingOutputConfig, + ProcessingResources, + ProcessingStoppingCondition, + ProjectStatus, ResourceType, - SearchSortOrder, - VariantProperty, + ScheduleStatus, + SecondaryStatus, + SecondaryStatusTransition, + SortOrder, + SourceIpConfig, + SubscribedWorkteam, + TensorBoardOutputConfig, + TrainingJobStatus, + TransformJobStatus, + TrialComponentArtifact, + TrialComponentMetricSummary, + TrialComponentParameterValue, + TrialComponentSource, + TrialComponentStatus, + TrialSource, + UserProfileStatus, Workforce, Workteam, } from "./models_1"; import { SENSITIVE_STRING } from "@aws-sdk/smithy-client"; +export interface ListAssociationsResponse { + /** + *

          A list of associations and their properties.

          + */ + AssociationSummaries?: AssociationSummary[]; + + /** + *

          A token for getting the next set of associations, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListAssociationsResponse { + export const filterSensitiveLog = (obj: ListAssociationsResponse): any => ({ + ...obj, + }); +} + +export interface ListAutoMLJobsRequest { + /** + *

          Request a list of jobs, using a filter for time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          Request a list of jobs, using a filter for time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          Request a list of jobs, using a filter for time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          Request a list of jobs, using a filter for time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          Request a list of jobs, using a search filter for name.

          + */ + NameContains?: string; + + /** + *

          Request a list of jobs, using a filter for status.

          + */ + StatusEquals?: AutoMLJobStatus | string; + + /** + *

          The sort order for the results. The default is Descending.

          + */ + SortOrder?: AutoMLSortOrder | string; + + /** + *

          The parameter by which to sort the results. The default is AutoMLJobName.

          + */ + SortBy?: AutoMLSortBy | string; + + /** + *

          Request a list of jobs up to a specified limit.

          + */ + MaxResults?: number; + + /** + *

          If the previous response was truncated, you receive this token. Use it in your next + * request to receive the next set of results.

          + */ + NextToken?: string; +} + +export namespace ListAutoMLJobsRequest { + export const filterSensitiveLog = (obj: ListAutoMLJobsRequest): any => ({ + ...obj, + }); +} + +export interface ListAutoMLJobsResponse { + /** + *

          Returns a summary list of jobs.

          + */ + AutoMLJobSummaries: AutoMLJobSummary[] | undefined; + + /** + *

          If the previous response was truncated, you receive this token. Use it in your next + * request to receive the next set of results.

          + */ + NextToken?: string; +} + +export namespace ListAutoMLJobsResponse { + export const filterSensitiveLog = (obj: ListAutoMLJobsResponse): any => ({ + ...obj, + }); +} + +export interface ListCandidatesForAutoMLJobRequest { + /** + *

          List the Candidates created for the job by providing the job's name.

          + */ + AutoMLJobName: string | undefined; + + /** + *

          List the Candidates for the job and filter by status.

          + */ + StatusEquals?: CandidateStatus | string; + + /** + *

          List the Candidates for the job and filter by candidate name.

          + */ + CandidateNameEquals?: string; + + /** + *

          The sort order for the results. The default is Ascending.

          + */ + SortOrder?: AutoMLSortOrder | string; + + /** + *

          The parameter by which to sort the results. The default is Descending.

          + */ + SortBy?: CandidateSortBy | string; + + /** + *

          List the job's Candidates up to a specified limit.

          + */ + MaxResults?: number; + + /** + *

          If the previous response was truncated, you receive this token. Use it in your next + * request to receive the next set of results.

          + */ + NextToken?: string; +} + +export namespace ListCandidatesForAutoMLJobRequest { + export const filterSensitiveLog = (obj: ListCandidatesForAutoMLJobRequest): any => ({ + ...obj, + }); +} + +export interface ListCandidatesForAutoMLJobResponse { + /** + *

          Summaries about the Candidates.

          + */ + Candidates: AutoMLCandidate[] | undefined; + + /** + *

          If the previous response was truncated, you receive this token. Use it in your next + * request to receive the next set of results.

          + */ + NextToken?: string; +} + +export namespace ListCandidatesForAutoMLJobResponse { + export const filterSensitiveLog = (obj: ListCandidatesForAutoMLJobResponse): any => ({ + ...obj, + }); +} + +export interface ListCodeRepositoriesInput { + /** + *

          A filter that returns only Git repositories that were created after the specified + * time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only Git repositories that were created before the specified + * time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only Git repositories that were last modified after the + * specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only Git repositories that were last modified before the + * specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          The maximum number of Git repositories to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A string in the Git repositories name. This filter returns only repositories whose + * name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          If the result of a ListCodeRepositoriesOutput request was truncated, the + * response includes a NextToken. To get the next set of Git repositories, use + * the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The field to sort results by. The default is Name.

          + */ + SortBy?: CodeRepositorySortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: CodeRepositorySortOrder | string; +} + +export namespace ListCodeRepositoriesInput { + export const filterSensitiveLog = (obj: ListCodeRepositoriesInput): any => ({ + ...obj, + }); +} + +export interface ListCodeRepositoriesOutput { + /** + *

          Gets a list of summaries of the Git repositories. Each summary specifies the following + * values for the repository:

          + *
            + *
          • + *

            Name

            + *
          • + *
          • + *

            Amazon Resource Name (ARN)

            + *
          • + *
          • + *

            Creation time

            + *
          • + *
          • + *

            Last modified time

            + *
          • + *
          • + *

            Configuration information, including the URL location of the repository and + * the ARN of the AWS Secrets Manager secret that contains the credentials used + * to access the repository.

            + *
          • + *
          + */ + CodeRepositorySummaryList: CodeRepositorySummary[] | undefined; + + /** + *

          If the result of a ListCodeRepositoriesOutput request was truncated, the + * response includes a NextToken. To get the next set of Git repositories, use + * the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListCodeRepositoriesOutput { + export const filterSensitiveLog = (obj: ListCodeRepositoriesOutput): any => ({ + ...obj, + }); +} + +export enum ListCompilationJobsSortBy { + CREATION_TIME = "CreationTime", + NAME = "Name", + STATUS = "Status", +} + +export interface ListCompilationJobsRequest { + /** + *

          If the result of the previous ListCompilationJobs request was truncated, + * the response includes a NextToken. To retrieve the next set of model + * compilation jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of model compilation jobs to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A filter that returns the model compilation jobs that were created after a specified + * time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns the model compilation jobs that were created before a specified + * time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns the model compilation jobs that were modified after a specified + * time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns the model compilation jobs that were modified before a specified + * time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns the model compilation jobs whose name contains a specified + * string.

          + */ + NameContains?: string; + + /** + *

          A filter that retrieves model compilation jobs with a specific DescribeCompilationJobResponse$CompilationJobStatus status.

          + */ + StatusEquals?: CompilationJobStatus | string; + + /** + *

          The field by which to sort results. The default is CreationTime.

          + */ + SortBy?: ListCompilationJobsSortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListCompilationJobsRequest { + export const filterSensitiveLog = (obj: ListCompilationJobsRequest): any => ({ + ...obj, + }); +} + +export interface ListCompilationJobsResponse { + /** + *

          An array of CompilationJobSummary objects, each describing a model + * compilation job.

          + */ + CompilationJobSummaries: CompilationJobSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this NextToken. To retrieve + * the next set of model compilation jobs, use this token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListCompilationJobsResponse { + export const filterSensitiveLog = (obj: ListCompilationJobsResponse): any => ({ + ...obj, + }); +} + +export enum SortContextsBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListContextsRequest { + /** + *

          A filter that returns only contexts with the specified source URI.

          + */ + SourceUri?: string; + + /** + *

          A filter that returns only contexts of the specified type.

          + */ + ContextType?: string; + + /** + *

          A filter that returns only contexts created on or after the specified time.

          + */ + CreatedAfter?: Date; + + /** + *

          A filter that returns only contexts created on or before the specified time.

          + */ + CreatedBefore?: Date; + + /** + *

          The property used to sort results. The default value is CreationTime.

          + */ + SortBy?: SortContextsBy | string; + + /** + *

          The sort order. The default value is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          If the previous call to ListContexts didn't return the full set of contexts, + * the call returns a token for getting the next set of contexts.

          + */ + NextToken?: string; + + /** + *

          The maximum number of contexts to return in the response. The default value is 10.

          + */ + MaxResults?: number; +} + +export namespace ListContextsRequest { + export const filterSensitiveLog = (obj: ListContextsRequest): any => ({ + ...obj, + }); +} + +export interface ListContextsResponse { + /** + *

          A list of contexts and their properties.

          + */ + ContextSummaries?: ContextSummary[]; + + /** + *

          A token for getting the next set of contexts, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListContextsResponse { + export const filterSensitiveLog = (obj: ListContextsResponse): any => ({ + ...obj, + }); +} + +export interface ListDomainsRequest { + /** + *

          If the previous response was truncated, you will receive this token. + * Use it in your next request to receive the next set of results.

          + */ + NextToken?: string; + + /** + *

          Returns a list up to a specified limit.

          + */ + MaxResults?: number; +} + +export namespace ListDomainsRequest { + export const filterSensitiveLog = (obj: ListDomainsRequest): any => ({ + ...obj, + }); +} + +export interface ListDomainsResponse { + /** + *

          The list of domains.

          + */ + Domains?: DomainDetails[]; + + /** + *

          If the previous response was truncated, you will receive this token. + * Use it in your next request to receive the next set of results.

          + */ + NextToken?: string; +} + +export namespace ListDomainsResponse { + export const filterSensitiveLog = (obj: ListDomainsResponse): any => ({ + ...obj, + }); +} + +export enum OrderKey { + Ascending = "Ascending", + Descending = "Descending", +} + +export interface ListEndpointConfigsInput { + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: EndpointConfigSortKey | string; + + /** + *

          The sort order for results. The default is Descending.

          + */ + SortOrder?: OrderKey | string; + + /** + *

          If the result of the previous ListEndpointConfig request was + * truncated, the response includes a NextToken. To retrieve the next set of + * endpoint configurations, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of training jobs to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A string in the endpoint configuration name. This filter returns only endpoint + * configurations whose name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only endpoint configurations created before the specified + * time (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only endpoint configurations with a creation time greater + * than or equal to the specified time (timestamp).

          + */ + CreationTimeAfter?: Date; +} + +export namespace ListEndpointConfigsInput { + export const filterSensitiveLog = (obj: ListEndpointConfigsInput): any => ({ + ...obj, + }); +} + +export interface ListEndpointConfigsOutput { + /** + *

          An array of endpoint configurations.

          + */ + EndpointConfigs: EndpointConfigSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * endpoint configurations, use it in the subsequent request

          + */ + NextToken?: string; +} + +export namespace ListEndpointConfigsOutput { + export const filterSensitiveLog = (obj: ListEndpointConfigsOutput): any => ({ + ...obj, + }); +} + +export interface ListEndpointsInput { + /** + *

          Sorts the list of results. The default is CreationTime.

          + */ + SortBy?: EndpointSortKey | string; + + /** + *

          The sort order for results. The default is Descending.

          + */ + SortOrder?: OrderKey | string; + + /** + *

          If the result of a ListEndpoints request was truncated, the response + * includes a NextToken. To retrieve the next set of endpoints, use the token + * in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of endpoints to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A string in endpoint names. This filter returns only endpoints whose name contains + * the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only endpoints that were created before the specified time + * (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only endpoints with a creation time greater than or equal to + * the specified time (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only endpoints that were modified before the specified + * timestamp.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns only endpoints that were modified after the specified + * timestamp.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only endpoints with the specified status.

          + */ + StatusEquals?: EndpointStatus | string; +} + +export namespace ListEndpointsInput { + export const filterSensitiveLog = (obj: ListEndpointsInput): any => ({ + ...obj, + }); +} + +export interface ListEndpointsOutput { + /** + *

          An array or endpoint objects.

          + */ + Endpoints: EndpointSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * training jobs, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListEndpointsOutput { + export const filterSensitiveLog = (obj: ListEndpointsOutput): any => ({ + ...obj, + }); +} + +export enum SortExperimentsBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListExperimentsRequest { + /** + *

          A filter that returns only experiments created after the specified time.

          + */ + CreatedAfter?: Date; + + /** + *

          A filter that returns only experiments created before the specified time.

          + */ + CreatedBefore?: Date; + + /** + *

          The property used to sort results. The default value is CreationTime.

          + */ + SortBy?: SortExperimentsBy | string; + + /** + *

          The sort order. The default value is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          If the previous call to ListExperiments didn't return the full set of + * experiments, the call returns a token for getting the next set of experiments.

          + */ + NextToken?: string; + + /** + *

          The maximum number of experiments to return in the response. The default value is + * 10.

          + */ + MaxResults?: number; +} + +export namespace ListExperimentsRequest { + export const filterSensitiveLog = (obj: ListExperimentsRequest): any => ({ + ...obj, + }); +} + +export interface ListExperimentsResponse { + /** + *

          A list of the summaries of your experiments.

          + */ + ExperimentSummaries?: ExperimentSummary[]; + + /** + *

          A token for getting the next set of experiments, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListExperimentsResponse { + export const filterSensitiveLog = (obj: ListExperimentsResponse): any => ({ + ...obj, + }); +} + +export interface ListFeatureGroupsRequest { + /** + *

          A string that partially matches one or more FeatureGroups names. Filters + * FeatureGroups by name.

          + */ + NameContains?: string; + + /** + *

          A FeatureGroup status. Filters by FeatureGroup status.

          + */ + FeatureGroupStatusEquals?: FeatureGroupStatus | string; + + /** + *

          An OfflineStore status. Filters by OfflineStore status.

          + */ + OfflineStoreStatusEquals?: OfflineStoreStatusValue | string; + + /** + *

          Use this parameter to search for FeatureGroupss created after a specific + * date and time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          Use this parameter to search for FeatureGroupss created before a specific + * date and time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          The order in which feature groups are listed.

          + */ + SortOrder?: FeatureGroupSortOrder | string; + + /** + *

          The value on which the feature group list is sorted.

          + */ + SortBy?: FeatureGroupSortBy | string; + + /** + *

          The maximum number of results returned by ListFeatureGroups.

          + */ + MaxResults?: number; + + /** + *

          A token to resume pagination of ListFeatureGroups results.

          + */ + NextToken?: string; +} + +export namespace ListFeatureGroupsRequest { + export const filterSensitiveLog = (obj: ListFeatureGroupsRequest): any => ({ + ...obj, + }); +} + +export interface ListFeatureGroupsResponse { + /** + *

          A summary of feature groups.

          + */ + FeatureGroupSummaries: FeatureGroupSummary[] | undefined; + + /** + *

          A token to resume pagination of ListFeatureGroups results.

          + */ + NextToken: string | undefined; +} + +export namespace ListFeatureGroupsResponse { + export const filterSensitiveLog = (obj: ListFeatureGroupsResponse): any => ({ + ...obj, + }); +} + +export interface ListFlowDefinitionsRequest { + /** + *

          A filter that returns only flow definitions with a creation time greater than or equal to the specified timestamp.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only flow definitions that were created before the specified timestamp.

          + */ + CreationTimeBefore?: Date; + + /** + *

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          A token to resume pagination.

          + */ + NextToken?: string; + + /** + *

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          + */ + MaxResults?: number; +} + +export namespace ListFlowDefinitionsRequest { + export const filterSensitiveLog = (obj: ListFlowDefinitionsRequest): any => ({ + ...obj, + }); +} + +export interface ListFlowDefinitionsResponse { + /** + *

          An array of objects describing the flow definitions.

          + */ + FlowDefinitionSummaries: FlowDefinitionSummary[] | undefined; + + /** + *

          A token to resume pagination.

          + */ + NextToken?: string; +} + +export namespace ListFlowDefinitionsResponse { + export const filterSensitiveLog = (obj: ListFlowDefinitionsResponse): any => ({ + ...obj, + }); +} + +export interface ListHumanTaskUisRequest { + /** + *

          A filter that returns only human task user interfaces with a creation time greater than or equal to the specified timestamp.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only human task user interfaces that were created before the specified timestamp.

          + */ + CreationTimeBefore?: Date; + + /** + *

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          A token to resume pagination.

          + */ + NextToken?: string; + + /** + *

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          + */ + MaxResults?: number; +} + +export namespace ListHumanTaskUisRequest { + export const filterSensitiveLog = (obj: ListHumanTaskUisRequest): any => ({ + ...obj, + }); +} + +export interface ListHumanTaskUisResponse { + /** + *

          An array of objects describing the human task user interfaces.

          + */ + HumanTaskUiSummaries: HumanTaskUiSummary[] | undefined; + + /** + *

          A token to resume pagination.

          + */ + NextToken?: string; +} + +export namespace ListHumanTaskUisResponse { + export const filterSensitiveLog = (obj: ListHumanTaskUisResponse): any => ({ + ...obj, + }); +} + +export interface ListHyperParameterTuningJobsRequest { + /** + *

          If the result of the previous ListHyperParameterTuningJobs request was + * truncated, the response includes a NextToken. To retrieve the next set of + * tuning jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The + * maximum number of tuning jobs to return. The default value is + * 10.

          + */ + MaxResults?: number; + + /** + *

          The + * field + * to sort results by. The default is Name.

          + */ + SortBy?: HyperParameterTuningJobSortByOptions | string; + + /** + *

          The sort + * order + * for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          A string in the tuning job name. This filter returns only tuning jobs whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only tuning jobs that were created after the + * specified + * time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only tuning jobs that were created before the + * specified + * time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only tuning jobs that were modified after the specified + * time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only tuning jobs that were modified before the specified + * time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns only tuning jobs with the + * specified + * status.

          + */ + StatusEquals?: HyperParameterTuningJobStatus | string; +} + +export namespace ListHyperParameterTuningJobsRequest { + export const filterSensitiveLog = (obj: ListHyperParameterTuningJobsRequest): any => ({ + ...obj, + }); +} + +export interface ListHyperParameterTuningJobsResponse { + /** + *

          A list of HyperParameterTuningJobSummary objects that + * describe + * the tuning jobs that the ListHyperParameterTuningJobs + * request returned.

          + */ + HyperParameterTuningJobSummaries: HyperParameterTuningJobSummary[] | undefined; + + /** + *

          If the result of this ListHyperParameterTuningJobs request was truncated, + * the response includes a NextToken. To retrieve the next set of tuning jobs, + * use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListHyperParameterTuningJobsResponse { + export const filterSensitiveLog = (obj: ListHyperParameterTuningJobsResponse): any => ({ + ...obj, + }); +} + +export interface ListImagesRequest { + /** + *

          A filter that returns only images created on or after the specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only images created on or before the specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only images modified on or after the specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only images modified on or before the specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          The maximum number of images to return in the response. The default value is 10.

          + */ + MaxResults?: number; + + /** + *

          A filter that returns only images whose name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          If the previous call to ListImages didn't return the full set of images, + * the call returns a token for getting the next set of images.

          + */ + NextToken?: string; + + /** + *

          The property used to sort results. The default value is CREATION_TIME.

          + */ + SortBy?: ImageSortBy | string; + + /** + *

          The sort order. The default value is DESCENDING.

          + */ + SortOrder?: ImageSortOrder | string; +} + +export namespace ListImagesRequest { + export const filterSensitiveLog = (obj: ListImagesRequest): any => ({ + ...obj, + }); +} + +export interface ListImagesResponse { + /** + *

          A list of images and their properties.

          + */ + Images?: Image[]; + + /** + *

          A token for getting the next set of images, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListImagesResponse { + export const filterSensitiveLog = (obj: ListImagesResponse): any => ({ + ...obj, + }); +} + +export interface ListImageVersionsRequest { + /** + *

          A filter that returns only versions created on or after the specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only versions created on or before the specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          The name of the image to list the versions of.

          + */ + ImageName: string | undefined; + + /** + *

          A filter that returns only versions modified on or after the specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only versions modified on or before the specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          The maximum number of versions to return in the response. The default value is 10.

          + */ + MaxResults?: number; + + /** + *

          If the previous call to ListImageVersions didn't return the full set of + * versions, the call returns a token for getting the next set of versions.

          + */ + NextToken?: string; + + /** + *

          The property used to sort results. The default value is CREATION_TIME.

          + */ + SortBy?: ImageVersionSortBy | string; + + /** + *

          The sort order. The default value is DESCENDING.

          + */ + SortOrder?: ImageVersionSortOrder | string; +} + +export namespace ListImageVersionsRequest { + export const filterSensitiveLog = (obj: ListImageVersionsRequest): any => ({ + ...obj, + }); +} + +export interface ListImageVersionsResponse { + /** + *

          A list of versions and their properties.

          + */ + ImageVersions?: ImageVersion[]; + + /** + *

          A token for getting the next set of versions, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListImageVersionsResponse { + export const filterSensitiveLog = (obj: ListImageVersionsResponse): any => ({ + ...obj, + }); +} + +export enum SortBy { + CREATION_TIME = "CreationTime", + NAME = "Name", + STATUS = "Status", +} + +export interface ListLabelingJobsRequest { + /** + *

          A filter that returns only labeling jobs created after the specified time + * (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only labeling jobs created before the specified time + * (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only labeling jobs modified after the specified time + * (timestamp).

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only labeling jobs modified before the specified time + * (timestamp).

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          The maximum number of labeling jobs to return in each page of the response.

          + */ + MaxResults?: number; + + /** + *

          If the result of the previous ListLabelingJobs request was truncated, the + * response includes a NextToken. To retrieve the next set of labeling jobs, + * use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          A string in the labeling job name. This filter returns only labeling jobs whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: SortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          A filter that retrieves only labeling jobs with a specific status.

          + */ + StatusEquals?: LabelingJobStatus | string; +} + +export namespace ListLabelingJobsRequest { + export const filterSensitiveLog = (obj: ListLabelingJobsRequest): any => ({ + ...obj, + }); +} + +export interface ListLabelingJobsResponse { + /** + *

          An array of LabelingJobSummary objects, each describing a labeling + * job.

          + */ + LabelingJobSummaryList?: LabelingJobSummary[]; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * labeling jobs, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListLabelingJobsResponse { + export const filterSensitiveLog = (obj: ListLabelingJobsResponse): any => ({ + ...obj, + }); +} + +export enum ListLabelingJobsForWorkteamSortByOptions { + CREATION_TIME = "CreationTime", +} + +export interface ListLabelingJobsForWorkteamRequest { + /** + *

          The Amazon Resource Name (ARN) of the work team for which you want to see labeling + * jobs.

          + */ + WorkteamArn: string | undefined; + + /** + *

          The maximum number of labeling jobs to return in each page of the response.

          + */ + MaxResults?: number; + + /** + *

          If the result of the previous ListLabelingJobsForWorkteam request was + * truncated, the response includes a NextToken. To retrieve the next set of + * labeling jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          A filter that returns only labeling jobs created after the specified time + * (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only labeling jobs created before the specified time + * (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that limits jobs to only the ones whose job reference code contains the + * specified string.

          + */ + JobReferenceCodeContains?: string; + + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: ListLabelingJobsForWorkteamSortByOptions | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListLabelingJobsForWorkteamRequest { + export const filterSensitiveLog = (obj: ListLabelingJobsForWorkteamRequest): any => ({ + ...obj, + }); +} + +export interface ListLabelingJobsForWorkteamResponse { + /** + *

          An array of LabelingJobSummary objects, each describing a labeling + * job.

          + */ + LabelingJobSummaryList: LabelingJobForWorkteamSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * labeling jobs, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListLabelingJobsForWorkteamResponse { + export const filterSensitiveLog = (obj: ListLabelingJobsForWorkteamResponse): any => ({ + ...obj, + }); +} + +export enum ModelPackageGroupSortBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListModelPackageGroupsInput { + /** + *

          A filter that returns only model groups created after the specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only model groups created before the specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          The maximum number of results to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A string in the model group name. This filter returns only model groups whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          If the result of the previous ListModelPackageGroups request was + * truncated, the response includes a NextToken. To retrieve the next set of + * model groups, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: ModelPackageGroupSortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListModelPackageGroupsInput { + export const filterSensitiveLog = (obj: ListModelPackageGroupsInput): any => ({ + ...obj, + }); +} + +/** + *

          Summary information about a model group.

          + */ +export interface ModelPackageGroupSummary { + /** + *

          The name of the model group.

          + */ + ModelPackageGroupName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the model group.

          + */ + ModelPackageGroupArn: string | undefined; + + /** + *

          A description of the model group.

          + */ + ModelPackageGroupDescription?: string; + + /** + *

          The time that the model group was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          The status of the model group.

          + */ + ModelPackageGroupStatus: ModelPackageGroupStatus | string | undefined; +} + +export namespace ModelPackageGroupSummary { + export const filterSensitiveLog = (obj: ModelPackageGroupSummary): any => ({ + ...obj, + }); +} + +export interface ListModelPackageGroupsOutput { + /** + *

          A list of summaries of the model groups in your AWS account.

          + */ + ModelPackageGroupSummaryList: ModelPackageGroupSummary[] | undefined; + + /** + *

          If the response is truncated, SageMaker returns this token. To retrieve the next set + * of model groups, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListModelPackageGroupsOutput { + export const filterSensitiveLog = (obj: ListModelPackageGroupsOutput): any => ({ + ...obj, + }); +} + +export enum ModelPackageType { + BOTH = "Both", + UNVERSIONED = "Unversioned", + VERSIONED = "Versioned", +} + +export enum ModelPackageSortBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListModelPackagesInput { + /** + *

          A filter that returns only model packages created after the specified time + * (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only model packages created before the specified time + * (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          The maximum number of model packages to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A string in the model package name. This filter returns only model packages whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only the model packages with the specified approval + * status.

          + */ + ModelApprovalStatus?: ModelApprovalStatus | string; + + /** + *

          A filter that returns only model versions that belong to the specified model group.

          + */ + ModelPackageGroupName?: string; + + /** + *

          A filter that returns only the model packages of the specified type. This can be one + * of the following values.

          + *
            + *
          • + *

            + * VERSIONED - List only versioned models.

            + *
          • + *
          • + *

            + * UNVERSIONED - List only unversioned models.

            + *
          • + *
          • + *

            + * BOTH - List both versioned and unversioned models.

            + *
          • + *
          + */ + ModelPackageType?: ModelPackageType | string; + + /** + *

          If the response to a previous ListModelPackages request was truncated, + * the response includes a NextToken. To retrieve the next set of model + * packages, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The parameter by which to sort the results. The default is + * CreationTime.

          + */ + SortBy?: ModelPackageSortBy | string; + + /** + *

          The sort order for the results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListModelPackagesInput { + export const filterSensitiveLog = (obj: ListModelPackagesInput): any => ({ + ...obj, + }); +} + +/** + *

          Provides summary information about a model package.

          + */ +export interface ModelPackageSummary { + /** + *

          The name of the model package.

          + */ + ModelPackageName: string | undefined; + + /** + *

          If the model package is a versioned model, the model group that the versioned model + * belongs to.

          + */ + ModelPackageGroupName?: string; + + /** + *

          If the model package is a versioned model, the version of the model.

          + */ + ModelPackageVersion?: number; + + /** + *

          The Amazon Resource Name (ARN) of the model package.

          + */ + ModelPackageArn: string | undefined; + + /** + *

          A brief description of the model package.

          + */ + ModelPackageDescription?: string; + + /** + *

          A timestamp that shows when the model package was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          The overall status of the model package.

          + */ + ModelPackageStatus: ModelPackageStatus | string | undefined; + + /** + *

          The approval status of the model. This can be one of the following values.

          + *
            + *
          • + *

            + * APPROVED - The model is approved.

            + *
          • + *
          • + *

            + * REJECTED - The model is rejected.

            + *
          • + *
          • + *

            + * PENDING_MANUAL_APPROVAL - The model is waiting for manual + * approval.

            + *
          • + *
          + */ + ModelApprovalStatus?: ModelApprovalStatus | string; +} + +export namespace ModelPackageSummary { + export const filterSensitiveLog = (obj: ModelPackageSummary): any => ({ + ...obj, + }); +} + +export interface ListModelPackagesOutput { + /** + *

          An array of ModelPackageSummary objects, each of which lists a model + * package.

          + */ + ModelPackageSummaryList: ModelPackageSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * model packages, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListModelPackagesOutput { + export const filterSensitiveLog = (obj: ListModelPackagesOutput): any => ({ + ...obj, + }); +} + +export enum ModelSortKey { + CreationTime = "CreationTime", + Name = "Name", +} + +export interface ListModelsInput { + /** + *

          Sorts the list of results. The default is CreationTime.

          + */ + SortBy?: ModelSortKey | string; + + /** + *

          The sort order for results. The default is Descending.

          + */ + SortOrder?: OrderKey | string; + + /** + *

          If the response to a previous ListModels request was truncated, the + * response includes a NextToken. To retrieve the next set of models, use the + * token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of models to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A string in the training job name. This filter returns only models in the training + * job whose name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only models created before the specified time + * (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only models with a creation time greater than or equal to the + * specified time (timestamp).

          + */ + CreationTimeAfter?: Date; +} + +export namespace ListModelsInput { + export const filterSensitiveLog = (obj: ListModelsInput): any => ({ + ...obj, + }); +} + +/** + *

          Provides summary information about a model.

          + */ +export interface ModelSummary { + /** + *

          The name of the model that you want a summary for.

          + */ + ModelName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the model.

          + */ + ModelArn: string | undefined; + + /** + *

          A timestamp that indicates when the model was created.

          + */ + CreationTime: Date | undefined; +} + +export namespace ModelSummary { + export const filterSensitiveLog = (obj: ModelSummary): any => ({ + ...obj, + }); +} + +export interface ListModelsOutput { + /** + *

          An array of ModelSummary objects, each of which lists a + * model.

          + */ + Models: ModelSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * models, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListModelsOutput { + export const filterSensitiveLog = (obj: ListModelsOutput): any => ({ + ...obj, + }); +} + +export enum MonitoringExecutionSortKey { + CREATION_TIME = "CreationTime", + SCHEDULED_TIME = "ScheduledTime", + STATUS = "Status", +} + +export interface ListMonitoringExecutionsRequest { + /** + *

          Name of a specific schedule to fetch jobs for.

          + */ + MonitoringScheduleName?: string; + + /** + *

          Name of a specific endpoint to fetch jobs for.

          + */ + EndpointName?: string; + + /** + *

          Whether to sort results by the Status, CreationTime, + * or ScheduledTime field. The default is CreationTime.

          + */ + SortBy?: MonitoringExecutionSortKey | string; + + /** + *

          Whether to sort the results in Ascending or Descending order. + * The default is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          The token returned if the response is truncated. To retrieve the next set of job + * executions, use it in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of jobs to return in the response. The default value is 10.

          + */ + MaxResults?: number; + + /** + *

          Filter for jobs scheduled before a specified time.

          + */ + ScheduledTimeBefore?: Date; + + /** + *

          Filter for jobs scheduled after a specified time.

          + */ + ScheduledTimeAfter?: Date; + + /** + *

          A filter that returns only jobs created before a specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only jobs created after a specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only jobs modified before a specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns only jobs modified after a specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that retrieves only jobs with a specific status.

          + */ + StatusEquals?: ExecutionStatus | string; +} + +export namespace ListMonitoringExecutionsRequest { + export const filterSensitiveLog = (obj: ListMonitoringExecutionsRequest): any => ({ + ...obj, + }); +} + +export interface ListMonitoringExecutionsResponse { + /** + *

          A JSON array in which each element is a summary for a monitoring execution.

          + */ + MonitoringExecutionSummaries: MonitoringExecutionSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs, + * use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListMonitoringExecutionsResponse { + export const filterSensitiveLog = (obj: ListMonitoringExecutionsResponse): any => ({ + ...obj, + }); +} + +export enum MonitoringScheduleSortKey { + CREATION_TIME = "CreationTime", + NAME = "Name", + STATUS = "Status", +} + +export interface ListMonitoringSchedulesRequest { + /** + *

          Name of a specific endpoint to fetch schedules for.

          + */ + EndpointName?: string; + + /** + *

          Whether to sort results by the Status, CreationTime, + * or ScheduledTime field. The default is CreationTime.

          + */ + SortBy?: MonitoringScheduleSortKey | string; + + /** + *

          Whether to sort the results in Ascending or Descending order. + * The default is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          The token returned if the response is truncated. To retrieve the next set of job + * executions, use it in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of jobs to return in the response. The default value is 10.

          + */ + MaxResults?: number; + + /** + *

          Filter for monitoring schedules whose name contains a specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only monitoring schedules created before a specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only monitoring schedules created after a specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only monitoring schedules modified before a specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns only monitoring schedules modified after a specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only monitoring schedules with the specified status.

          + */ + StatusEquals?: ScheduleStatus | string; +} + +export namespace ListMonitoringSchedulesRequest { + export const filterSensitiveLog = (obj: ListMonitoringSchedulesRequest): any => ({ + ...obj, + }); +} + +/** + *

          Summarizes the monitoring schedule.

          + */ +export interface MonitoringScheduleSummary { + /** + *

          The name of the monitoring schedule.

          + */ + MonitoringScheduleName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the monitoring schedule.

          + */ + MonitoringScheduleArn: string | undefined; + + /** + *

          The creation time of the monitoring schedule.

          + */ + CreationTime: Date | undefined; + + /** + *

          The last time the monitoring schedule was modified.

          + */ + LastModifiedTime: Date | undefined; + + /** + *

          The status of the monitoring schedule.

          + */ + MonitoringScheduleStatus: ScheduleStatus | string | undefined; + + /** + *

          The name of the endpoint using the monitoring schedule.

          + */ + EndpointName?: string; +} + +export namespace MonitoringScheduleSummary { + export const filterSensitiveLog = (obj: MonitoringScheduleSummary): any => ({ + ...obj, + }); +} + +export interface ListMonitoringSchedulesResponse { + /** + *

          A JSON array in which each element is a summary for a monitoring schedule.

          + */ + MonitoringScheduleSummaries: MonitoringScheduleSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs, + * use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListMonitoringSchedulesResponse { + export const filterSensitiveLog = (obj: ListMonitoringSchedulesResponse): any => ({ + ...obj, + }); +} + +export enum NotebookInstanceLifecycleConfigSortKey { + CREATION_TIME = "CreationTime", + LAST_MODIFIED_TIME = "LastModifiedTime", + NAME = "Name", +} + +export enum NotebookInstanceLifecycleConfigSortOrder { + ASCENDING = "Ascending", + DESCENDING = "Descending", +} + +export interface ListNotebookInstanceLifecycleConfigsInput { + /** + *

          If the result of a ListNotebookInstanceLifecycleConfigs request was + * truncated, the response includes a NextToken. To get the next set of + * lifecycle configurations, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of lifecycle configurations to return in the response.

          + */ + MaxResults?: number; + + /** + *

          Sorts the list of results. The default is CreationTime.

          + */ + SortBy?: NotebookInstanceLifecycleConfigSortKey | string; + + /** + *

          The sort order for results.

          + */ + SortOrder?: NotebookInstanceLifecycleConfigSortOrder | string; + + /** + *

          A string in the lifecycle configuration name. This filter returns only lifecycle + * configurations whose name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only lifecycle configurations that were created before the + * specified time (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only lifecycle configurations that were created after the + * specified time (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only lifecycle configurations that were modified before the + * specified time (timestamp).

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns only lifecycle configurations that were modified after the + * specified time (timestamp).

          + */ + LastModifiedTimeAfter?: Date; +} + +export namespace ListNotebookInstanceLifecycleConfigsInput { + export const filterSensitiveLog = (obj: ListNotebookInstanceLifecycleConfigsInput): any => ({ + ...obj, + }); +} + +/** + *

          Provides a summary of a notebook instance lifecycle configuration.

          + */ +export interface NotebookInstanceLifecycleConfigSummary { + /** + *

          The name of the lifecycle configuration.

          + */ + NotebookInstanceLifecycleConfigName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          + */ + NotebookInstanceLifecycleConfigArn: string | undefined; + + /** + *

          A timestamp that tells when the lifecycle configuration was created.

          + */ + CreationTime?: Date; + + /** + *

          A timestamp that tells when the lifecycle configuration was last modified.

          + */ + LastModifiedTime?: Date; +} + +export namespace NotebookInstanceLifecycleConfigSummary { + export const filterSensitiveLog = (obj: NotebookInstanceLifecycleConfigSummary): any => ({ + ...obj, + }); +} + +export interface ListNotebookInstanceLifecycleConfigsOutput { + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To get the next set of + * lifecycle configurations, use it in the next request.

          + */ + NextToken?: string; + + /** + *

          An array of NotebookInstanceLifecycleConfiguration objects, each listing + * a lifecycle configuration.

          + */ + NotebookInstanceLifecycleConfigs?: NotebookInstanceLifecycleConfigSummary[]; +} + +export namespace ListNotebookInstanceLifecycleConfigsOutput { + export const filterSensitiveLog = (obj: ListNotebookInstanceLifecycleConfigsOutput): any => ({ + ...obj, + }); +} + +export enum NotebookInstanceSortKey { + CREATION_TIME = "CreationTime", + NAME = "Name", + STATUS = "Status", +} + +export enum NotebookInstanceSortOrder { + ASCENDING = "Ascending", + DESCENDING = "Descending", +} + +export interface ListNotebookInstancesInput { + /** + *

          If the previous call to the ListNotebookInstances is truncated, the + * response includes a NextToken. You can use this token in your subsequent + * ListNotebookInstances request to fetch the next set of notebook + * instances.

          + * + *

          You might specify a filter or a sort order in your request. When the response is + * truncated, you must use the same values for the filter and sort order in the next + * request.

          + *
          + */ + NextToken?: string; + + /** + *

          The maximum number of notebook instances to return.

          + */ + MaxResults?: number; + + /** + *

          The field to sort results by. The default is Name.

          + */ + SortBy?: NotebookInstanceSortKey | string; + + /** + *

          The sort order for results.

          + */ + SortOrder?: NotebookInstanceSortOrder | string; + + /** + *

          A string in the notebook instances' name. This filter returns only notebook + * instances whose name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that returns only notebook instances that were created before the + * specified time (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only notebook instances that were created after the specified + * time (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only notebook instances that were modified before the + * specified time (timestamp).

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A filter that returns only notebook instances that were modified after the + * specified time (timestamp).

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only notebook instances with the specified status.

          + */ + StatusEquals?: NotebookInstanceStatus | string; + + /** + *

          A string in the name of a notebook instances lifecycle configuration associated with + * this notebook instance. This filter returns only notebook instances associated with a + * lifecycle configuration with a name that contains the specified string.

          + */ + NotebookInstanceLifecycleConfigNameContains?: string; + + /** + *

          A string in the name or URL of a Git repository associated with this notebook + * instance. This filter returns only notebook instances associated with a git repository + * with a name that contains the specified string.

          + */ + DefaultCodeRepositoryContains?: string; + + /** + *

          A filter that returns only notebook instances associated with the specified git + * repository.

          + */ + AdditionalCodeRepositoryEquals?: string; +} + +export namespace ListNotebookInstancesInput { + export const filterSensitiveLog = (obj: ListNotebookInstancesInput): any => ({ + ...obj, + }); +} + +/** + *

          Provides summary information for an Amazon SageMaker notebook instance.

          + */ +export interface NotebookInstanceSummary { + /** + *

          The name of the notebook instance that you want a summary for.

          + */ + NotebookInstanceName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the notebook instance.

          + */ + NotebookInstanceArn: string | undefined; + + /** + *

          The status of the notebook instance.

          + */ + NotebookInstanceStatus?: NotebookInstanceStatus | string; + + /** + *

          The + * URL that you use to connect to the Jupyter instance running in your notebook instance. + *

          + */ + Url?: string; + + /** + *

          The type of ML compute instance that the notebook instance is running on.

          + */ + InstanceType?: _InstanceType | string; + + /** + *

          A timestamp that shows when the notebook instance was created.

          + */ + CreationTime?: Date; + + /** + *

          A timestamp that shows when the notebook instance was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The name of a notebook instance lifecycle configuration associated with this notebook + * instance.

          + *

          For information about notebook instance lifecycle configurations, see Step + * 2.1: (Optional) Customize a Notebook Instance.

          + */ + NotebookInstanceLifecycleConfigName?: string; + + /** + *

          The Git repository associated with the notebook instance as its default code + * repository. This can be either the name of a Git repository stored as a resource in your + * account, or the URL of a Git repository in AWS CodeCommit or in any + * other Git repository. When you open a notebook instance, it opens in the directory that + * contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker + * Notebook Instances.

          + */ + DefaultCodeRepository?: string; + + /** + *

          An array of up to three Git repositories associated with the notebook instance. These + * can be either the names of Git repositories stored as resources in your account, or the + * URL of Git repositories in AWS CodeCommit or in any + * other Git repository. These repositories are cloned at the same level as the default + * repository of your notebook instance. For more information, see Associating Git + * Repositories with Amazon SageMaker Notebook Instances.

          + */ + AdditionalCodeRepositories?: string[]; +} + +export namespace NotebookInstanceSummary { + export const filterSensitiveLog = (obj: NotebookInstanceSummary): any => ({ + ...obj, + }); +} + +export interface ListNotebookInstancesOutput { + /** + *

          If the response to the previous ListNotebookInstances request was + * truncated, Amazon SageMaker returns this token. To retrieve the next set of notebook instances, use + * the token in the next request.

          + */ + NextToken?: string; + + /** + *

          An array of NotebookInstanceSummary objects, one for each notebook + * instance.

          + */ + NotebookInstances?: NotebookInstanceSummary[]; +} + +export namespace ListNotebookInstancesOutput { + export const filterSensitiveLog = (obj: ListNotebookInstancesOutput): any => ({ + ...obj, + }); +} + +export enum SortPipelineExecutionsBy { + CREATION_TIME = "CreationTime", + PIPELINE_EXECUTION_ARN = "PipelineExecutionArn", +} + +export interface ListPipelineExecutionsRequest { + /** + *

          The name of the pipeline.

          + */ + PipelineName: string | undefined; + + /** + *

          A filter that returns the pipeline executions that were created after a specified + * time.

          + */ + CreatedAfter?: Date; + + /** + *

          A filter that returns the pipeline executions that were created before a specified + * time.

          + */ + CreatedBefore?: Date; + + /** + *

          The field by which to sort results. The default is CreatedTime.

          + */ + SortBy?: SortPipelineExecutionsBy | string; + + /** + *

          The sort order for results.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          If the result of the previous ListPipelineExecutions request was truncated, + * the response includes a NextToken. To retrieve the next set of pipeline executions, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of pipeline executions to return in the response.

          + */ + MaxResults?: number; +} + +export namespace ListPipelineExecutionsRequest { + export const filterSensitiveLog = (obj: ListPipelineExecutionsRequest): any => ({ + ...obj, + }); +} + +/** + *

          A pipeline execution summary.

          + */ +export interface PipelineExecutionSummary { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn?: string; + + /** + *

          The start time of the pipeline execution.

          + */ + StartTime?: Date; + + /** + *

          The status of the pipeline execution.

          + */ + PipelineExecutionStatus?: PipelineExecutionStatus | string; + + /** + *

          The description of the pipeline execution.

          + */ + PipelineExecutionDescription?: string; + + /** + *

          The display name of the pipeline execution.

          + */ + PipelineExecutionDisplayName?: string; +} + +export namespace PipelineExecutionSummary { + export const filterSensitiveLog = (obj: PipelineExecutionSummary): any => ({ + ...obj, + }); +} + +export interface ListPipelineExecutionsResponse { + /** + *

          Contains a sorted list of pipeline execution summary objects matching the specified + * filters. Each run summary includes the Amazon Resource Name (ARN) of the pipeline execution, the run date, + * and the status. This list can be empty.

          + */ + PipelineExecutionSummaries?: PipelineExecutionSummary[]; + + /** + *

          If the result of the previous ListPipelineExecutions request was truncated, + * the response includes a NextToken. To retrieve the next set of pipeline executions, use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListPipelineExecutionsResponse { + export const filterSensitiveLog = (obj: ListPipelineExecutionsResponse): any => ({ + ...obj, + }); +} + +export interface ListPipelineExecutionStepsRequest { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn?: string; + + /** + *

          If the result of the previous ListPipelineExecutionSteps request was truncated, + * the response includes a NextToken. To retrieve the next set of pipeline execution steps, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of pipeline execution steps to return in the response.

          + */ + MaxResults?: number; + + /** + *

          The sort order for results.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListPipelineExecutionStepsRequest { + export const filterSensitiveLog = (obj: ListPipelineExecutionStepsRequest): any => ({ + ...obj, + }); +} + +/** + *

          Metadata for Model steps.

          + */ +export interface ModelStepMetadata { + /** + *

          The Amazon Resource Name (ARN) of the created model.

          + */ + Arn?: string; +} + +export namespace ModelStepMetadata { + export const filterSensitiveLog = (obj: ModelStepMetadata): any => ({ + ...obj, + }); +} + +/** + *

          Metadata for a processing job step.

          + */ +export interface ProcessingJobStepMetadata { + /** + *

          The Amazon Resource Name (ARN) of the processing job.

          + */ + Arn?: string; +} + +export namespace ProcessingJobStepMetadata { + export const filterSensitiveLog = (obj: ProcessingJobStepMetadata): any => ({ + ...obj, + }); +} + +/** + *

          Metadata for a register model job step.

          + */ +export interface RegisterModelStepMetadata { + /** + *

          The Amazon Resource Name (ARN) of the model package.

          + */ + Arn?: string; +} + +export namespace RegisterModelStepMetadata { + export const filterSensitiveLog = (obj: RegisterModelStepMetadata): any => ({ + ...obj, + }); +} + +/** + *

          Metadata for a training job step.

          + */ +export interface TrainingJobStepMetadata { + /** + *

          The Amazon Resource Name (ARN) of the training job that was run by this step execution.

          + */ + Arn?: string; +} + +export namespace TrainingJobStepMetadata { + export const filterSensitiveLog = (obj: TrainingJobStepMetadata): any => ({ + ...obj, + }); +} + +/** + *

          Metadata for a transform job step.

          + */ +export interface TransformJobStepMetadata { + /** + *

          The Amazon Resource Name (ARN) of the transform job that was run by this step execution.

          + */ + Arn?: string; +} + +export namespace TransformJobStepMetadata { + export const filterSensitiveLog = (obj: TransformJobStepMetadata): any => ({ + ...obj, + }); +} + +/** + *

          Metadata for a step execution.

          + */ +export interface PipelineExecutionStepMetadata { + /** + *

          The Amazon Resource Name (ARN) of the training job that was run by this step execution.

          + */ + TrainingJob?: TrainingJobStepMetadata; + + /** + *

          The Amazon Resource Name (ARN) of the processing job that was run by this step execution.

          + */ + ProcessingJob?: ProcessingJobStepMetadata; + + /** + *

          The Amazon Resource Name (ARN) of the transform job that was run by this step execution.

          + */ + TransformJob?: TransformJobStepMetadata; + + /** + *

          Metadata for the Model step.

          + */ + Model?: ModelStepMetadata; + + /** + *

          Metadata for the RegisterModel step.

          + */ + RegisterModel?: RegisterModelStepMetadata; + + /** + *

          If this is a Condition step metadata object, details on the condition.

          + */ + Condition?: ConditionStepMetadata; +} + +export namespace PipelineExecutionStepMetadata { + export const filterSensitiveLog = (obj: PipelineExecutionStepMetadata): any => ({ + ...obj, + }); +} + +export enum StepStatus { + EXECUTING = "Executing", + FAILED = "Failed", + STARTING = "Starting", + STOPPED = "Stopped", + STOPPING = "Stopping", + SUCCEEDED = "Succeeded", +} + +/** + *

          An execution of a step in a pipeline.

          + */ +export interface PipelineExecutionStep { + /** + *

          The name of the step that is executed.

          + */ + StepName?: string; + + /** + *

          The time that the step started executing.

          + */ + StartTime?: Date; + + /** + *

          The time that the step stopped executing.

          + */ + EndTime?: Date; + + /** + *

          The status of the step execution.

          + */ + StepStatus?: StepStatus | string; + + /** + *

          If this pipeline execution step was cached, details on the cache hit.

          + */ + CacheHitResult?: CacheHitResult; + + /** + *

          The reason why the step failed execution. This is only returned if the step failed its execution.

          + */ + FailureReason?: string; + + /** + *

          The metadata for the step execution.

          + */ + Metadata?: PipelineExecutionStepMetadata; +} + +export namespace PipelineExecutionStep { + export const filterSensitiveLog = (obj: PipelineExecutionStep): any => ({ + ...obj, + }); +} + +export interface ListPipelineExecutionStepsResponse { + /** + *

          A list of PipelineExecutionStep objects. Each + * PipelineExecutionStep consists of StepName, StartTime, EndTime, StepStatus, + * and Metadata. Metadata is an object with properties for each job that contains relevant + * information about the job created by the step.

          + */ + PipelineExecutionSteps?: PipelineExecutionStep[]; + + /** + *

          If the result of the previous ListPipelineExecutionSteps request was truncated, + * the response includes a NextToken. To retrieve the next set of pipeline execution steps, use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListPipelineExecutionStepsResponse { + export const filterSensitiveLog = (obj: ListPipelineExecutionStepsResponse): any => ({ + ...obj, + }); +} + +export interface ListPipelineParametersForExecutionRequest { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn: string | undefined; + + /** + *

          If the result of the previous ListPipelineParametersForExecution request was truncated, + * the response includes a NextToken. To retrieve the next set of parameters, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of parameters to return in the response.

          + */ + MaxResults?: number; +} + +export namespace ListPipelineParametersForExecutionRequest { + export const filterSensitiveLog = (obj: ListPipelineParametersForExecutionRequest): any => ({ + ...obj, + }); +} + +/** + *

          Assigns a value to a named Pipeline parameter.

          + */ +export interface Parameter { + /** + *

          The name of the parameter to assign a value to. This parameter name must match a named parameter in the pipeline definition.

          + */ + Name: string | undefined; + + /** + *

          The literal value for the parameter.

          + */ + Value: string | undefined; +} + +export namespace Parameter { + export const filterSensitiveLog = (obj: Parameter): any => ({ + ...obj, + }); +} + +export interface ListPipelineParametersForExecutionResponse { + /** + *

          Contains a list of pipeline parameters. This list can be empty.

          + */ + PipelineParameters?: Parameter[]; + + /** + *

          If the result of the previous ListPipelineParametersForExecution request was truncated, + * the response includes a NextToken. To retrieve the next set of parameters, use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListPipelineParametersForExecutionResponse { + export const filterSensitiveLog = (obj: ListPipelineParametersForExecutionResponse): any => ({ + ...obj, + }); +} + +export enum SortPipelinesBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListPipelinesRequest { + /** + *

          The prefix of the pipeline name.

          + */ + PipelineNamePrefix?: string; + + /** + *

          A filter that returns the pipelines that were created after a specified + * time.

          + */ + CreatedAfter?: Date; + + /** + *

          A filter that returns the pipelines that were created before a specified + * time.

          + */ + CreatedBefore?: Date; + + /** + *

          The field by which to sort results. The default is CreatedTime.

          + */ + SortBy?: SortPipelinesBy | string; + + /** + *

          The sort order for results.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          If the result of the previous ListPipelines request was truncated, + * the response includes a NextToken. To retrieve the next set of pipelines, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of pipelines to return in the response.

          + */ + MaxResults?: number; +} + +export namespace ListPipelinesRequest { + export const filterSensitiveLog = (obj: ListPipelinesRequest): any => ({ + ...obj, + }); +} + +/** + *

          A summary of a pipeline.

          + */ +export interface PipelineSummary { + /** + *

          The Amazon Resource Name (ARN) of the pipeline.

          + */ + PipelineArn?: string; + + /** + *

          The name of the pipeline.

          + */ + PipelineName?: string; + + /** + *

          The display name of the pipeline.

          + */ + PipelineDisplayName?: string; + + /** + *

          The description of the pipeline.

          + */ + PipelineDescription?: string; + + /** + *

          The Amazon Resource Name (ARN) that the pipeline used to execute.

          + */ + RoleArn?: string; + + /** + *

          The creation time of the pipeline.

          + */ + CreationTime?: Date; + + /** + *

          The time that the pipeline was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The last time that a pipeline execution began.

          + */ + LastExecutionTime?: Date; +} + +export namespace PipelineSummary { + export const filterSensitiveLog = (obj: PipelineSummary): any => ({ + ...obj, + }); +} + +export interface ListPipelinesResponse { + /** + *

          Contains a sorted list of PipelineSummary objects matching the specified + * filters. Each PipelineSummary consists of PipelineArn, PipelineName, + * PipelineDisplayName, PipelineDescription, CreationTime, LastModifiedTime, LastExecutionTime, and + * RoleArn. This list can be empty.

          + */ + PipelineSummaries?: PipelineSummary[]; + + /** + *

          If the result of the previous ListPipelines request was truncated, + * the response includes a NextToken. To retrieve the next set of pipelines, use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListPipelinesResponse { + export const filterSensitiveLog = (obj: ListPipelinesResponse): any => ({ + ...obj, + }); +} + +export interface ListProcessingJobsRequest { + /** + *

          A filter that returns only processing jobs created after the specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only processing jobs created before the specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only processing jobs modified after the specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only processing jobs modified before the specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A string in the processing job name. This filter returns only processing jobs whose + * name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that retrieves only processing jobs with a specific status.

          + */ + StatusEquals?: ProcessingJobStatus | string; + + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: SortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          If the result of the previous ListProcessingJobs request was truncated, + * the response includes a NextToken. To retrieve the next set of processing + * jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of processing jobs to return in the response.

          + */ + MaxResults?: number; +} + +export namespace ListProcessingJobsRequest { + export const filterSensitiveLog = (obj: ListProcessingJobsRequest): any => ({ + ...obj, + }); +} + +/** + *

          Summary of information about a processing job.

          + */ +export interface ProcessingJobSummary { + /** + *

          The name of the processing job.

          + */ + ProcessingJobName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the processing job.

          + */ + ProcessingJobArn: string | undefined; + + /** + *

          The time at which the processing job was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          The time at which the processing job completed.

          + */ + ProcessingEndTime?: Date; + + /** + *

          A timestamp that indicates the last time the processing job was modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The status of the processing job.

          + */ + ProcessingJobStatus: ProcessingJobStatus | string | undefined; + + /** + *

          A string, up to one KB in size, that contains the reason a processing job failed, if + * it failed.

          + */ + FailureReason?: string; + + /** + *

          An optional string, up to one KB in size, that contains metadata from the processing + * container when the processing job exits.

          + */ + ExitMessage?: string; +} + +export namespace ProcessingJobSummary { + export const filterSensitiveLog = (obj: ProcessingJobSummary): any => ({ + ...obj, + }); +} + +export interface ListProcessingJobsResponse { + /** + *

          An array of ProcessingJobSummary objects, each listing a processing + * job.

          + */ + ProcessingJobSummaries: ProcessingJobSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * processing jobs, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListProcessingJobsResponse { + export const filterSensitiveLog = (obj: ListProcessingJobsResponse): any => ({ + ...obj, + }); +} + +export enum ProjectSortBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export enum ProjectSortOrder { + ASCENDING = "Ascending", + DESCENDING = "Descending", +} + +export interface ListProjectsInput { + /** + *

          A filter that returns the projects that were created after a specified + * time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns the projects that were created before a specified + * time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          The maximum number of projects to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A filter that returns the projects whose name contains a specified + * string.

          + */ + NameContains?: string; + + /** + *

          If the result of the previous ListProjects request was truncated, + * the response includes a NextToken. To retrieve the next set of projects, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The field by which to sort results. The default is CreationTime.

          + */ + SortBy?: ProjectSortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: ProjectSortOrder | string; +} + +export namespace ListProjectsInput { + export const filterSensitiveLog = (obj: ListProjectsInput): any => ({ + ...obj, + }); +} + +/** + *

          Information about a project.

          + */ +export interface ProjectSummary { + /** + *

          The name of the project.

          + */ + ProjectName: string | undefined; + + /** + *

          The description of the project.

          + */ + ProjectDescription?: string; + + /** + *

          The Amazon Resource Name (ARN) of the project.

          + */ + ProjectArn: string | undefined; + + /** + *

          The ID of the project.

          + */ + ProjectId: string | undefined; + + /** + *

          The time that the project was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          The status of the project.

          + */ + ProjectStatus: ProjectStatus | string | undefined; +} + +export namespace ProjectSummary { + export const filterSensitiveLog = (obj: ProjectSummary): any => ({ + ...obj, + }); +} + +export interface ListProjectsOutput { + /** + *

          A list of summaries of projects.

          + */ + ProjectSummaryList: ProjectSummary[] | undefined; + + /** + *

          If the result of the previous ListProjects request was truncated, + * the response includes a NextToken. To retrieve the next set of + * projects, use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListProjectsOutput { + export const filterSensitiveLog = (obj: ListProjectsOutput): any => ({ + ...obj, + }); +} + +export interface ListSubscribedWorkteamsRequest { + /** + *

          A string in the work team name. This filter returns only work teams whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          If the result of the previous ListSubscribedWorkteams request was + * truncated, the response includes a NextToken. To retrieve the next set of + * work teams, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of work teams to return in each page of the response.

          + */ + MaxResults?: number; +} + +export namespace ListSubscribedWorkteamsRequest { + export const filterSensitiveLog = (obj: ListSubscribedWorkteamsRequest): any => ({ + ...obj, + }); +} + +export interface ListSubscribedWorkteamsResponse { + /** + *

          An array of Workteam objects, each describing a work team.

          + */ + SubscribedWorkteams: SubscribedWorkteam[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * work teams, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListSubscribedWorkteamsResponse { + export const filterSensitiveLog = (obj: ListSubscribedWorkteamsResponse): any => ({ + ...obj, + }); +} + +export interface ListTagsInput { + /** + *

          The Amazon Resource Name (ARN) of the resource whose tags you want to + * retrieve.

          + */ + ResourceArn: string | undefined; + + /** + *

          If the response to the previous ListTags request is truncated, Amazon SageMaker + * returns this token. To retrieve the next set of tags, use it in the subsequent request. + *

          + */ + NextToken?: string; + + /** + *

          Maximum number of tags to return.

          + */ + MaxResults?: number; +} + +export namespace ListTagsInput { + export const filterSensitiveLog = (obj: ListTagsInput): any => ({ + ...obj, + }); +} + +export interface ListTagsOutput { + /** + *

          An array of Tag objects, each with a tag key and a value.

          + */ + Tags?: Tag[]; + + /** + *

          If the response is truncated, Amazon SageMaker includes a token in the response. You can use this + * token in your subsequent request to fetch the next set of tags.

          + */ + NextToken?: string; +} + +export namespace ListTagsOutput { + export const filterSensitiveLog = (obj: ListTagsOutput): any => ({ + ...obj, + }); +} + +export interface ListTrainingJobsRequest { + /** + *

          If the result of the previous ListTrainingJobs request was truncated, + * the response includes a NextToken. To retrieve the next set of training + * jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of training jobs to return in the response.

          + */ + MaxResults?: number; + + /** + *

          A filter that returns only training jobs created after the specified time + * (timestamp).

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only training jobs created before the specified time + * (timestamp).

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only training jobs modified after the specified time + * (timestamp).

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only training jobs modified before the specified time + * (timestamp).

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A string in the training job name. This filter returns only training jobs whose + * name contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that retrieves only training jobs with a specific status.

          + */ + StatusEquals?: TrainingJobStatus | string; + + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: SortBy | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListTrainingJobsRequest { + export const filterSensitiveLog = (obj: ListTrainingJobsRequest): any => ({ + ...obj, + }); +} + +/** + *

          Provides summary information about a training job.

          + */ +export interface TrainingJobSummary { + /** + *

          The name of the training job that you want a summary for.

          + */ + TrainingJobName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the training job.

          + */ + TrainingJobArn: string | undefined; + + /** + *

          A timestamp that shows when the training job was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          A timestamp that shows when the training job ended. This field is set only if the + * training job has one of the terminal statuses (Completed, + * Failed, or Stopped).

          + */ + TrainingEndTime?: Date; + + /** + *

          Timestamp when the training job was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The status of the training job.

          + */ + TrainingJobStatus: TrainingJobStatus | string | undefined; +} + +export namespace TrainingJobSummary { + export const filterSensitiveLog = (obj: TrainingJobSummary): any => ({ + ...obj, + }); +} + +export interface ListTrainingJobsResponse { + /** + *

          An array of TrainingJobSummary objects, each listing a training + * job.

          + */ + TrainingJobSummaries: TrainingJobSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * training jobs, use it in the subsequent request.

          + */ + NextToken?: string; +} + +export namespace ListTrainingJobsResponse { + export const filterSensitiveLog = (obj: ListTrainingJobsResponse): any => ({ + ...obj, + }); +} + +export enum TrainingJobSortByOptions { + CreationTime = "CreationTime", + FinalObjectiveMetricValue = "FinalObjectiveMetricValue", + Name = "Name", + Status = "Status", +} + +export interface ListTrainingJobsForHyperParameterTuningJobRequest { + /** + *

          The name of the tuning job whose training jobs you want to list.

          + */ + HyperParameterTuningJobName: string | undefined; + + /** + *

          If the result of the previous ListTrainingJobsForHyperParameterTuningJob + * request was truncated, the response includes a NextToken. To retrieve the + * next set of training jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of training jobs to return. The default value is 10.

          + */ + MaxResults?: number; + + /** + *

          A filter that returns only training jobs with the + * specified + * status.

          + */ + StatusEquals?: TrainingJobStatus | string; + + /** + *

          The field to sort + * results + * by. The default is Name.

          + *

          If the value of this field is FinalObjectiveMetricValue, any training + * jobs that did not return an objective metric are not listed.

          + */ + SortBy?: TrainingJobSortByOptions | string; + + /** + *

          The sort order + * for + * results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; +} + +export namespace ListTrainingJobsForHyperParameterTuningJobRequest { + export const filterSensitiveLog = (obj: ListTrainingJobsForHyperParameterTuningJobRequest): any => ({ + ...obj, + }); +} + +export interface ListTrainingJobsForHyperParameterTuningJobResponse { + /** + *

          A list of TrainingJobSummary objects that + * describe + * the training jobs that the + * ListTrainingJobsForHyperParameterTuningJob request returned.

          + */ + TrainingJobSummaries: HyperParameterTrainingJobSummary[] | undefined; + + /** + *

          If the result of this ListTrainingJobsForHyperParameterTuningJob request + * was truncated, the response includes a NextToken. To retrieve the next set + * of training jobs, use the token in the next request.

          + */ + NextToken?: string; +} + +export namespace ListTrainingJobsForHyperParameterTuningJobResponse { + export const filterSensitiveLog = (obj: ListTrainingJobsForHyperParameterTuningJobResponse): any => ({ + ...obj, + }); +} + +export interface ListTransformJobsRequest { + /** + *

          A filter that returns only transform jobs created after the specified time.

          + */ + CreationTimeAfter?: Date; + + /** + *

          A filter that returns only transform jobs created before the specified time.

          + */ + CreationTimeBefore?: Date; + + /** + *

          A filter that returns only transform jobs modified after the specified time.

          + */ + LastModifiedTimeAfter?: Date; + + /** + *

          A filter that returns only transform jobs modified before the specified time.

          + */ + LastModifiedTimeBefore?: Date; + + /** + *

          A string in the transform job name. This filter returns only transform jobs whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          A filter that retrieves only transform jobs with a specific status.

          + */ + StatusEquals?: TransformJobStatus | string; + + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: SortBy | string; + + /** + *

          The sort order for results. The default is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          If the result of the previous ListTransformJobs request was truncated, + * the response includes a NextToken. To retrieve the next set of transform + * jobs, use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of + * transform + * jobs to return in the response. The default value is + * 10.

          + */ + MaxResults?: number; +} + +export namespace ListTransformJobsRequest { + export const filterSensitiveLog = (obj: ListTransformJobsRequest): any => ({ + ...obj, + }); +} + +/** + *

          Provides a + * summary + * of a transform job. Multiple TransformJobSummary objects are returned as a + * list in response to a ListTransformJobs call.

          + */ +export interface TransformJobSummary { + /** + *

          The name of the transform job.

          + */ + TransformJobName: string | undefined; + + /** + *

          The Amazon Resource Name (ARN) of the transform job.

          + */ + TransformJobArn: string | undefined; + + /** + *

          A timestamp that shows when the transform Job was created.

          + */ + CreationTime: Date | undefined; + + /** + *

          Indicates when the transform + * job + * ends on compute instances. For successful jobs and stopped jobs, this + * is the exact time + * recorded + * after the results are uploaded. For failed jobs, this is when Amazon SageMaker + * detected that the job failed.

          + */ + TransformEndTime?: Date; + + /** + *

          Indicates when the transform job was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The status of the transform job.

          + */ + TransformJobStatus: TransformJobStatus | string | undefined; + + /** + *

          If the transform job failed, + * the + * reason it failed.

          + */ + FailureReason?: string; +} + +export namespace TransformJobSummary { + export const filterSensitiveLog = (obj: TransformJobSummary): any => ({ + ...obj, + }); +} + +export interface ListTransformJobsResponse { + /** + *

          An array of + * TransformJobSummary + * objects.

          + */ + TransformJobSummaries: TransformJobSummary[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * transform jobs, use it in the next request.

          + */ + NextToken?: string; +} + +export namespace ListTransformJobsResponse { + export const filterSensitiveLog = (obj: ListTransformJobsResponse): any => ({ + ...obj, + }); +} + +export enum SortTrialComponentsBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListTrialComponentsRequest { + /** + *

          A filter that returns only components that are part of the specified experiment. If you + * specify ExperimentName, you can't filter by SourceArn or + * TrialName.

          + */ + ExperimentName?: string; + + /** + *

          A filter that returns only components that are part of the specified trial. If you specify + * TrialName, you can't filter by ExperimentName or + * SourceArn.

          + */ + TrialName?: string; + + /** + *

          A filter that returns only components that have the specified source Amazon Resource Name + * (ARN). If you specify SourceArn, you can't filter by ExperimentName + * or TrialName.

          + */ + SourceArn?: string; + + /** + *

          A filter that returns only components created after the specified time.

          + */ + CreatedAfter?: Date; + + /** + *

          A filter that returns only components created before the specified time.

          + */ + CreatedBefore?: Date; + + /** + *

          The property used to sort results. The default value is CreationTime.

          + */ + SortBy?: SortTrialComponentsBy | string; + + /** + *

          The sort order. The default value is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          The maximum number of components to return in the response. The default value is + * 10.

          + */ + MaxResults?: number; + + /** + *

          If the previous call to ListTrialComponents didn't return the full set of + * components, the call returns a token for getting the next set of components.

          + */ + NextToken?: string; +} + +export namespace ListTrialComponentsRequest { + export const filterSensitiveLog = (obj: ListTrialComponentsRequest): any => ({ + ...obj, + }); +} + +/** + *

          A summary of the properties of a trial component. To get all the properties, call the + * DescribeTrialComponent API and provide the + * TrialComponentName.

          + */ +export interface TrialComponentSummary { + /** + *

          The name of the trial component.

          + */ + TrialComponentName?: string; + + /** + *

          The ARN of the trial component.

          + */ + TrialComponentArn?: string; + + /** + *

          The name of the component as displayed. If DisplayName isn't specified, + * TrialComponentName is displayed.

          + */ + DisplayName?: string; + + /** + *

          The Amazon Resource Name (ARN) and job type of the source of a trial component.

          + */ + TrialComponentSource?: TrialComponentSource; + + /** + *

          The status of the component. States include:

          + *
            + *
          • + *

            InProgress

            + *
          • + *
          • + *

            Completed

            + *
          • + *
          • + *

            Failed

            + *
          • + *
          + */ + Status?: TrialComponentStatus; + + /** + *

          When the component started.

          + */ + StartTime?: Date; + + /** + *

          When the component ended.

          + */ + EndTime?: Date; + + /** + *

          When the component was created.

          + */ + CreationTime?: Date; + + /** + *

          Who created the component.

          + */ + CreatedBy?: UserContext; + + /** + *

          When the component was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          Who last modified the component.

          + */ + LastModifiedBy?: UserContext; +} + +export namespace TrialComponentSummary { + export const filterSensitiveLog = (obj: TrialComponentSummary): any => ({ + ...obj, + }); +} + +export interface ListTrialComponentsResponse { + /** + *

          A list of the summaries of your trial components.

          + */ + TrialComponentSummaries?: TrialComponentSummary[]; + + /** + *

          A token for getting the next set of components, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListTrialComponentsResponse { + export const filterSensitiveLog = (obj: ListTrialComponentsResponse): any => ({ + ...obj, + }); +} + +export enum SortTrialsBy { + CREATION_TIME = "CreationTime", + NAME = "Name", +} + +export interface ListTrialsRequest { + /** + *

          A filter that returns only trials that are part of the specified experiment.

          + */ + ExperimentName?: string; + + /** + *

          A filter that returns only trials that are associated with the specified trial + * component.

          + */ + TrialComponentName?: string; + + /** + *

          A filter that returns only trials created after the specified time.

          + */ + CreatedAfter?: Date; + + /** + *

          A filter that returns only trials created before the specified time.

          + */ + CreatedBefore?: Date; + + /** + *

          The property used to sort results. The default value is CreationTime.

          + */ + SortBy?: SortTrialsBy | string; + + /** + *

          The sort order. The default value is Descending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          The maximum number of trials to return in the response. The default value is 10.

          + */ + MaxResults?: number; + + /** + *

          If the previous call to ListTrials didn't return the full set of trials, the + * call returns a token for getting the next set of trials.

          + */ + NextToken?: string; +} + +export namespace ListTrialsRequest { + export const filterSensitiveLog = (obj: ListTrialsRequest): any => ({ + ...obj, + }); +} + +/** + *

          A summary of the properties of a trial. To get the complete set of properties, call the + * DescribeTrial API and provide the TrialName.

          + */ +export interface TrialSummary { + /** + *

          The Amazon Resource Name (ARN) of the trial.

          + */ + TrialArn?: string; + + /** + *

          The name of the trial.

          + */ + TrialName?: string; + + /** + *

          The name of the trial as displayed. If DisplayName isn't specified, + * TrialName is displayed.

          + */ + DisplayName?: string; + + /** + *

          The source of the trial.

          + */ + TrialSource?: TrialSource; + + /** + *

          When the trial was created.

          + */ + CreationTime?: Date; + + /** + *

          When the trial was last modified.

          + */ + LastModifiedTime?: Date; +} + +export namespace TrialSummary { + export const filterSensitiveLog = (obj: TrialSummary): any => ({ + ...obj, + }); +} + +export interface ListTrialsResponse { + /** + *

          A list of the summaries of your trials.

          + */ + TrialSummaries?: TrialSummary[]; + + /** + *

          A token for getting the next set of trials, if there are any.

          + */ + NextToken?: string; +} + +export namespace ListTrialsResponse { + export const filterSensitiveLog = (obj: ListTrialsResponse): any => ({ + ...obj, + }); +} + +export enum UserProfileSortKey { + CreationTime = "CreationTime", + LastModifiedTime = "LastModifiedTime", +} + +export interface ListUserProfilesRequest { + /** + *

          If the previous response was truncated, you will receive this token. + * Use it in your next request to receive the next set of results.

          + */ + NextToken?: string; + + /** + *

          Returns a list up to a specified limit.

          + */ + MaxResults?: number; + + /** + *

          The sort order for the results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          The parameter by which to sort the results. The default is CreationTime.

          + */ + SortBy?: UserProfileSortKey | string; + + /** + *

          A parameter by which to filter the results.

          + */ + DomainIdEquals?: string; + + /** + *

          A parameter by which to filter the results.

   */
  UserProfileNameContains?: string;
}

export namespace ListUserProfilesRequest {
  export const filterSensitiveLog = (obj: ListUserProfilesRequest): any => ({
    ...obj,
  });
}

/**
 * The user profile details.
 */
export interface UserProfileDetails {
  /**
   * The domain ID.
   */
  DomainId?: string;

  /**
   * The user profile name.
   */
  UserProfileName?: string;

  /**
   * The status of the user profile.
   */
  Status?: UserProfileStatus | string;

  /**
   * The creation time.
   */
  CreationTime?: Date;

  /**
   * The last modified time.
   */
  LastModifiedTime?: Date;
}

export namespace UserProfileDetails {
  export const filterSensitiveLog = (obj: UserProfileDetails): any => ({
    ...obj,
  });
}

export interface ListUserProfilesResponse {
  /**
   * The list of user profiles.
   */
  UserProfiles?: UserProfileDetails[];

  /**
   * If the previous response was truncated, you will receive this token.
   * Use it in your next request to receive the next set of results.
   */
  NextToken?: string;
}

export namespace ListUserProfilesResponse {
  export const filterSensitiveLog = (obj: ListUserProfilesResponse): any => ({
    ...obj,
  });
}

export enum ListWorkforcesSortByOptions {
  CreateDate = "CreateDate",
  Name = "Name",
}

export interface ListWorkforcesRequest {
  /**

          Sort workforces using the workforce name or creation date.

          + */ + SortBy?: ListWorkforcesSortByOptions | string; + + /** + *

          Sort workforces in ascending or descending order.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          A filter you can use to search for workforces using part of the workforce name.

          + */ + NameContains?: string; + + /** + *

          A token to resume pagination.

          + */ + NextToken?: string; + + /** + *

          The maximum number of workforces returned in the response.

          + */ + MaxResults?: number; +} + +export namespace ListWorkforcesRequest { + export const filterSensitiveLog = (obj: ListWorkforcesRequest): any => ({ + ...obj, + }); +} + +export interface ListWorkforcesResponse { + /** + *

          A list containing information about your workforce.

          + */ + Workforces: Workforce[] | undefined; + + /** + *

          A token to resume pagination.

          + */ + NextToken?: string; +} + +export namespace ListWorkforcesResponse { + export const filterSensitiveLog = (obj: ListWorkforcesResponse): any => ({ + ...obj, + }); +} + +export enum ListWorkteamsSortByOptions { + CreateDate = "CreateDate", + Name = "Name", +} + +export interface ListWorkteamsRequest { + /** + *

          The field to sort results by. The default is CreationTime.

          + */ + SortBy?: ListWorkteamsSortByOptions | string; + + /** + *

          The sort order for results. The default is Ascending.

          + */ + SortOrder?: SortOrder | string; + + /** + *

          A string in the work team's name. This filter returns only work teams whose name + * contains the specified string.

          + */ + NameContains?: string; + + /** + *

          If the result of the previous ListWorkteams request was truncated, the + * response includes a NextToken. To retrieve the next set of labeling jobs, + * use the token in the next request.

          + */ + NextToken?: string; + + /** + *

          The maximum number of work teams to return in each page of the response.

          + */ + MaxResults?: number; +} + +export namespace ListWorkteamsRequest { + export const filterSensitiveLog = (obj: ListWorkteamsRequest): any => ({ + ...obj, + }); +} + +export interface ListWorkteamsResponse { + /** + *

          An array of Workteam objects, each describing a work team.

          + */ + Workteams: Workteam[] | undefined; + + /** + *

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of + * work teams, use it in the subsequent request.

   */
  NextToken?: string;
}

export namespace ListWorkteamsResponse {
  export const filterSensitiveLog = (obj: ListWorkteamsResponse): any => ({
    ...obj,
  });
}

/**
 * A versioned model that can be deployed for SageMaker inference.
 */
export interface ModelPackage {
  /**
   * The name of the model.
   */
  ModelPackageName?: string;

  /**
   * The model group to which the model belongs.
   */
  ModelPackageGroupName?: string;

  /**
   * The version number of a versioned model.
   */
  ModelPackageVersion?: number;

  /**
   * The Amazon Resource Name (ARN) of the model package.
   */
  ModelPackageArn?: string;

  /**
   * The description of the model package.
   */
  ModelPackageDescription?: string;

  /**
   * The time that the model package was created.
   */
  CreationTime?: Date;

  /**
   * Defines how to perform inference generation after a training job is run.
   */
  InferenceSpecification?: InferenceSpecification;

  /**
   * A list of algorithms that were used to create a model package.
   */
  SourceAlgorithmSpecification?: SourceAlgorithmSpecification;

  /**
   * Specifies batch transform jobs that Amazon SageMaker runs to validate your model package.
   */
  ValidationSpecification?: ModelPackageValidationSpecification;

  /**
   * The status of the model package. This can be one of the following values:
   *
   * - PENDING - The model package is pending being created.
   * - IN_PROGRESS - The model package is in the process of being created.
   * - COMPLETED - The model package was successfully created.
   * - FAILED - The model package failed.
   * - DELETING - The model package is in the process of being deleted.
   */
  ModelPackageStatus?: ModelPackageStatus | string;

  /**
   * Specifies the validation and image scan statuses of the model package.
   */
  ModelPackageStatusDetails?: ModelPackageStatusDetails;

  /**
   * Whether the model package is to be certified to be listed on AWS Marketplace. For
   * information about listing model packages on AWS Marketplace, see List Your
   * Algorithm or Model Package on AWS Marketplace.
   */
  CertifyForMarketplace?: boolean;

  /**
   * The approval status of the model. This can be one of the following values:
   *
   * - APPROVED - The model is approved.
   * - REJECTED - The model is rejected.
   * - PENDING_MANUAL_APPROVAL - The model is waiting for manual approval.
   */
  ModelApprovalStatus?: ModelApprovalStatus | string;

  /**
   * Information about the user who created or modified an experiment, trial, or trial
   * component.
   */
  CreatedBy?: UserContext;

  /**
   * Metadata properties of the tracking entity, trial, or trial component.
   */
  MetadataProperties?: MetadataProperties;

  /**
   * Metrics for the model.
   */
  ModelMetrics?: ModelMetrics;

  /**
   * The last time the model package was modified.
   */
  LastModifiedTime?: Date;

  /**
   * Information about the user who created or modified an experiment, trial, or trial
   * component.
   */
  LastModifiedBy?: UserContext;

  /**
   * A description provided when the model approval is set.
   */
  ApprovalDescription?: string;

  /**
   * A list of the tags associated with the model package. For more information, see Tagging AWS
   * resources in the AWS General Reference Guide.
   */
  Tags?: Tag[];
}

export namespace ModelPackage {
  export const filterSensitiveLog = (obj: ModelPackage): any => ({
    ...obj,
  });
}

/**

 * A group of versioned models in the model registry.
 */
export interface ModelPackageGroup {
  /**
   * The name of the model group.
   */
  ModelPackageGroupName?: string;

  /**
   * The Amazon Resource Name (ARN) of the model group.
   */
  ModelPackageGroupArn?: string;

  /**
   * The description for the model group.
   */
  ModelPackageGroupDescription?: string;

  /**
   * The time that the model group was created.
   */
  CreationTime?: Date;

  /**
   * Information about the user who created or modified an experiment, trial, or trial
   * component.
   */
  CreatedBy?: UserContext;

  /**
   * The status of the model group. This can be one of the following values:
   *
   * - PENDING - The model group is pending being created.
   * - IN_PROGRESS - The model group is in the process of being created.
   * - COMPLETED - The model group was successfully created.
   * - FAILED - The model group failed.
   * - DELETING - The model group is in the process of being deleted.
   * - DELETE_FAILED - SageMaker failed to delete the model group.
   */
  ModelPackageGroupStatus?: ModelPackageGroupStatus | string;

  /**
   * A list of the tags associated with the model group. For more information, see Tagging AWS
   * resources in the AWS General Reference Guide.
   */
  Tags?: Tag[];
}

export namespace ModelPackageGroup {
  export const filterSensitiveLog = (obj: ModelPackageGroup): any => ({
    ...obj,
  });
}

/**

 * A list of nested Filter objects. A resource must satisfy the conditions
 * of all filters to be included in the results returned from the Search API.
 *
 * For example, to filter on a training job's InputDataConfig property with a
 * specific channel name and S3Uri prefix, define the following filters:
 *
 * - '{Name:"InputDataConfig.ChannelName", "Operator":"Equals", "Value":"train"}'
 * - '{Name:"InputDataConfig.DataSource.S3DataSource.S3Uri", "Operator":"Contains",
 *   "Value":"mybucket/catdata"}'
 */
export interface NestedFilters {
  /**
   * The name of the property to use in the nested filters. The value must match a listed
   * property name, such as InputDataConfig.
   */
  NestedPropertyName: string | undefined;

  /**
   * A list of filters. Each filter acts on a property. Filters must contain at least one
   * Filters value. For example, a NestedFilters call might include a filter on the
   * PropertyName parameter of the InputDataConfig property:
   * InputDataConfig.DataSource.S3DataSource.S3Uri.
   */
  Filters: Filter[] | undefined;
}

export namespace NestedFilters {
  export const filterSensitiveLog = (obj: NestedFilters): any => ({
    ...obj,
  });
}

/**
 * The trial that a trial component is associated with and the experiment the trial is part
 * of. A component might not be associated with a trial. A component can be associated with
 * multiple trials.
 */
export interface Parent {
  /**
   * The name of the trial.
   */
  TrialName?: string;

  /**
   * The name of the experiment.
   */
  ExperimentName?: string;
}

export namespace Parent {
  export const filterSensitiveLog = (obj: Parent): any => ({
    ...obj,
  });
}

/**

          A SageMaker Model Building Pipeline instance.

          + */ +export interface Pipeline { + /** + *

          The Amazon Resource Name (ARN) of the pipeline.

          + */ + PipelineArn?: string; + + /** + *

          The name of the pipeline.

          + */ + PipelineName?: string; + + /** + *

          The display name of the pipeline.

          + */ + PipelineDisplayName?: string; + + /** + *

          The description of the pipeline.

          + */ + PipelineDescription?: string; + + /** + *

          The Amazon Resource Name (ARN) of the role that created the pipeline.

          + */ + RoleArn?: string; + + /** + *

          The status of the pipeline.

          + */ + PipelineStatus?: PipelineStatus | string; + + /** + *

          The creation time of the pipeline.

          + */ + CreationTime?: Date; + + /** + *

          The time that the pipeline was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The time when the pipeline was last run.

          + */ + LastRunTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + CreatedBy?: UserContext; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + LastModifiedBy?: UserContext; + + /** + *

          A list of tags that apply to the pipeline.

          + */ + Tags?: Tag[]; +} + +export namespace Pipeline { + export const filterSensitiveLog = (obj: Pipeline): any => ({ + ...obj, + }); +} + +/** + *

          An execution of a pipeline.

          + */ +export interface PipelineExecution { + /** + *

          The Amazon Resource Name (ARN) of the pipeline that was executed.

          + */ + PipelineArn?: string; + + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn?: string; + + /** + *

          The display name of the pipeline execution.

          + */ + PipelineExecutionDisplayName?: string; + + /** + *

          The status of the pipeline status.

          + */ + PipelineExecutionStatus?: PipelineExecutionStatus | string; + + /** + *

          The description of the pipeline execution.

          + */ + PipelineExecutionDescription?: string; + + /** + *

          The creation time of the pipeline execution.

          + */ + CreationTime?: Date; + + /** + *

          The time that the pipeline execution was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + CreatedBy?: UserContext; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + LastModifiedBy?: UserContext; + + /** + *

          Contains a list of pipeline parameters. This list can be empty.

          + */ + PipelineParameters?: Parameter[]; +} + +export namespace PipelineExecution { + export const filterSensitiveLog = (obj: PipelineExecution): any => ({ + ...obj, + }); +} + +/** + *

          An Amazon SageMaker processing job that is used to analyze data and evaluate models. For more information, + * see Process + * Data and Evaluate Models.

          + */ +export interface ProcessingJob { + /** + *

          List of input configurations for the processing job.

          + */ + ProcessingInputs?: ProcessingInput[]; + + /** + *

          The output configuration for the processing job.

          + */ + ProcessingOutputConfig?: ProcessingOutputConfig; + + /** + *

          The name of the processing job.

          + */ + ProcessingJobName?: string; + + /** + *

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a + * processing job. In distributed training, you specify more than one instance.

          + */ + ProcessingResources?: ProcessingResources; + + /** + *

          Specifies a time limit for how long the processing job is allowed to run.

          + */ + StoppingCondition?: ProcessingStoppingCondition; + + /** + *

          Configuration to run a processing job in a specified container image.

          + */ + AppSpecification?: AppSpecification; + + /** + *

          Sets the environment variables in the Docker container.

          + */ + Environment?: { [key: string]: string }; + + /** + *

          Networking options for a job, such as network traffic encryption between containers, + * whether to allow inbound and outbound network calls to and from containers, and the VPC + * subnets and security groups to use for VPC-enabled jobs.

          + */ + NetworkConfig?: NetworkConfig; + + /** + *

          The ARN of the role used to create the processing job.

          + */ + RoleArn?: string; + + /** + *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

          + * + */ + ExperimentConfig?: ExperimentConfig; + + /** + *

          The ARN of the processing job.

          + */ + ProcessingJobArn?: string; + + /** + *

          The status of the processing job.

          + */ + ProcessingJobStatus?: ProcessingJobStatus | string; + + /** + *

          A string, up to one KB in size, that contains metadata from the processing + * container when the processing job exits.

          + */ + ExitMessage?: string; + + /** + *

          A string, up to one KB in size, that contains the reason a processing job failed, if + * it failed.

          + */ + FailureReason?: string; + + /** + *

          The time that the processing job ended.

          + */ + ProcessingEndTime?: Date; + + /** + *

          The time that the processing job started.

          + */ + ProcessingStartTime?: Date; + + /** + *

          The time the processing job was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          The time the processing job was created.

          + */ + CreationTime?: Date; + + /** + *

          The ARN of a monitoring schedule for an endpoint associated with this processing + * job.

          + */ + MonitoringScheduleArn?: string; + + /** + *

          The Amazon Resource Name (ARN) of the AutoML job associated with this processing job.

          + */ + AutoMLJobArn?: string; + + /** + *

          The ARN of the training job associated with this processing job.

          + */ + TrainingJobArn?: string; + + /** + *

          An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management + * User Guide.

          + */ + Tags?: Tag[]; +} + +export namespace ProcessingJob { + export const filterSensitiveLog = (obj: ProcessingJob): any => ({ + ...obj, + }); +} + +export interface PutModelPackageGroupPolicyInput { + /** + *

          The name of the model group to add a resource policy to.

          + */ + ModelPackageGroupName: string | undefined; + + /** + *

          The resource policy for the model group.

          + */ + ResourcePolicy: string | undefined; +} + +export namespace PutModelPackageGroupPolicyInput { + export const filterSensitiveLog = (obj: PutModelPackageGroupPolicyInput): any => ({ + ...obj, + }); +} + +export interface PutModelPackageGroupPolicyOutput { + /** + *

          The Amazon Resource Name (ARN) of the model package group.

   */
  ModelPackageGroupArn: string | undefined;
}

export namespace PutModelPackageGroupPolicyOutput {
  export const filterSensitiveLog = (obj: PutModelPackageGroupPolicyOutput): any => ({
    ...obj,
  });
}

/**
 * Contains input values for a task.
 */
export interface RenderableTask {
  /**
   * A JSON object that contains values for the variables defined in the template. It is
   * made available to the template under the substitution variable task.input.
   * For example, if you define a variable task.input.text in your template, you
   * can supply the variable in the JSON object as "text": "sample text".
   */
  Input: string | undefined;
}

export namespace RenderableTask {
  export const filterSensitiveLog = (obj: RenderableTask): any => ({
    ...obj,
  });
}

/**
 * A description of an error that occurred while rendering the template.
 */
export interface RenderingError {
  /**
   * A unique identifier for a specific class of errors.
   */
  Code: string | undefined;

  /**
   * A human-readable message describing the error.
   */
  Message: string | undefined;
}

export namespace RenderingError {
  export const filterSensitiveLog = (obj: RenderingError): any => ({
    ...obj,
  });
}

export interface RenderUiTemplateRequest {
  /**
   * A Template object containing the worker UI template to render.
   */
  UiTemplate?: UiTemplate;

  /**
   * A RenderableTask object containing a representative task to render.
   */
  Task: RenderableTask | undefined;

  /**
   * The Amazon Resource Name (ARN) that has access to the S3 objects that are used by the
   * template.
   */
  RoleArn: string | undefined;

  /**
   * The HumanTaskUiArn of the worker UI that you want to render. Do not
   * provide a HumanTaskUiArn if you use the UiTemplate parameter.
   *
   * See a list of available Human Ui Amazon Resource Names (ARNs) in UiConfig.
   */
  HumanTaskUiArn?: string;
}

export namespace RenderUiTemplateRequest {
  export const filterSensitiveLog = (obj: RenderUiTemplateRequest): any => ({
    ...obj,
  });
}

export interface RenderUiTemplateResponse {
  /**
   * A Liquid template that renders the HTML for the worker UI.
   */
  RenderedContent: string | undefined;

  /**
   * A list of one or more RenderingError objects if any were encountered
   * while rendering the template. If there were no errors, the list is empty.
   */
  Errors: RenderingError[] | undefined;
}

export namespace RenderUiTemplateResponse {
  export const filterSensitiveLog = (obj: RenderUiTemplateResponse): any => ({
    ...obj,
  });
}

export enum SearchSortOrder {
  ASCENDING = "Ascending",
  DESCENDING = "Descending",
}

/**

          Contains information about a training job.

          + */ +export interface TrainingJob { + /** + *

          The name of the training job.

          + */ + TrainingJobName?: string; + + /** + *

          The Amazon Resource Name (ARN) of the training job.

          + */ + TrainingJobArn?: string; + + /** + *

          The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the + * training job was launched by a hyperparameter tuning job.

          + */ + TuningJobArn?: string; + + /** + *

          The Amazon Resource Name (ARN) of the labeling job.

          + */ + LabelingJobArn?: string; + + /** + *

          The Amazon Resource Name (ARN) of the job.

          + */ + AutoMLJobArn?: string; + + /** + *

          Information about the Amazon S3 location that is configured for storing model + * artifacts.

          + */ + ModelArtifacts?: ModelArtifacts; + + /** + *

          The status of the + * training + * job.

          + *

          Training job statuses are:

          + *
            + *
          • + *

            + * InProgress - The training is in progress.

            + *
          • + *
          • + *

            + * Completed - The training job has completed.

            + *
          • + *
          • + *

            + * Failed - The training job has failed. To see the reason for the + * failure, see the FailureReason field in the response to a + * DescribeTrainingJobResponse call.

            + *
          • + *
          • + *

            + * Stopping - The training job is stopping.

            + *
          • + *
          • + *

            + * Stopped - The training job has stopped.

            + *
          • + *
          + *

          For + * more detailed information, see SecondaryStatus.

          + */ + TrainingJobStatus?: TrainingJobStatus | string; + + /** + *

          Provides detailed information about the state of the training job. For detailed + * information about the secondary status of the training job, see + * StatusMessage under SecondaryStatusTransition.

          + *

          Amazon SageMaker provides primary statuses and secondary statuses that apply to each of + * them:

          + *
          + *
          InProgress
          + *
          + *
            + *
          • + *

            + * Starting + * - Starting the training job.

            + *
          • + *
          • + *

            + * Downloading - An optional stage for algorithms that + * support File training input mode. It indicates that + * data is being downloaded to the ML storage volumes.

            + *
          • + *
          • + *

            + * Training - Training is in progress.

            + *
          • + *
          • + *

            + * Uploading - Training is complete and the model + * artifacts are being uploaded to the S3 location.

            + *
          • + *
          + *
          + *
          Completed
          + *
          + *
            + *
          • + *

            + * Completed - The training job has completed.

            + *
          • + *
          + *
          + *
          Failed
          + *
          + *
            + *
          • + *

            + * Failed - The training job has failed. The reason for + * the failure is returned in the FailureReason field of + * DescribeTrainingJobResponse.

            + *
          • + *
          + *
          + *
          Stopped
          + *
          + *
            + *
          • + *

            + * MaxRuntimeExceeded - The job stopped because it + * exceeded the maximum allowed runtime.

            + *
          • + *
          • + *

            + * Stopped - The training job has stopped.

            + *
          • + *
          + *
          + *
          Stopping
          + *
          + *
            + *
          • + *

            + * Stopping - Stopping the training job.

            + *
          • + *
          + *
          + *
          + * + *

          Valid values for SecondaryStatus are subject to change.

          + *
          + *

          We no longer support the following secondary statuses:

          + *
            + *
          • + *

            + * LaunchingMLInstances + *

            + *
          • + *
          • + *

            + * PreparingTrainingStack + *

            + *
          • + *
          • + *

            + * DownloadingTrainingImage + *

            + *
          • + *
          + */ + SecondaryStatus?: SecondaryStatus | string; + + /** + *

          If the training job failed, the reason it failed.

          + */ + FailureReason?: string; + + /** + *

          Algorithm-specific parameters.

          + */ + HyperParameters?: { [key: string]: string }; + + /** + *

          Information about the algorithm used for training, and algorithm metadata.

          + */ + AlgorithmSpecification?: AlgorithmSpecification; + + /** + *

          The AWS Identity and Access Management (IAM) role configured for the training job.

          + */ + RoleArn?: string; + + /** + *

          An array of Channel objects that describes each data input + * channel.

          + */ + InputDataConfig?: Channel[]; + + /** + *

          The S3 path where model artifacts that you configured when creating the job are + * stored. Amazon SageMaker creates subfolders for model artifacts.

          + */ + OutputDataConfig?: OutputDataConfig; + + /** + *

          Resources, including ML compute instances and ML storage volumes, that are configured + * for model training.

          + */ + ResourceConfig?: ResourceConfig; + + /** + *

          A VpcConfig object that specifies the VPC that this training job has + * access to. For more information, see Protect Training Jobs by Using an Amazon + * Virtual Private Cloud.

          + */ + VpcConfig?: VpcConfig; + + /** + *

          Specifies a limit to how long a model training job can run. When the job reaches the + * time limit, Amazon SageMaker ends the training job. Use this API to cap model training costs.

          + *

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays + * job termination for 120 seconds. Algorithms can use this 120-second window to save the + * model artifacts, so the results of training are not lost.

          + */ + StoppingCondition?: StoppingCondition; + + /** + *

          A timestamp that indicates when the training job was created.

          + */ + CreationTime?: Date; + + /** + *

          Indicates the time when the training job starts on training instances. You are billed + * for the time interval between this time and the value of TrainingEndTime. + * The start time in CloudWatch Logs might be later than this time. The difference is due to the time + * it takes to download the training data and to the size of the training container.

          + */ + TrainingStartTime?: Date; + + /** + *

          Indicates the time when the training job ends on training instances. You are billed + * for the time interval between the value of TrainingStartTime and this time. + * For successful jobs and stopped jobs, this is the time after model artifacts are + * uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          + */ + TrainingEndTime?: Date; + + /** + *

          A timestamp that indicates when the status of the training job was last + * modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          A history of all of the secondary statuses that the training job has transitioned + * through.

          + */ + SecondaryStatusTransitions?: SecondaryStatusTransition[]; + + /** + *

          A list of final metric values that are set when the training job completes. Used only + * if the training job was configured to use metrics.

          + */ + FinalMetricDataList?: MetricData[]; + + /** + *

          If the TrainingJob was created with network isolation, the value is set + * to true. If network isolation is enabled, nodes can't communicate beyond + * the VPC they run in.

          + */ + EnableNetworkIsolation?: boolean; + + /** + *

          To encrypt all communications between ML compute instances in distributed training, + * choose True. Encryption provides greater security for distributed training, + * but training might take longer. How long it takes depends on the amount of communication + * between compute instances, especially if you use a deep learning algorithm in + * distributed training.

          + */ + EnableInterContainerTrafficEncryption?: boolean; + + /** + *

          When true, enables managed spot training using Amazon EC2 Spot instances to run + * training jobs instead of on-demand instances. For more information, see Managed Spot Training.

          + */ + EnableManagedSpotTraining?: boolean; + + /** + *

          Contains information about the output location for managed spot training checkpoint + * data.

          + */ + CheckpointConfig?: CheckpointConfig; + + /** + *

          The training time in seconds.

          + */ + TrainingTimeInSeconds?: number; + + /** + *

          The billable time in seconds.

          + */ + BillableTimeInSeconds?: number; + + /** + *

          Configuration information for the debug hook parameters, collection configuration, and + * storage paths.

          + */ + DebugHookConfig?: DebugHookConfig; + + /** + *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

          + * + */ + ExperimentConfig?: ExperimentConfig; + + /** + *

          Information about the debug rule configuration.

          + */ + DebugRuleConfigurations?: DebugRuleConfiguration[]; + + /** + *

          Configuration of storage locations for TensorBoard output.

          + */ + TensorBoardOutputConfig?: TensorBoardOutputConfig; + + /** + *

          Information about the evaluation status of the rules for the training job.

          + */ + DebugRuleEvaluationStatuses?: DebugRuleEvaluationStatus[]; + + /** + *

          An array of key-value pairs. You can use tags to categorize your AWS resources in + * different ways, for example, by purpose, owner, or environment. For more information, + * see Tagging AWS + * Resources.

          + */ + Tags?: Tag[]; +} + +export namespace TrainingJob { + export const filterSensitiveLog = (obj: TrainingJob): any => ({ + ...obj, + }); +} + +/** + *

          A short summary of a trial component.

          + */ +export interface TrialComponentSimpleSummary { + /** + *

          The name of the trial component.

          + */ + TrialComponentName?: string; + + /** + *

          The Amazon Resource Name (ARN) of the trial component.

          + */ + TrialComponentArn?: string; + + /** + *

          The Amazon Resource Name (ARN) and job type of the source of a trial component.

          + */ + TrialComponentSource?: TrialComponentSource; + + /** + *

          When the component was created.

          + */ + CreationTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + CreatedBy?: UserContext; +} + +export namespace TrialComponentSimpleSummary { + export const filterSensitiveLog = (obj: TrialComponentSimpleSummary): any => ({ + ...obj, + }); +} + +/** + *

          The properties of a trial as returned by the Search API.

          + */ +export interface Trial { + /** + *

          The name of the trial.

          + */ + TrialName?: string; + + /** + *

          The Amazon Resource Name (ARN) of the trial.

          + */ + TrialArn?: string; + + /** + *

          The name of the trial as displayed. If DisplayName isn't specified, + * TrialName is displayed.

          + */ + DisplayName?: string; + + /** + *

          The name of the experiment the trial is part of.

          + */ + ExperimentName?: string; + + /** + *

          The source of the trial.

          + */ + Source?: TrialSource; + + /** + *

          When the trial was created.

          + */ + CreationTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + CreatedBy?: UserContext; + + /** + *

          Who last modified the trial.

          + */ + LastModifiedTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + LastModifiedBy?: UserContext; + + /** + *

          Metadata properties of the tracking entity, trial, or trial component.

          + */ + MetadataProperties?: MetadataProperties; + + /** + *

          The list of tags that are associated with the trial. You can use Search + * API to search on the tags.

          + */ + Tags?: Tag[]; + + /** + *

          A list of the components associated with the trial. For each component, a summary of the + * component's properties is included.

          + */ + TrialComponentSummaries?: TrialComponentSimpleSummary[]; +} + +export namespace Trial { + export const filterSensitiveLog = (obj: Trial): any => ({ + ...obj, + }); +} + +/** + *

          A batch transform job. For information about SageMaker batch transform, see Use Batch + * Transform.

          + */ +export interface TransformJob { + /** + *

          The name of the transform job.

          + */ + TransformJobName?: string; + + /** + *

          The Amazon Resource Name (ARN) of the transform job.

          + */ + TransformJobArn?: string; + + /** + *

          The status of the transform job.

          + *

          Transform job statuses are:

          + *
            + *
          • + *

            + * InProgress - The job is in progress.

            + *
          • + *
          • + *

            + * Completed - The job has completed.

            + *
          • + *
          • + *

            + * Failed - The transform job has failed. To see the reason for the failure, + * see the FailureReason field in the response to a + * DescribeTransformJob call.

            + *
          • + *
          • + *

            + * Stopping - The transform job is stopping.

            + *
          • + *
          • + *

            + * Stopped - The transform job has stopped.

            + *
          • + *
          + */ + TransformJobStatus?: TransformJobStatus | string; + + /** + *

          If the transform job failed, the reason it failed.

          + */ + FailureReason?: string; + + /** + *

          The name of the model associated with the transform job.

          + */ + ModelName?: string; + + /** + *

          The maximum number of parallel requests that can be sent to each instance in a transform + * job. If MaxConcurrentTransforms is set to 0 or left unset, SageMaker checks the + * optional execution-parameters to determine the settings for your chosen algorithm. If the + * execution-parameters endpoint is not enabled, the default value is 1. For built-in algorithms, + * you don't need to set a value for MaxConcurrentTransforms.

          + */ + MaxConcurrentTransforms?: number; + + /** + *

          Configures the timeout and maximum number of retries for processing a transform job + * invocation.

          + */ + ModelClientConfig?: ModelClientConfig; + + /** + *

          The maximum allowed size of the payload, in MB. A payload is the data portion of a record + * (without metadata). The value in MaxPayloadInMB must be greater than, or equal + * to, the size of a single record. To estimate the size of a record in MB, divide the size of + * your dataset by the number of records. To ensure that the records fit within the maximum + * payload size, we recommend using a slightly larger value. The default value is 6 MB. For cases + * where the payload might be arbitrarily large and is transmitted using HTTP chunked encoding, + * set the value to 0. This feature works only in supported algorithms. Currently, SageMaker built-in + * algorithms do not support HTTP chunked encoding.

          + */ + MaxPayloadInMB?: number; + + /** + *

          Specifies the number of records to include in a mini-batch for an HTTP inference request. + * A record is a single unit of input data that inference can be made on. For example, a single + * line in a CSV file is a record.

          + */ + BatchStrategy?: BatchStrategy | string; + + /** + *

          The environment variables to set in the Docker container. We support up to 16 key and + * values entries in the map.

          + */ + Environment?: { [key: string]: string }; + + /** + *

          Describes the input source of a transform job and the way the transform job consumes + * it.

          + */ + TransformInput?: TransformInput; + + /** + *

          Describes the results of a transform job.

          + */ + TransformOutput?: TransformOutput; + + /** + *

          Describes the resources, including ML instance types and ML instance count, to use for + * transform job.

          + */ + TransformResources?: TransformResources; + + /** + *

          A timestamp that shows when the transform Job was created.

          + */ + CreationTime?: Date; + + /** + *

          Indicates when the transform job starts on ML instances. You are billed for the time + * interval between this time and the value of TransformEndTime.

          + */ + TransformStartTime?: Date; + + /** + *

          Indicates when the transform job has been completed, or has stopped or failed. You are + * billed for the time interval between this time and the value of + * TransformStartTime.

          + */ + TransformEndTime?: Date; + + /** + *

          The Amazon Resource Name (ARN) of the labeling job that created the transform job.

          + */ + LabelingJobArn?: string; + + /** + *

          The Amazon Resource Name (ARN) of the AutoML job that created the transform job.

          + */ + AutoMLJobArn?: string; + + /** + *

          The data structure used to specify the data to be used for inference in a batch + * transform job and to associate the data that is relevant to the prediction results in + * the output. The input filter provided allows you to exclude input data that is not + * needed for inference in a batch transform job. The output filter provided allows you to + * include input data relevant to interpreting the predictions in the output from the job. + * For more information, see Associate Prediction + * Results with their Corresponding Input Records.

          + */ + DataProcessing?: DataProcessing; + + /** + *

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when + * you call the following APIs:

          + * + */ + ExperimentConfig?: ExperimentConfig; + + /** + *

          A list of tags associated with the transform job.

          + */ + Tags?: Tag[]; +} + +export namespace TransformJob { + export const filterSensitiveLog = (obj: TransformJob): any => ({ + ...obj, + }); +} + +/** + *

          Detailed information about the source of a trial component. Either + * ProcessingJob or TrainingJob is returned.

          + */ +export interface TrialComponentSourceDetail { + /** + *

          The Amazon Resource Name (ARN) of the source.

          + */ + SourceArn?: string; + + /** + *

          Information about a training job that's the source of a trial component.

          + */ + TrainingJob?: TrainingJob; + + /** + *

          Information about a processing job that's the source of a trial component.

          + */ + ProcessingJob?: ProcessingJob; + + /** + *

          Information about a transform job that's the source of a trial component.

          + */ + TransformJob?: TransformJob; +} + +export namespace TrialComponentSourceDetail { + export const filterSensitiveLog = (obj: TrialComponentSourceDetail): any => ({ + ...obj, + }); +} + +/** + *

          The properties of a trial component as returned by the Search + * API.

          + */ +export interface TrialComponent { + /** + *

          The name of the trial component.

          + */ + TrialComponentName?: string; + + /** + *

          The name of the component as displayed. If DisplayName isn't specified, + * TrialComponentName is displayed.

          + */ + DisplayName?: string; + + /** + *

          The Amazon Resource Name (ARN) of the trial component.

          + */ + TrialComponentArn?: string; + + /** + *

          The Amazon Resource Name (ARN) and job type of the source of the component.

          + */ + Source?: TrialComponentSource; + + /** + *

          The status of the trial component.

          + */ + Status?: TrialComponentStatus; + + /** + *

          When the component started.

          + */ + StartTime?: Date; + + /** + *

          When the component ended.

          + */ + EndTime?: Date; + + /** + *

          When the component was created.

          + */ + CreationTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + CreatedBy?: UserContext; + + /** + *

          When the component was last modified.

          + */ + LastModifiedTime?: Date; + + /** + *

          Information about the user who created or modified an experiment, trial, or trial + * component.

          + */ + LastModifiedBy?: UserContext; + + /** + *

          The hyperparameters of the component.

          + */ + Parameters?: { [key: string]: TrialComponentParameterValue }; + + /** + *

          The input artifacts of the component.

          + */ + InputArtifacts?: { [key: string]: TrialComponentArtifact }; + + /** + *

          The output artifacts of the component.

          + */ + OutputArtifacts?: { [key: string]: TrialComponentArtifact }; + + /** + *

          The metrics for the component.

          + */ + Metrics?: TrialComponentMetricSummary[]; + + /** + *

          Metadata properties of the tracking entity, trial, or trial component.

          + */ + MetadataProperties?: MetadataProperties; + + /** + *

          Details of the source of the component.

          + */ + SourceDetail?: TrialComponentSourceDetail; + + /** + *

          The list of tags that are associated with the component. You can use Search API to search on the tags.

          + */ + Tags?: Tag[]; + + /** + *

          An array of the parents of the component. A parent is a trial the component is associated + * with and the experiment the trial is part of. A component might not have any parents.

          + */ + Parents?: Parent[]; +} + +export namespace TrialComponent { + export const filterSensitiveLog = (obj: TrialComponent): any => ({ + ...obj, + ...(obj.Parameters && { + Parameters: Object.entries(obj.Parameters).reduce( + (acc: any, [key, value]: [string, TrialComponentParameterValue]) => ({ + ...acc, + [key]: TrialComponentParameterValue.filterSensitiveLog(value), + }), + {} + ), + }), + }); +} + +/** + *

          A single resource returned as part of the Search API response.

          + */ +export interface SearchRecord { + /** + *

          The properties of a training job.

          + */ + TrainingJob?: TrainingJob; + + /** + *

          The properties of an experiment.

          + */ + Experiment?: Experiment; + + /** + *

          The properties of a trial.

          + */ + Trial?: Trial; + + /** + *

          The properties of a trial component.

          + */ + TrialComponent?: TrialComponent; + + /** + *

          A hosted endpoint for real-time inference.

          + */ + Endpoint?: Endpoint; + + /** + *

          A versioned model that can be deployed for SageMaker inference.

          + */ + ModelPackage?: ModelPackage; + + /** + *

          A group of versioned models in the model registry.

          + */ + ModelPackageGroup?: ModelPackageGroup; + + /** + *

          A SageMaker Model Building Pipeline instance.

          + */ + Pipeline?: Pipeline; + + /** + *

          An execution of a pipeline.

          + */ + PipelineExecution?: PipelineExecution; + + /** + *

          Amazon SageMaker Feature Store stores features in a collection called Feature Group. + * A Feature Group can be visualized as a table which has rows, + * with a unique identifier for each row where each column in the table is a feature. + * In principle, a Feature Group is composed of features and values per features.

          + */ + FeatureGroup?: FeatureGroup; +} + +export namespace SearchRecord { + export const filterSensitiveLog = (obj: SearchRecord): any => ({ + ...obj, + ...(obj.TrialComponent && { TrialComponent: TrialComponent.filterSensitiveLog(obj.TrialComponent) }), + }); +} + +export interface SearchResponse { + /** + *

          A list of SearchRecord objects.

          + */ + Results?: SearchRecord[]; + + /** + *

          If the result of the previous Search request was truncated, the response + * includes a NextToken. To retrieve the next set of results, use the token in the next + * request.

          + */ + NextToken?: string; +} + +export namespace SearchResponse { + export const filterSensitiveLog = (obj: SearchResponse): any => ({ + ...obj, + }); +} + +export interface StartMonitoringScheduleRequest { + /** + *

          The name of the schedule to start.

          + */ + MonitoringScheduleName: string | undefined; +} + +export namespace StartMonitoringScheduleRequest { + export const filterSensitiveLog = (obj: StartMonitoringScheduleRequest): any => ({ + ...obj, + }); +} + +export interface StartNotebookInstanceInput { + /** + *

          The name of the notebook instance to start.

          + */ + NotebookInstanceName: string | undefined; +} + +export namespace StartNotebookInstanceInput { + export const filterSensitiveLog = (obj: StartNotebookInstanceInput): any => ({ + ...obj, + }); +} + +export interface StartPipelineExecutionRequest { + /** + *

          The name of the pipeline.

          + */ + PipelineName: string | undefined; + + /** + *

          The display name of the pipeline execution.

          + */ + PipelineExecutionDisplayName?: string; + + /** + *

          Contains a list of pipeline parameters. This list can be empty.

          + */ + PipelineParameters?: Parameter[]; + + /** + *

          The description of the pipeline execution.

          + */ + PipelineExecutionDescription?: string; + + /** + *

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * operation. An idempotent operation completes no more than one time.

          + */ + ClientRequestToken?: string; +} + +export namespace StartPipelineExecutionRequest { + export const filterSensitiveLog = (obj: StartPipelineExecutionRequest): any => ({ + ...obj, + }); +} + +export interface StartPipelineExecutionResponse { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn?: string; +} + +export namespace StartPipelineExecutionResponse { + export const filterSensitiveLog = (obj: StartPipelineExecutionResponse): any => ({ + ...obj, + }); +} + +export interface StopAutoMLJobRequest { + /** + *

          The name of the object you are requesting.

          + */ + AutoMLJobName: string | undefined; +} + +export namespace StopAutoMLJobRequest { + export const filterSensitiveLog = (obj: StopAutoMLJobRequest): any => ({ + ...obj, + }); +} + +export interface StopCompilationJobRequest { + /** + *

          The name of the model compilation job to stop.

          + */ + CompilationJobName: string | undefined; +} + +export namespace StopCompilationJobRequest { + export const filterSensitiveLog = (obj: StopCompilationJobRequest): any => ({ + ...obj, + }); +} + +export interface StopHyperParameterTuningJobRequest { + /** + *

          The name of the tuning job to stop.

          + */ + HyperParameterTuningJobName: string | undefined; +} + +export namespace StopHyperParameterTuningJobRequest { + export const filterSensitiveLog = (obj: StopHyperParameterTuningJobRequest): any => ({ + ...obj, + }); +} + +export interface StopLabelingJobRequest { + /** + *

          The name of the labeling job to stop.

          + */ + LabelingJobName: string | undefined; +} + +export namespace StopLabelingJobRequest { + export const filterSensitiveLog = (obj: StopLabelingJobRequest): any => ({ + ...obj, + }); +} + +export interface StopMonitoringScheduleRequest { + /** + *

          The name of the schedule to stop.

          + */ + MonitoringScheduleName: string | undefined; +} + +export namespace StopMonitoringScheduleRequest { + export const filterSensitiveLog = (obj: StopMonitoringScheduleRequest): any => ({ + ...obj, + }); +} + +export interface StopNotebookInstanceInput { + /** + *

          The name of the notebook instance to terminate.

          + */ + NotebookInstanceName: string | undefined; +} + +export namespace StopNotebookInstanceInput { + export const filterSensitiveLog = (obj: StopNotebookInstanceInput): any => ({ + ...obj, + }); +} + +export interface StopPipelineExecutionRequest { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn: string | undefined; + + /** + *

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the + * operation. An idempotent operation completes no more than one time.

          + */ + ClientRequestToken?: string; +} + +export namespace StopPipelineExecutionRequest { + export const filterSensitiveLog = (obj: StopPipelineExecutionRequest): any => ({ + ...obj, + }); +} + +export interface StopPipelineExecutionResponse { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn?: string; +} + +export namespace StopPipelineExecutionResponse { + export const filterSensitiveLog = (obj: StopPipelineExecutionResponse): any => ({ + ...obj, + }); +} + +export interface StopProcessingJobRequest { + /** + *

          The name of the processing job to stop.

          + */ + ProcessingJobName: string | undefined; +} + +export namespace StopProcessingJobRequest { + export const filterSensitiveLog = (obj: StopProcessingJobRequest): any => ({ + ...obj, + }); +} + +export interface StopTrainingJobRequest { + /** + *

          The name of the training job to stop.

          + */ + TrainingJobName: string | undefined; +} + +export namespace StopTrainingJobRequest { + export const filterSensitiveLog = (obj: StopTrainingJobRequest): any => ({ + ...obj, + }); +} + +export interface StopTransformJobRequest { + /** + *

          The name of the transform job to stop.

          + */ + TransformJobName: string | undefined; +} + +export namespace StopTransformJobRequest { + export const filterSensitiveLog = (obj: StopTransformJobRequest): any => ({ + ...obj, + }); +} + +export interface UpdateActionRequest { + /** + *

          The name of the action to update.

          + */ + ActionName: string | undefined; + + /** + *

          The new description for the action.

          + */ + Description?: string; + + /** + *

          The new status for the action.

          + */ + Status?: ActionStatus | string; + + /** + *

          The new list of properties. Overwrites the current property list.

          + */ + Properties?: { [key: string]: string }; + + /** + *

          A list of properties to remove.

          + */ + PropertiesToRemove?: string[]; +} + +export namespace UpdateActionRequest { + export const filterSensitiveLog = (obj: UpdateActionRequest): any => ({ + ...obj, + }); +} + +export interface UpdateActionResponse { + /** + *

          The Amazon Resource Name (ARN) of the action.

          + */ + ActionArn?: string; +} + +export namespace UpdateActionResponse { + export const filterSensitiveLog = (obj: UpdateActionResponse): any => ({ + ...obj, + }); +} + +export interface UpdateAppImageConfigRequest { + /** + *

          The name of the AppImageConfig to update.

          + */ + AppImageConfigName: string | undefined; + + /** + *

          The new KernelGateway app to run on the image.

          + */ + KernelGatewayImageConfig?: KernelGatewayImageConfig; +} + +export namespace UpdateAppImageConfigRequest { + export const filterSensitiveLog = (obj: UpdateAppImageConfigRequest): any => ({ + ...obj, + }); +} + +export interface UpdateAppImageConfigResponse { + /** + *

          The Amazon Resource Name (ARN) for the AppImageConfig.

          + */ + AppImageConfigArn?: string; +} + +export namespace UpdateAppImageConfigResponse { + export const filterSensitiveLog = (obj: UpdateAppImageConfigResponse): any => ({ + ...obj, + }); +} + +export interface UpdateArtifactRequest { + /** + *

          The Amazon Resource Name (ARN) of the artifact to update.

          + */ + ArtifactArn: string | undefined; + + /** + *

          The new name for the artifact.

          + */ + ArtifactName?: string; + + /** + *

          The new list of properties. Overwrites the current property list.

          + */ + Properties?: { [key: string]: string }; + + /** + *

          A list of properties to remove.

          + */ + PropertiesToRemove?: string[]; +} + +export namespace UpdateArtifactRequest { + export const filterSensitiveLog = (obj: UpdateArtifactRequest): any => ({ + ...obj, + }); +} + +export interface UpdateArtifactResponse { + /** + *

          The Amazon Resource Name (ARN) of the artifact.

          + */ + ArtifactArn?: string; +} + +export namespace UpdateArtifactResponse { + export const filterSensitiveLog = (obj: UpdateArtifactResponse): any => ({ + ...obj, + }); +} + +export interface UpdateCodeRepositoryInput { + /** + *

          The name of the Git repository to update.

          + */ + CodeRepositoryName: string | undefined; + + /** + *

          The configuration of the git repository, including the URL and the Amazon Resource + * Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to + * access the repository. The secret must have a staging label of AWSCURRENT + * and must be in the following format:

          + *

          + * {"username": UserName, "password": + * Password} + *

          + */ + GitConfig?: GitConfigForUpdate; +} + +export namespace UpdateCodeRepositoryInput { + export const filterSensitiveLog = (obj: UpdateCodeRepositoryInput): any => ({ + ...obj, + }); +} + +export interface UpdateCodeRepositoryOutput { + /** + *

          The ARN of the Git repository.

          + */ + CodeRepositoryArn: string | undefined; +} + +export namespace UpdateCodeRepositoryOutput { + export const filterSensitiveLog = (obj: UpdateCodeRepositoryOutput): any => ({ + ...obj, + }); +} + +export interface UpdateContextRequest { + /** + *

          The name of the context to update.

          + */ + ContextName: string | undefined; + + /** + *

          The new description for the context.

          + */ + Description?: string; + + /** + *

          The new list of properties. Overwrites the current property list.

          + */ + Properties?: { [key: string]: string }; + + /** + *

          A list of properties to remove.

          + */ + PropertiesToRemove?: string[]; +} + +export namespace UpdateContextRequest { + export const filterSensitiveLog = (obj: UpdateContextRequest): any => ({ + ...obj, + }); +} + +export interface UpdateContextResponse { + /** + *

          The Amazon Resource Name (ARN) of the context.

          + */ + ContextArn?: string; +} + +export namespace UpdateContextResponse { + export const filterSensitiveLog = (obj: UpdateContextResponse): any => ({ + ...obj, + }); +} + +export interface UpdateDomainRequest { + /** + *

          The ID of the domain to be updated.

          + */ + DomainId: string | undefined; + + /** + *

          A collection of settings.

          + */ + DefaultUserSettings?: UserSettings; +} + +export namespace UpdateDomainRequest { + export const filterSensitiveLog = (obj: UpdateDomainRequest): any => ({ + ...obj, + }); +} + +export interface UpdateDomainResponse { + /** + *

          The Amazon Resource Name (ARN) of the domain.

          + */ + DomainArn?: string; +} + +export namespace UpdateDomainResponse { + export const filterSensitiveLog = (obj: UpdateDomainResponse): any => ({ + ...obj, + }); +} + +export enum VariantPropertyType { + DataCaptureConfig = "DataCaptureConfig", + DesiredInstanceCount = "DesiredInstanceCount", + DesiredWeight = "DesiredWeight", +} + +/** + *

          Specifies a production variant property type for an Endpoint.

          + *

          If you are updating an endpoint with the UpdateEndpointInput$RetainAllVariantProperties option set to + * true, the VariantProperty objects listed in UpdateEndpointInput$ExcludeRetainedVariantProperties override the + * existing variant properties of the endpoint.

          + */ +export interface VariantProperty { + /** + *

          The type of variant property. The supported values are:

          + * + */ + VariantPropertyType: VariantPropertyType | string | undefined; +} + +export namespace VariantProperty { + export const filterSensitiveLog = (obj: VariantProperty): any => ({ + ...obj, + }); +} + export interface UpdateEndpointInput { /** *

          The name of the endpoint whose configuration you want to update.

          @@ -38,12 +6146,13 @@ export interface UpdateEndpointInput { EndpointConfigName: string | undefined; /** - *

          When updating endpoint resources, enables or disables the retention of variant - * properties, such as the instance count or the variant weight. To retain the variant + *

          When updating endpoint resources, enables or disables the retention of variant + * properties, such as the instance count or the variant weight. To retain the variant * properties of an endpoint when updating it, set RetainAllVariantProperties * to true. To use the variant properties specified in a new * EndpointConfig call when updating an endpoint, set - * RetainAllVariantProperties to false.

          + * RetainAllVariantProperties to false. The default is + * false.

          */ RetainAllVariantProperties?: boolean; @@ -56,6 +6165,11 @@ export interface UpdateEndpointInput { *

          */ ExcludeRetainedVariantProperties?: VariantProperty[]; + + /** + *

          The deployment configuration for the endpoint to be updated.

          + */ + DeploymentConfig?: DeploymentConfig; } export namespace UpdateEndpointInput { @@ -192,6 +6306,42 @@ export namespace UpdateImageResponse { }); } +export interface UpdateModelPackageInput { + /** + *

          The Amazon Resource Name (ARN) of the model.

          + */ + ModelPackageArn: string | undefined; + + /** + *

          The approval status of the model.

          + */ + ModelApprovalStatus: ModelApprovalStatus | string | undefined; + + /** + *

          A description for the approval status of the model.

          + */ + ApprovalDescription?: string; +} + +export namespace UpdateModelPackageInput { + export const filterSensitiveLog = (obj: UpdateModelPackageInput): any => ({ + ...obj, + }); +} + +export interface UpdateModelPackageOutput { + /** + *

          The Amazon Resource Name (ARN) of the model.

          + */ + ModelPackageArn: string | undefined; +} + +export namespace UpdateModelPackageOutput { + export const filterSensitiveLog = (obj: UpdateModelPackageOutput): any => ({ + ...obj, + }); +} + export interface UpdateMonitoringScheduleRequest { /** *

          The name of the monitoring schedule. The name must be unique within an AWS Region within @@ -382,6 +6532,88 @@ export namespace UpdateNotebookInstanceLifecycleConfigOutput { }); } +export interface UpdatePipelineRequest { + /** + *

          The name of the pipeline to update.

          + */ + PipelineName: string | undefined; + + /** + *

          The display name of the pipeline.

          + */ + PipelineDisplayName?: string; + + /** + *

          The JSON pipeline definition.

          + */ + PipelineDefinition?: string; + + /** + *

          The description of the pipeline.

          + */ + PipelineDescription?: string; + + /** + *

          The Amazon Resource Name (ARN) that the pipeline uses to execute.

          + */ + RoleArn?: string; +} + +export namespace UpdatePipelineRequest { + export const filterSensitiveLog = (obj: UpdatePipelineRequest): any => ({ + ...obj, + }); +} + +export interface UpdatePipelineResponse { + /** + *

          The Amazon Resource Name (ARN) of the updated pipeline.

          + */ + PipelineArn?: string; +} + +export namespace UpdatePipelineResponse { + export const filterSensitiveLog = (obj: UpdatePipelineResponse): any => ({ + ...obj, + }); +} + +export interface UpdatePipelineExecutionRequest { + /** + *

          The Amazon Resource Name (ARN) of the pipeline execution.

          + */ + PipelineExecutionArn: string | undefined; + + /** + *

          The description of the pipeline execution.

          + */ + PipelineExecutionDescription?: string; + + /** + *

          The display name of the pipeline execution.

          + */ + PipelineExecutionDisplayName?: string; +} + +export namespace UpdatePipelineExecutionRequest { + export const filterSensitiveLog = (obj: UpdatePipelineExecutionRequest): any => ({ + ...obj, + }); +} + +export interface UpdatePipelineExecutionResponse { + /** + *

          The Amazon Resource Name (ARN) of the updated pipeline execution.

          + */ + PipelineExecutionArn?: string; +} + +export namespace UpdatePipelineExecutionResponse { + export const filterSensitiveLog = (obj: UpdatePipelineExecutionResponse): any => ({ + ...obj, + }); +} + export interface UpdateTrialRequest { /** *

          The name of the trial to update.

          @@ -476,6 +6708,15 @@ export interface UpdateTrialComponentRequest { export namespace UpdateTrialComponentRequest { export const filterSensitiveLog = (obj: UpdateTrialComponentRequest): any => ({ ...obj, + ...(obj.Parameters && { + Parameters: Object.entries(obj.Parameters).reduce( + (acc: any, [key, value]: [string, TrialComponentParameterValue]) => ({ + ...acc, + [key]: TrialComponentParameterValue.filterSensitiveLog(value), + }), + {} + ), + }), }); } diff --git a/clients/client-sagemaker/pagination/ListActionsPaginator.ts b/clients/client-sagemaker/pagination/ListActionsPaginator.ts new file mode 100644 index 000000000000..dd2c41c2b1cf --- /dev/null +++ b/clients/client-sagemaker/pagination/ListActionsPaginator.ts @@ -0,0 +1,53 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { ListActionsCommand, ListActionsCommandInput, ListActionsCommandOutput } from "../commands/ListActionsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListActionsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListActionsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListActionsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listActions(input, ...args); +}; +export async function* paginateListActions( + config: SageMakerPaginationConfiguration, + input: ListActionsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListActionsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await 
makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListAppImageConfigsPaginator.ts b/clients/client-sagemaker/pagination/ListAppImageConfigsPaginator.ts new file mode 100644 index 000000000000..2c8a0dba6ddd --- /dev/null +++ b/clients/client-sagemaker/pagination/ListAppImageConfigsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListAppImageConfigsCommand, + ListAppImageConfigsCommandInput, + ListAppImageConfigsCommandOutput, +} from "../commands/ListAppImageConfigsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListAppImageConfigsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListAppImageConfigsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListAppImageConfigsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listAppImageConfigs(input, ...args); +}; +export async function* paginateListAppImageConfigs( + config: SageMakerPaginationConfiguration, + input: ListAppImageConfigsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListAppImageConfigsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if 
(config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListArtifactsPaginator.ts b/clients/client-sagemaker/pagination/ListArtifactsPaginator.ts new file mode 100644 index 000000000000..1f5ffc7eb5a6 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListArtifactsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListArtifactsCommand, + ListArtifactsCommandInput, + ListArtifactsCommandOutput, +} from "../commands/ListArtifactsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListArtifactsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListArtifactsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListArtifactsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listArtifacts(input, ...args); +}; +export async function* paginateListArtifacts( + config: SageMakerPaginationConfiguration, + input: ListArtifactsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListArtifactsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + 
page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListAssociationsPaginator.ts b/clients/client-sagemaker/pagination/ListAssociationsPaginator.ts new file mode 100644 index 000000000000..820f3a7f3c34 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListAssociationsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListAssociationsCommand, + ListAssociationsCommandInput, + ListAssociationsCommandOutput, +} from "../commands/ListAssociationsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListAssociationsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListAssociationsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListAssociationsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listAssociations(input, ...args); +}; +export async function* paginateListAssociations( + config: SageMakerPaginationConfiguration, + input: ListAssociationsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListAssociationsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + 
page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListContextsPaginator.ts b/clients/client-sagemaker/pagination/ListContextsPaginator.ts new file mode 100644 index 000000000000..ab2ab7804620 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListContextsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListContextsCommand, + ListContextsCommandInput, + ListContextsCommandOutput, +} from "../commands/ListContextsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListContextsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListContextsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListContextsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listContexts(input, ...args); +}; +export async function* paginateListContexts( + config: SageMakerPaginationConfiguration, + input: ListContextsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListContextsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, 
...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListModelPackageGroupsPaginator.ts b/clients/client-sagemaker/pagination/ListModelPackageGroupsPaginator.ts new file mode 100644 index 000000000000..ab9d92b678f2 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListModelPackageGroupsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListModelPackageGroupsCommand, + ListModelPackageGroupsCommandInput, + ListModelPackageGroupsCommandOutput, +} from "../commands/ListModelPackageGroupsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListModelPackageGroupsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListModelPackageGroupsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListModelPackageGroupsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listModelPackageGroups(input, ...args); +}; +export async function* paginateListModelPackageGroups( + config: SageMakerPaginationConfiguration, + input: ListModelPackageGroupsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListModelPackageGroupsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if 
(config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListPipelineExecutionStepsPaginator.ts b/clients/client-sagemaker/pagination/ListPipelineExecutionStepsPaginator.ts new file mode 100644 index 000000000000..b77f1cbd26ec --- /dev/null +++ b/clients/client-sagemaker/pagination/ListPipelineExecutionStepsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListPipelineExecutionStepsCommand, + ListPipelineExecutionStepsCommandInput, + ListPipelineExecutionStepsCommandOutput, +} from "../commands/ListPipelineExecutionStepsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListPipelineExecutionStepsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListPipelineExecutionStepsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListPipelineExecutionStepsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listPipelineExecutionSteps(input, ...args); +}; +export async function* paginateListPipelineExecutionSteps( + config: SageMakerPaginationConfiguration, + input: ListPipelineExecutionStepsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let 
page: ListPipelineExecutionStepsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListPipelineExecutionsPaginator.ts b/clients/client-sagemaker/pagination/ListPipelineExecutionsPaginator.ts new file mode 100644 index 000000000000..c0555ee31a6b --- /dev/null +++ b/clients/client-sagemaker/pagination/ListPipelineExecutionsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListPipelineExecutionsCommand, + ListPipelineExecutionsCommandInput, + ListPipelineExecutionsCommandOutput, +} from "../commands/ListPipelineExecutionsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListPipelineExecutionsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListPipelineExecutionsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListPipelineExecutionsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.listPipelineExecutions(input, ...args); +}; +export async function* paginateListPipelineExecutions( + config: SageMakerPaginationConfiguration, + input: ListPipelineExecutionsCommandInput, + ...additionalArguments: any +): Paginator { + let 
token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListPipelineExecutionsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListPipelineParametersForExecutionPaginator.ts b/clients/client-sagemaker/pagination/ListPipelineParametersForExecutionPaginator.ts new file mode 100644 index 000000000000..b6c9ece1bb19 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListPipelineParametersForExecutionPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListPipelineParametersForExecutionCommand, + ListPipelineParametersForExecutionCommandInput, + ListPipelineParametersForExecutionCommandOutput, +} from "../commands/ListPipelineParametersForExecutionCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListPipelineParametersForExecutionCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListPipelineParametersForExecutionCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListPipelineParametersForExecutionCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await 
client.listPipelineParametersForExecution(input, ...args); +}; +export async function* paginateListPipelineParametersForExecution( + config: SageMakerPaginationConfiguration, + input: ListPipelineParametersForExecutionCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListPipelineParametersForExecutionCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListPipelinesPaginator.ts b/clients/client-sagemaker/pagination/ListPipelinesPaginator.ts new file mode 100644 index 000000000000..5ab7025e6bf5 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListPipelinesPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListPipelinesCommand, + ListPipelinesCommandInput, + ListPipelinesCommandOutput, +} from "../commands/ListPipelinesCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListPipelinesCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListPipelinesCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListPipelinesCommandInput, + ...args: any +): 
Promise => { + // @ts-ignore + return await client.listPipelines(input, ...args); +}; +export async function* paginateListPipelines( + config: SageMakerPaginationConfiguration, + input: ListPipelinesCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListPipelinesCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/pagination/ListProjectsPaginator.ts b/clients/client-sagemaker/pagination/ListProjectsPaginator.ts new file mode 100644 index 000000000000..603870f3ce14 --- /dev/null +++ b/clients/client-sagemaker/pagination/ListProjectsPaginator.ts @@ -0,0 +1,57 @@ +import { SageMaker } from "../SageMaker"; +import { SageMakerClient } from "../SageMakerClient"; +import { + ListProjectsCommand, + ListProjectsCommandInput, + ListProjectsCommandOutput, +} from "../commands/ListProjectsCommand"; +import { SageMakerPaginationConfiguration } from "./Interfaces"; +import { Paginator } from "@aws-sdk/types"; + +/** + * @private + */ +const makePagedClientRequest = async ( + client: SageMakerClient, + input: ListProjectsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await client.send(new ListProjectsCommand(input), ...args); +}; +/** + * @private + */ +const makePagedRequest = async ( + client: SageMaker, + input: ListProjectsCommandInput, + ...args: any +): Promise => { + // @ts-ignore + return await 
client.listProjects(input, ...args); +}; +export async function* paginateListProjects( + config: SageMakerPaginationConfiguration, + input: ListProjectsCommandInput, + ...additionalArguments: any +): Paginator { + let token: string | undefined = config.startingToken || undefined; + let hasNext = true; + let page: ListProjectsCommandOutput; + while (hasNext) { + input.NextToken = token; + input["MaxResults"] = config.pageSize; + if (config.client instanceof SageMaker) { + page = await makePagedRequest(config.client, input, ...additionalArguments); + } else if (config.client instanceof SageMakerClient) { + page = await makePagedClientRequest(config.client, input, ...additionalArguments); + } else { + throw new Error("Invalid client, expected SageMaker | SageMakerClient"); + } + yield page; + token = page.NextToken; + hasNext = !!token; + } + // @ts-ignore + return undefined; +} diff --git a/clients/client-sagemaker/protocols/Aws_json1_1.ts b/clients/client-sagemaker/protocols/Aws_json1_1.ts index 244bb855e737..db15b98cc08d 100644 --- a/clients/client-sagemaker/protocols/Aws_json1_1.ts +++ b/clients/client-sagemaker/protocols/Aws_json1_1.ts @@ -1,14 +1,17 @@ +import { AddAssociationCommandInput, AddAssociationCommandOutput } from "../commands/AddAssociationCommand"; import { AddTagsCommandInput, AddTagsCommandOutput } from "../commands/AddTagsCommand"; import { AssociateTrialComponentCommandInput, AssociateTrialComponentCommandOutput, } from "../commands/AssociateTrialComponentCommand"; +import { CreateActionCommandInput, CreateActionCommandOutput } from "../commands/CreateActionCommand"; import { CreateAlgorithmCommandInput, CreateAlgorithmCommandOutput } from "../commands/CreateAlgorithmCommand"; import { CreateAppCommandInput, CreateAppCommandOutput } from "../commands/CreateAppCommand"; import { CreateAppImageConfigCommandInput, CreateAppImageConfigCommandOutput, } from "../commands/CreateAppImageConfigCommand"; +import { CreateArtifactCommandInput, 
CreateArtifactCommandOutput } from "../commands/CreateArtifactCommand"; import { CreateAutoMLJobCommandInput, CreateAutoMLJobCommandOutput } from "../commands/CreateAutoMLJobCommand"; import { CreateCodeRepositoryCommandInput, @@ -18,6 +21,7 @@ import { CreateCompilationJobCommandInput, CreateCompilationJobCommandOutput, } from "../commands/CreateCompilationJobCommand"; +import { CreateContextCommandInput, CreateContextCommandOutput } from "../commands/CreateContextCommand"; import { CreateDomainCommandInput, CreateDomainCommandOutput } from "../commands/CreateDomainCommand"; import { CreateEndpointCommandInput, CreateEndpointCommandOutput } from "../commands/CreateEndpointCommand"; import { @@ -25,6 +29,7 @@ import { CreateEndpointConfigCommandOutput, } from "../commands/CreateEndpointConfigCommand"; import { CreateExperimentCommandInput, CreateExperimentCommandOutput } from "../commands/CreateExperimentCommand"; +import { CreateFeatureGroupCommandInput, CreateFeatureGroupCommandOutput } from "../commands/CreateFeatureGroupCommand"; import { CreateFlowDefinitionCommandInput, CreateFlowDefinitionCommandOutput, @@ -39,6 +44,10 @@ import { CreateImageVersionCommandInput, CreateImageVersionCommandOutput } from import { CreateLabelingJobCommandInput, CreateLabelingJobCommandOutput } from "../commands/CreateLabelingJobCommand"; import { CreateModelCommandInput, CreateModelCommandOutput } from "../commands/CreateModelCommand"; import { CreateModelPackageCommandInput, CreateModelPackageCommandOutput } from "../commands/CreateModelPackageCommand"; +import { + CreateModelPackageGroupCommandInput, + CreateModelPackageGroupCommandOutput, +} from "../commands/CreateModelPackageGroupCommand"; import { CreateMonitoringScheduleCommandInput, CreateMonitoringScheduleCommandOutput, @@ -51,6 +60,7 @@ import { CreateNotebookInstanceLifecycleConfigCommandInput, CreateNotebookInstanceLifecycleConfigCommandOutput, } from "../commands/CreateNotebookInstanceLifecycleConfigCommand"; +import 
{ CreatePipelineCommandInput, CreatePipelineCommandOutput } from "../commands/CreatePipelineCommand"; import { CreatePresignedDomainUrlCommandInput, CreatePresignedDomainUrlCommandOutput, @@ -63,6 +73,7 @@ import { CreateProcessingJobCommandInput, CreateProcessingJobCommandOutput, } from "../commands/CreateProcessingJobCommand"; +import { CreateProjectCommandInput, CreateProjectCommandOutput } from "../commands/CreateProjectCommand"; import { CreateTrainingJobCommandInput, CreateTrainingJobCommandOutput } from "../commands/CreateTrainingJobCommand"; import { CreateTransformJobCommandInput, CreateTransformJobCommandOutput } from "../commands/CreateTransformJobCommand"; import { CreateTrialCommandInput, CreateTrialCommandOutput } from "../commands/CreateTrialCommand"; @@ -73,16 +84,20 @@ import { import { CreateUserProfileCommandInput, CreateUserProfileCommandOutput } from "../commands/CreateUserProfileCommand"; import { CreateWorkforceCommandInput, CreateWorkforceCommandOutput } from "../commands/CreateWorkforceCommand"; import { CreateWorkteamCommandInput, CreateWorkteamCommandOutput } from "../commands/CreateWorkteamCommand"; +import { DeleteActionCommandInput, DeleteActionCommandOutput } from "../commands/DeleteActionCommand"; import { DeleteAlgorithmCommandInput, DeleteAlgorithmCommandOutput } from "../commands/DeleteAlgorithmCommand"; import { DeleteAppCommandInput, DeleteAppCommandOutput } from "../commands/DeleteAppCommand"; import { DeleteAppImageConfigCommandInput, DeleteAppImageConfigCommandOutput, } from "../commands/DeleteAppImageConfigCommand"; +import { DeleteArtifactCommandInput, DeleteArtifactCommandOutput } from "../commands/DeleteArtifactCommand"; +import { DeleteAssociationCommandInput, DeleteAssociationCommandOutput } from "../commands/DeleteAssociationCommand"; import { DeleteCodeRepositoryCommandInput, DeleteCodeRepositoryCommandOutput, } from "../commands/DeleteCodeRepositoryCommand"; +import { DeleteContextCommandInput, 
DeleteContextCommandOutput } from "../commands/DeleteContextCommand"; import { DeleteDomainCommandInput, DeleteDomainCommandOutput } from "../commands/DeleteDomainCommand"; import { DeleteEndpointCommandInput, DeleteEndpointCommandOutput } from "../commands/DeleteEndpointCommand"; import { @@ -90,6 +105,7 @@ import { DeleteEndpointConfigCommandOutput, } from "../commands/DeleteEndpointConfigCommand"; import { DeleteExperimentCommandInput, DeleteExperimentCommandOutput } from "../commands/DeleteExperimentCommand"; +import { DeleteFeatureGroupCommandInput, DeleteFeatureGroupCommandOutput } from "../commands/DeleteFeatureGroupCommand"; import { DeleteFlowDefinitionCommandInput, DeleteFlowDefinitionCommandOutput, @@ -99,6 +115,14 @@ import { DeleteImageCommandInput, DeleteImageCommandOutput } from "../commands/D import { DeleteImageVersionCommandInput, DeleteImageVersionCommandOutput } from "../commands/DeleteImageVersionCommand"; import { DeleteModelCommandInput, DeleteModelCommandOutput } from "../commands/DeleteModelCommand"; import { DeleteModelPackageCommandInput, DeleteModelPackageCommandOutput } from "../commands/DeleteModelPackageCommand"; +import { + DeleteModelPackageGroupCommandInput, + DeleteModelPackageGroupCommandOutput, +} from "../commands/DeleteModelPackageGroupCommand"; +import { + DeleteModelPackageGroupPolicyCommandInput, + DeleteModelPackageGroupPolicyCommandOutput, +} from "../commands/DeleteModelPackageGroupPolicyCommand"; import { DeleteMonitoringScheduleCommandInput, DeleteMonitoringScheduleCommandOutput, @@ -111,6 +135,8 @@ import { DeleteNotebookInstanceLifecycleConfigCommandInput, DeleteNotebookInstanceLifecycleConfigCommandOutput, } from "../commands/DeleteNotebookInstanceLifecycleConfigCommand"; +import { DeletePipelineCommandInput, DeletePipelineCommandOutput } from "../commands/DeletePipelineCommand"; +import { DeleteProjectCommandInput, DeleteProjectCommandOutput } from "../commands/DeleteProjectCommand"; import { 
DeleteTagsCommandInput, DeleteTagsCommandOutput } from "../commands/DeleteTagsCommand"; import { DeleteTrialCommandInput, DeleteTrialCommandOutput } from "../commands/DeleteTrialCommand"; import { @@ -120,12 +146,14 @@ import { import { DeleteUserProfileCommandInput, DeleteUserProfileCommandOutput } from "../commands/DeleteUserProfileCommand"; import { DeleteWorkforceCommandInput, DeleteWorkforceCommandOutput } from "../commands/DeleteWorkforceCommand"; import { DeleteWorkteamCommandInput, DeleteWorkteamCommandOutput } from "../commands/DeleteWorkteamCommand"; +import { DescribeActionCommandInput, DescribeActionCommandOutput } from "../commands/DescribeActionCommand"; import { DescribeAlgorithmCommandInput, DescribeAlgorithmCommandOutput } from "../commands/DescribeAlgorithmCommand"; import { DescribeAppCommandInput, DescribeAppCommandOutput } from "../commands/DescribeAppCommand"; import { DescribeAppImageConfigCommandInput, DescribeAppImageConfigCommandOutput, } from "../commands/DescribeAppImageConfigCommand"; +import { DescribeArtifactCommandInput, DescribeArtifactCommandOutput } from "../commands/DescribeArtifactCommand"; import { DescribeAutoMLJobCommandInput, DescribeAutoMLJobCommandOutput } from "../commands/DescribeAutoMLJobCommand"; import { DescribeCodeRepositoryCommandInput, @@ -135,6 +163,7 @@ import { DescribeCompilationJobCommandInput, DescribeCompilationJobCommandOutput, } from "../commands/DescribeCompilationJobCommand"; +import { DescribeContextCommandInput, DescribeContextCommandOutput } from "../commands/DescribeContextCommand"; import { DescribeDomainCommandInput, DescribeDomainCommandOutput } from "../commands/DescribeDomainCommand"; import { DescribeEndpointCommandInput, DescribeEndpointCommandOutput } from "../commands/DescribeEndpointCommand"; import { @@ -142,6 +171,10 @@ import { DescribeEndpointConfigCommandOutput, } from "../commands/DescribeEndpointConfigCommand"; import { DescribeExperimentCommandInput, DescribeExperimentCommandOutput 
} from "../commands/DescribeExperimentCommand"; +import { + DescribeFeatureGroupCommandInput, + DescribeFeatureGroupCommandOutput, +} from "../commands/DescribeFeatureGroupCommand"; import { DescribeFlowDefinitionCommandInput, DescribeFlowDefinitionCommandOutput, @@ -168,6 +201,10 @@ import { DescribeModelPackageCommandInput, DescribeModelPackageCommandOutput, } from "../commands/DescribeModelPackageCommand"; +import { + DescribeModelPackageGroupCommandInput, + DescribeModelPackageGroupCommandOutput, +} from "../commands/DescribeModelPackageGroupCommand"; import { DescribeMonitoringScheduleCommandInput, DescribeMonitoringScheduleCommandOutput, @@ -180,10 +217,20 @@ import { DescribeNotebookInstanceLifecycleConfigCommandInput, DescribeNotebookInstanceLifecycleConfigCommandOutput, } from "../commands/DescribeNotebookInstanceLifecycleConfigCommand"; +import { DescribePipelineCommandInput, DescribePipelineCommandOutput } from "../commands/DescribePipelineCommand"; +import { + DescribePipelineDefinitionForExecutionCommandInput, + DescribePipelineDefinitionForExecutionCommandOutput, +} from "../commands/DescribePipelineDefinitionForExecutionCommand"; +import { + DescribePipelineExecutionCommandInput, + DescribePipelineExecutionCommandOutput, +} from "../commands/DescribePipelineExecutionCommand"; import { DescribeProcessingJobCommandInput, DescribeProcessingJobCommandOutput, } from "../commands/DescribeProcessingJobCommand"; +import { DescribeProjectCommandInput, DescribeProjectCommandOutput } from "../commands/DescribeProjectCommand"; import { DescribeSubscribedWorkteamCommandInput, DescribeSubscribedWorkteamCommandOutput, @@ -207,20 +254,39 @@ import { } from "../commands/DescribeUserProfileCommand"; import { DescribeWorkforceCommandInput, DescribeWorkforceCommandOutput } from "../commands/DescribeWorkforceCommand"; import { DescribeWorkteamCommandInput, DescribeWorkteamCommandOutput } from "../commands/DescribeWorkteamCommand"; +import { + 
DisableSagemakerServicecatalogPortfolioCommandInput, + DisableSagemakerServicecatalogPortfolioCommandOutput, +} from "../commands/DisableSagemakerServicecatalogPortfolioCommand"; import { DisassociateTrialComponentCommandInput, DisassociateTrialComponentCommandOutput, } from "../commands/DisassociateTrialComponentCommand"; +import { + EnableSagemakerServicecatalogPortfolioCommandInput, + EnableSagemakerServicecatalogPortfolioCommandOutput, +} from "../commands/EnableSagemakerServicecatalogPortfolioCommand"; +import { + GetModelPackageGroupPolicyCommandInput, + GetModelPackageGroupPolicyCommandOutput, +} from "../commands/GetModelPackageGroupPolicyCommand"; +import { + GetSagemakerServicecatalogPortfolioStatusCommandInput, + GetSagemakerServicecatalogPortfolioStatusCommandOutput, +} from "../commands/GetSagemakerServicecatalogPortfolioStatusCommand"; import { GetSearchSuggestionsCommandInput, GetSearchSuggestionsCommandOutput, } from "../commands/GetSearchSuggestionsCommand"; +import { ListActionsCommandInput, ListActionsCommandOutput } from "../commands/ListActionsCommand"; import { ListAlgorithmsCommandInput, ListAlgorithmsCommandOutput } from "../commands/ListAlgorithmsCommand"; import { ListAppImageConfigsCommandInput, ListAppImageConfigsCommandOutput, } from "../commands/ListAppImageConfigsCommand"; import { ListAppsCommandInput, ListAppsCommandOutput } from "../commands/ListAppsCommand"; +import { ListArtifactsCommandInput, ListArtifactsCommandOutput } from "../commands/ListArtifactsCommand"; +import { ListAssociationsCommandInput, ListAssociationsCommandOutput } from "../commands/ListAssociationsCommand"; import { ListAutoMLJobsCommandInput, ListAutoMLJobsCommandOutput } from "../commands/ListAutoMLJobsCommand"; import { ListCandidatesForAutoMLJobCommandInput, @@ -234,6 +300,7 @@ import { ListCompilationJobsCommandInput, ListCompilationJobsCommandOutput, } from "../commands/ListCompilationJobsCommand"; +import { ListContextsCommandInput, 
ListContextsCommandOutput } from "../commands/ListContextsCommand"; import { ListDomainsCommandInput, ListDomainsCommandOutput } from "../commands/ListDomainsCommand"; import { ListEndpointConfigsCommandInput, @@ -241,6 +308,7 @@ import { } from "../commands/ListEndpointConfigsCommand"; import { ListEndpointsCommandInput, ListEndpointsCommandOutput } from "../commands/ListEndpointsCommand"; import { ListExperimentsCommandInput, ListExperimentsCommandOutput } from "../commands/ListExperimentsCommand"; +import { ListFeatureGroupsCommandInput, ListFeatureGroupsCommandOutput } from "../commands/ListFeatureGroupsCommand"; import { ListFlowDefinitionsCommandInput, ListFlowDefinitionsCommandOutput, @@ -257,6 +325,10 @@ import { ListLabelingJobsForWorkteamCommandInput, ListLabelingJobsForWorkteamCommandOutput, } from "../commands/ListLabelingJobsForWorkteamCommand"; +import { + ListModelPackageGroupsCommandInput, + ListModelPackageGroupsCommandOutput, +} from "../commands/ListModelPackageGroupsCommand"; import { ListModelPackagesCommandInput, ListModelPackagesCommandOutput } from "../commands/ListModelPackagesCommand"; import { ListModelsCommandInput, ListModelsCommandOutput } from "../commands/ListModelsCommand"; import { @@ -275,7 +347,21 @@ import { ListNotebookInstancesCommandInput, ListNotebookInstancesCommandOutput, } from "../commands/ListNotebookInstancesCommand"; +import { + ListPipelineExecutionStepsCommandInput, + ListPipelineExecutionStepsCommandOutput, +} from "../commands/ListPipelineExecutionStepsCommand"; +import { + ListPipelineExecutionsCommandInput, + ListPipelineExecutionsCommandOutput, +} from "../commands/ListPipelineExecutionsCommand"; +import { + ListPipelineParametersForExecutionCommandInput, + ListPipelineParametersForExecutionCommandOutput, +} from "../commands/ListPipelineParametersForExecutionCommand"; +import { ListPipelinesCommandInput, ListPipelinesCommandOutput } from "../commands/ListPipelinesCommand"; import { 
ListProcessingJobsCommandInput, ListProcessingJobsCommandOutput } from "../commands/ListProcessingJobsCommand"; +import { ListProjectsCommandInput, ListProjectsCommandOutput } from "../commands/ListProjectsCommand"; import { ListSubscribedWorkteamsCommandInput, ListSubscribedWorkteamsCommandOutput, @@ -295,6 +381,10 @@ import { ListTrialsCommandInput, ListTrialsCommandOutput } from "../commands/Lis import { ListUserProfilesCommandInput, ListUserProfilesCommandOutput } from "../commands/ListUserProfilesCommand"; import { ListWorkforcesCommandInput, ListWorkforcesCommandOutput } from "../commands/ListWorkforcesCommand"; import { ListWorkteamsCommandInput, ListWorkteamsCommandOutput } from "../commands/ListWorkteamsCommand"; +import { + PutModelPackageGroupPolicyCommandInput, + PutModelPackageGroupPolicyCommandOutput, +} from "../commands/PutModelPackageGroupPolicyCommand"; import { RenderUiTemplateCommandInput, RenderUiTemplateCommandOutput } from "../commands/RenderUiTemplateCommand"; import { SearchCommandInput, SearchCommandOutput } from "../commands/SearchCommand"; import { @@ -305,6 +395,10 @@ import { StartNotebookInstanceCommandInput, StartNotebookInstanceCommandOutput, } from "../commands/StartNotebookInstanceCommand"; +import { + StartPipelineExecutionCommandInput, + StartPipelineExecutionCommandOutput, +} from "../commands/StartPipelineExecutionCommand"; import { StopAutoMLJobCommandInput, StopAutoMLJobCommandOutput } from "../commands/StopAutoMLJobCommand"; import { StopCompilationJobCommandInput, StopCompilationJobCommandOutput } from "../commands/StopCompilationJobCommand"; import { @@ -320,17 +414,24 @@ import { StopNotebookInstanceCommandInput, StopNotebookInstanceCommandOutput, } from "../commands/StopNotebookInstanceCommand"; +import { + StopPipelineExecutionCommandInput, + StopPipelineExecutionCommandOutput, +} from "../commands/StopPipelineExecutionCommand"; import { StopProcessingJobCommandInput, StopProcessingJobCommandOutput } from 
"../commands/StopProcessingJobCommand"; import { StopTrainingJobCommandInput, StopTrainingJobCommandOutput } from "../commands/StopTrainingJobCommand"; import { StopTransformJobCommandInput, StopTransformJobCommandOutput } from "../commands/StopTransformJobCommand"; +import { UpdateActionCommandInput, UpdateActionCommandOutput } from "../commands/UpdateActionCommand"; import { UpdateAppImageConfigCommandInput, UpdateAppImageConfigCommandOutput, } from "../commands/UpdateAppImageConfigCommand"; +import { UpdateArtifactCommandInput, UpdateArtifactCommandOutput } from "../commands/UpdateArtifactCommand"; import { UpdateCodeRepositoryCommandInput, UpdateCodeRepositoryCommandOutput, } from "../commands/UpdateCodeRepositoryCommand"; +import { UpdateContextCommandInput, UpdateContextCommandOutput } from "../commands/UpdateContextCommand"; import { UpdateDomainCommandInput, UpdateDomainCommandOutput } from "../commands/UpdateDomainCommand"; import { UpdateEndpointCommandInput, UpdateEndpointCommandOutput } from "../commands/UpdateEndpointCommand"; import { @@ -339,6 +440,7 @@ import { } from "../commands/UpdateEndpointWeightsAndCapacitiesCommand"; import { UpdateExperimentCommandInput, UpdateExperimentCommandOutput } from "../commands/UpdateExperimentCommand"; import { UpdateImageCommandInput, UpdateImageCommandOutput } from "../commands/UpdateImageCommand"; +import { UpdateModelPackageCommandInput, UpdateModelPackageCommandOutput } from "../commands/UpdateModelPackageCommand"; import { UpdateMonitoringScheduleCommandInput, UpdateMonitoringScheduleCommandOutput, @@ -351,6 +453,11 @@ import { UpdateNotebookInstanceLifecycleConfigCommandInput, UpdateNotebookInstanceLifecycleConfigCommandOutput, } from "../commands/UpdateNotebookInstanceLifecycleConfigCommand"; +import { UpdatePipelineCommandInput, UpdatePipelineCommandOutput } from "../commands/UpdatePipelineCommand"; +import { + UpdatePipelineExecutionCommandInput, + UpdatePipelineExecutionCommandOutput, +} from 
"../commands/UpdatePipelineExecutionCommand"; import { UpdateTrialCommandInput, UpdateTrialCommandOutput } from "../commands/UpdateTrialCommand"; import { UpdateTrialComponentCommandInput, @@ -360,8 +467,13 @@ import { UpdateUserProfileCommandInput, UpdateUserProfileCommandOutput } from ". import { UpdateWorkforceCommandInput, UpdateWorkforceCommandOutput } from "../commands/UpdateWorkforceCommand"; import { UpdateWorkteamCommandInput, UpdateWorkteamCommandOutput } from "../commands/UpdateWorkteamCommand"; import { + ActionSource, + ActionSummary, + AddAssociationRequest, + AddAssociationResponse, AddTagsInput, AddTagsOutput, + Alarm, AlgorithmSpecification, AlgorithmStatusDetails, AlgorithmStatusItem, @@ -372,8 +484,13 @@ import { AppDetails, AppImageConfigDetails, AppSpecification, + ArtifactSource, + ArtifactSourceType, + ArtifactSummary, AssociateTrialComponentRequest, AssociateTrialComponentResponse, + AssociationSummary, + AthenaDatasetDefinition, AutoMLCandidate, AutoMLCandidateStep, AutoMLChannel, @@ -387,6 +504,11 @@ import { AutoMLOutputDataConfig, AutoMLS3DataSource, AutoMLSecurityConfig, + AutoRollbackConfig, + Bias, + BlueGreenUpdatePolicy, + CacheHitResult, + CapacitySize, CaptureContentTypeHeader, CaptureOption, CategoricalParameterRange, @@ -400,23 +522,32 @@ import { CollectionConfiguration, CompilationJobSummary, CompressionType, + ConditionStepMetadata, ConflictException, ContainerDefinition, ContentClassifier, + ContextSource, + ContextSummary, ContinuousParameterRange, ContinuousParameterRangeSpecification, + CreateActionRequest, + CreateActionResponse, CreateAlgorithmInput, CreateAlgorithmOutput, CreateAppImageConfigRequest, CreateAppImageConfigResponse, CreateAppRequest, CreateAppResponse, + CreateArtifactRequest, + CreateArtifactResponse, CreateAutoMLJobRequest, CreateAutoMLJobResponse, CreateCodeRepositoryInput, CreateCodeRepositoryOutput, CreateCompilationJobRequest, CreateCompilationJobResponse, + CreateContextRequest, + 
CreateContextResponse, CreateDomainRequest, CreateDomainResponse, CreateEndpointConfigInput, @@ -425,6 +556,8 @@ import { CreateEndpointOutput, CreateExperimentRequest, CreateExperimentResponse, + CreateFeatureGroupRequest, + CreateFeatureGroupResponse, CreateFlowDefinitionRequest, CreateFlowDefinitionResponse, CreateHumanTaskUiRequest, @@ -439,6 +572,8 @@ import { CreateLabelingJobResponse, CreateModelInput, CreateModelOutput, + CreateModelPackageGroupInput, + CreateModelPackageGroupOutput, CreateModelPackageInput, CreateModelPackageOutput, CreateMonitoringScheduleRequest, @@ -447,40 +582,16 @@ import { CreateNotebookInstanceLifecycleConfigInput, CreateNotebookInstanceLifecycleConfigOutput, CreateNotebookInstanceOutput, + CreatePipelineRequest, + CreatePipelineResponse, CreatePresignedDomainUrlRequest, - CreatePresignedDomainUrlResponse, - CreatePresignedNotebookInstanceUrlInput, - CreatePresignedNotebookInstanceUrlOutput, - CreateProcessingJobRequest, - CreateProcessingJobResponse, - CreateTrainingJobRequest, - CreateTrainingJobResponse, - CreateTransformJobRequest, - CreateTransformJobResponse, - CreateTrialComponentRequest, - CreateTrialComponentResponse, - CreateTrialRequest, - CreateTrialResponse, - CreateUserProfileRequest, - CreateUserProfileResponse, - CreateWorkforceRequest, - CreateWorkforceResponse, - CreateWorkteamRequest, - CreateWorkteamResponse, CustomImage, DataCaptureConfig, - DataCaptureConfigSummary, - DataProcessing, + DataCatalogConfig, DataSource, - DebugHookConfig, - DebugRuleConfiguration, - DebugRuleEvaluationStatus, - DeleteAlgorithmInput, - DeleteAppImageConfigRequest, - DeleteAppRequest, - DeleteCodeRepositoryInput, EndpointInput, - ExperimentConfig, + Explainability, + FeatureDefinition, FileSystemConfig, FileSystemDataSource, FinalAutoMLJobObjectiveMetric, @@ -515,12 +626,15 @@ import { LabelingJobS3DataSource, LabelingJobSnsDataSource, LabelingJobStoppingConditions, - MemberDefinition, + MetadataProperties, MetricDefinition, - 
ModelClientConfig, + MetricsSource, + ModelDataQuality, + ModelMetrics, ModelPackageContainerDefinition, ModelPackageValidationProfile, ModelPackageValidationSpecification, + ModelQuality, MonitoringAppSpecification, MonitoringBaselineConfig, MonitoringClusterConfig, @@ -537,22 +651,14 @@ import { NetworkConfig, NotebookInstanceAcceleratorType, NotebookInstanceLifecycleHook, - NotificationConfiguration, - OidcConfig, - OidcMemberDefinition, + OfflineStoreConfig, + OnlineStoreConfig, + OnlineStoreSecurityConfig, OutputConfig, OutputDataConfig, ParameterRange, ParameterRanges, ParentHyperParameterTuningJob, - ProcessingClusterConfig, - ProcessingInput, - ProcessingOutput, - ProcessingOutputConfig, - ProcessingResources, - ProcessingS3Input, - ProcessingS3Output, - ProcessingStoppingCondition, ProductionVariant, ProductionVariantInstanceType, PublicWorkforceTaskPrice, @@ -562,19 +668,18 @@ import { ResourceLimits, ResourceNotFound, ResourceSpec, - RetentionPolicy, S3DataSource, + S3StorageConfig, ScheduleConfig, SharingSettings, ShuffleConfig, SourceAlgorithm, SourceAlgorithmSpecification, - SourceIpConfig, StoppingCondition, Tag, TargetPlatform, TensorBoardAppSettings, - TensorBoardOutputConfig, + TrafficRoutingConfig, TrainingInputMode, TrainingInstanceType, TrainingJobDefinition, @@ -586,22 +691,60 @@ import { TransformOutput, TransformResources, TransformS3DataSource, - TrialComponentArtifact, - TrialComponentParameterValue, - TrialComponentStatus, TuningJobCompletionCriteria, USD, UiConfig, UiTemplate, + UserContext, UserSettings, VpcConfig, } from "../models/models_0"; import { + CreatePresignedDomainUrlResponse, + CreatePresignedNotebookInstanceUrlInput, + CreatePresignedNotebookInstanceUrlOutput, + CreateProcessingJobRequest, + CreateProcessingJobResponse, + CreateProjectInput, + CreateProjectOutput, + CreateTrainingJobRequest, + CreateTrainingJobResponse, + CreateTransformJobRequest, + CreateTransformJobResponse, + CreateTrialComponentRequest, + 
CreateTrialComponentResponse, + CreateTrialRequest, + CreateTrialResponse, + CreateUserProfileRequest, + CreateUserProfileResponse, + CreateWorkforceRequest, + CreateWorkforceResponse, + CreateWorkteamRequest, + CreateWorkteamResponse, + DataCaptureConfigSummary, + DataProcessing, + DatasetDefinition, + DebugHookConfig, + DebugRuleConfiguration, + DebugRuleEvaluationStatus, + DeleteActionRequest, + DeleteActionResponse, + DeleteAlgorithmInput, + DeleteAppImageConfigRequest, + DeleteAppRequest, + DeleteArtifactRequest, + DeleteArtifactResponse, + DeleteAssociationRequest, + DeleteAssociationResponse, + DeleteCodeRepositoryInput, + DeleteContextRequest, + DeleteContextResponse, DeleteDomainRequest, DeleteEndpointConfigInput, DeleteEndpointInput, DeleteExperimentRequest, DeleteExperimentResponse, + DeleteFeatureGroupRequest, DeleteFlowDefinitionRequest, DeleteFlowDefinitionResponse, DeleteHumanTaskUiRequest, @@ -611,10 +754,15 @@ import { DeleteImageVersionRequest, DeleteImageVersionResponse, DeleteModelInput, + DeleteModelPackageGroupInput, + DeleteModelPackageGroupPolicyInput, DeleteModelPackageInput, DeleteMonitoringScheduleRequest, DeleteNotebookInstanceInput, DeleteNotebookInstanceLifecycleConfigInput, + DeletePipelineRequest, + DeletePipelineResponse, + DeleteProjectInput, DeleteTagsInput, DeleteTagsOutput, DeleteTrialComponentRequest, @@ -627,18 +775,25 @@ import { DeleteWorkteamRequest, DeleteWorkteamResponse, DeployedImage, + DeploymentConfig, + DescribeActionRequest, + DescribeActionResponse, DescribeAlgorithmInput, DescribeAlgorithmOutput, DescribeAppImageConfigRequest, DescribeAppImageConfigResponse, DescribeAppRequest, DescribeAppResponse, + DescribeArtifactRequest, + DescribeArtifactResponse, DescribeAutoMLJobRequest, DescribeAutoMLJobResponse, DescribeCodeRepositoryInput, DescribeCodeRepositoryOutput, DescribeCompilationJobRequest, DescribeCompilationJobResponse, + DescribeContextRequest, + DescribeContextResponse, DescribeDomainRequest, 
DescribeDomainResponse, DescribeEndpointConfigInput, @@ -647,6 +802,8 @@ import { DescribeEndpointOutput, DescribeExperimentRequest, DescribeExperimentResponse, + DescribeFeatureGroupRequest, + DescribeFeatureGroupResponse, DescribeFlowDefinitionRequest, DescribeFlowDefinitionResponse, DescribeHumanTaskUiRequest, @@ -661,6 +818,8 @@ import { DescribeLabelingJobResponse, DescribeModelInput, DescribeModelOutput, + DescribeModelPackageGroupInput, + DescribeModelPackageGroupOutput, DescribeModelPackageInput, DescribeModelPackageOutput, DescribeMonitoringScheduleRequest, @@ -669,8 +828,16 @@ import { DescribeNotebookInstanceLifecycleConfigInput, DescribeNotebookInstanceLifecycleConfigOutput, DescribeNotebookInstanceOutput, + DescribePipelineDefinitionForExecutionRequest, + DescribePipelineDefinitionForExecutionResponse, + DescribePipelineExecutionRequest, + DescribePipelineExecutionResponse, + DescribePipelineRequest, + DescribePipelineResponse, DescribeProcessingJobRequest, DescribeProcessingJobResponse, + DescribeProjectInput, + DescribeProjectOutput, DescribeSubscribedWorkteamRequest, DescribeSubscribedWorkteamResponse, DescribeTrainingJobRequest, @@ -688,17 +855,29 @@ import { DescribeWorkteamRequest, DescribeWorkteamResponse, DesiredWeightAndCapacity, + DisableSagemakerServicecatalogPortfolioInput, + DisableSagemakerServicecatalogPortfolioOutput, DisassociateTrialComponentRequest, DisassociateTrialComponentResponse, DomainDetails, + EnableSagemakerServicecatalogPortfolioInput, + EnableSagemakerServicecatalogPortfolioOutput, + Endpoint, EndpointConfigSummary, EndpointSummary, Experiment, + ExperimentConfig, ExperimentSource, ExperimentSummary, + FeatureGroup, + FeatureGroupSummary, Filter, FinalHyperParameterTuningJobObjectiveMetric, FlowDefinitionSummary, + GetModelPackageGroupPolicyInput, + GetModelPackageGroupPolicyOutput, + GetSagemakerServicecatalogPortfolioStatusInput, + GetSagemakerServicecatalogPortfolioStatusOutput, GetSearchSuggestionsRequest, 
GetSearchSuggestionsResponse, GitConfigForUpdate, @@ -712,12 +891,68 @@ import { LabelingJobForWorkteamSummary, LabelingJobOutput, LabelingJobSummary, + ListActionsRequest, + ListActionsResponse, ListAlgorithmsInput, ListAlgorithmsOutput, ListAppImageConfigsRequest, ListAppImageConfigsResponse, ListAppsRequest, ListAppsResponse, + ListArtifactsRequest, + ListArtifactsResponse, + ListAssociationsRequest, + MemberDefinition, + MetricData, + ModelArtifacts, + ModelClientConfig, + ModelDigests, + ModelPackageStatusDetails, + ModelPackageStatusItem, + MonitoringExecutionSummary, + MonitoringSchedule, + NotificationConfiguration, + ObjectiveStatusCounters, + OfflineStoreStatus, + OidcConfig, + OidcConfigForResponse, + OidcMemberDefinition, + ProcessingClusterConfig, + ProcessingFeatureStoreOutput, + ProcessingInput, + ProcessingOutput, + ProcessingOutputConfig, + ProcessingResources, + ProcessingS3Input, + ProcessingS3Output, + ProcessingStoppingCondition, + ProductionVariantSummary, + PropertyNameQuery, + PropertyNameSuggestion, + ProvisioningParameter, + RedshiftDatasetDefinition, + ResolvedAttributes, + RetentionPolicy, + SecondaryStatusTransition, + ServiceCatalogProvisionedProductDetails, + ServiceCatalogProvisioningDetails, + SourceIpConfig, + SubscribedWorkteam, + SuggestionQuery, + TensorBoardOutputConfig, + TrainingJobStatusCounters, + TrialComponentArtifact, + TrialComponentMetricSummary, + TrialComponentParameterValue, + TrialComponentSource, + TrialComponentStatus, + TrialSource, + UiTemplateInfo, + Workforce, + Workteam, +} from "../models/models_1"; +import { + ListAssociationsResponse, ListAutoMLJobsRequest, ListAutoMLJobsResponse, ListCandidatesForAutoMLJobRequest, @@ -726,6 +961,8 @@ import { ListCodeRepositoriesOutput, ListCompilationJobsRequest, ListCompilationJobsResponse, + ListContextsRequest, + ListContextsResponse, ListDomainsRequest, ListDomainsResponse, ListEndpointConfigsInput, @@ -734,6 +971,8 @@ import { ListEndpointsOutput, 
ListExperimentsRequest, ListExperimentsResponse, + ListFeatureGroupsRequest, + ListFeatureGroupsResponse, ListFlowDefinitionsRequest, ListFlowDefinitionsResponse, ListHumanTaskUisRequest, @@ -748,6 +987,8 @@ import { ListLabelingJobsForWorkteamResponse, ListLabelingJobsRequest, ListLabelingJobsResponse, + ListModelPackageGroupsInput, + ListModelPackageGroupsOutput, ListModelPackagesInput, ListModelPackagesOutput, ListModelsInput, @@ -760,8 +1001,18 @@ import { ListNotebookInstanceLifecycleConfigsOutput, ListNotebookInstancesInput, ListNotebookInstancesOutput, + ListPipelineExecutionStepsRequest, + ListPipelineExecutionStepsResponse, + ListPipelineExecutionsRequest, + ListPipelineExecutionsResponse, + ListPipelineParametersForExecutionRequest, + ListPipelineParametersForExecutionResponse, + ListPipelinesRequest, + ListPipelinesResponse, ListProcessingJobsRequest, ListProcessingJobsResponse, + ListProjectsInput, + ListProjectsOutput, ListSubscribedWorkteamsRequest, ListSubscribedWorkteamsResponse, ListTagsInput, @@ -782,76 +1033,78 @@ import { ListWorkforcesResponse, ListWorkteamsRequest, ListWorkteamsResponse, - MetricData, - ModelArtifacts, - ModelPackageStatusDetails, - ModelPackageStatusItem, + ModelPackage, + ModelPackageGroup, + ModelPackageGroupSummary, ModelPackageSummary, + ModelStepMetadata, ModelSummary, - MonitoringExecutionSummary, MonitoringScheduleSummary, NestedFilters, NotebookInstanceLifecycleConfigSummary, NotebookInstanceSummary, - ObjectiveStatusCounters, - OidcConfigForResponse, + Parameter, Parent, + Pipeline, + PipelineExecution, + PipelineExecutionStep, + PipelineExecutionStepMetadata, + PipelineExecutionSummary, + PipelineSummary, ProcessingJob, + ProcessingJobStepMetadata, ProcessingJobSummary, - ProductionVariantSummary, - PropertyNameQuery, - PropertyNameSuggestion, + ProjectSummary, + PutModelPackageGroupPolicyInput, + PutModelPackageGroupPolicyOutput, + RegisterModelStepMetadata, RenderUiTemplateRequest, RenderUiTemplateResponse, 
RenderableTask, RenderingError, - ResolvedAttributes, + SearchExpression, SearchRecord, + SearchRequest, SearchResponse, - SecondaryStatusTransition, StartMonitoringScheduleRequest, StartNotebookInstanceInput, + StartPipelineExecutionRequest, + StartPipelineExecutionResponse, StopAutoMLJobRequest, StopCompilationJobRequest, StopHyperParameterTuningJobRequest, StopLabelingJobRequest, StopMonitoringScheduleRequest, StopNotebookInstanceInput, + StopPipelineExecutionRequest, + StopPipelineExecutionResponse, StopProcessingJobRequest, StopTrainingJobRequest, StopTransformJobRequest, - SubscribedWorkteam, - SuggestionQuery, TrainingJob, - TrainingJobStatusCounters, + TrainingJobStepMetadata, TrainingJobSummary, TransformJob, + TransformJobStepMetadata, TransformJobSummary, Trial, TrialComponent, - TrialComponentMetricSummary, TrialComponentSimpleSummary, - TrialComponentSource, TrialComponentSourceDetail, TrialComponentSummary, - TrialSource, TrialSummary, - UiTemplateInfo, + UpdateActionRequest, + UpdateActionResponse, UpdateAppImageConfigRequest, UpdateAppImageConfigResponse, + UpdateArtifactRequest, + UpdateArtifactResponse, UpdateCodeRepositoryInput, UpdateCodeRepositoryOutput, + UpdateContextRequest, + UpdateContextResponse, UpdateDomainRequest, UpdateDomainResponse, - UserContext, - UserProfileDetails, - VariantProperty, - Workforce, - Workteam, -} from "../models/models_1"; -import { - SearchExpression, - SearchRequest, UpdateEndpointInput, UpdateEndpointOutput, UpdateEndpointWeightsAndCapacitiesInput, @@ -860,12 +1113,18 @@ import { UpdateExperimentResponse, UpdateImageRequest, UpdateImageResponse, + UpdateModelPackageInput, + UpdateModelPackageOutput, UpdateMonitoringScheduleRequest, UpdateMonitoringScheduleResponse, UpdateNotebookInstanceInput, UpdateNotebookInstanceLifecycleConfigInput, UpdateNotebookInstanceLifecycleConfigOutput, UpdateNotebookInstanceOutput, + UpdatePipelineExecutionRequest, + UpdatePipelineExecutionResponse, + UpdatePipelineRequest, + 
UpdatePipelineResponse, UpdateTrialComponentRequest, UpdateTrialComponentResponse, UpdateTrialRequest, @@ -876,6 +1135,8 @@ import { UpdateWorkforceResponse, UpdateWorkteamRequest, UpdateWorkteamResponse, + UserProfileDetails, + VariantProperty, } from "../models/models_2"; import { HttpRequest as __HttpRequest, HttpResponse as __HttpResponse } from "@aws-sdk/protocol-http"; import { LazyJsonString as __LazyJsonString, SmithyException as __SmithyException } from "@aws-sdk/smithy-client"; @@ -888,6 +1149,19 @@ import { } from "@aws-sdk/types"; import { v4 as generateIdempotencyToken } from "uuid"; +export const serializeAws_json1_1AddAssociationCommand = async ( + input: AddAssociationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.AddAssociation", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1AddAssociationRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1AddTagsCommand = async ( input: AddTagsCommandInput, context: __SerdeContext @@ -914,6 +1188,19 @@ export const serializeAws_json1_1AssociateTrialComponentCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreateActionCommand = async ( + input: CreateActionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreateAction", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreateActionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreateAlgorithmCommand = async ( input: CreateAlgorithmCommandInput, context: __SerdeContext @@ -953,6 +1240,19 @@ export const 
serializeAws_json1_1CreateAppImageConfigCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreateArtifactCommand = async ( + input: CreateArtifactCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreateArtifact", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreateArtifactRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreateAutoMLJobCommand = async ( input: CreateAutoMLJobCommandInput, context: __SerdeContext @@ -992,6 +1292,19 @@ export const serializeAws_json1_1CreateCompilationJobCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreateContextCommand = async ( + input: CreateContextCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreateContext", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreateContextRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreateDomainCommand = async ( input: CreateDomainCommandInput, context: __SerdeContext @@ -1044,6 +1357,19 @@ export const serializeAws_json1_1CreateExperimentCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreateFeatureGroupCommand = async ( + input: CreateFeatureGroupCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreateFeatureGroup", + }; + let body: any; + body = 
JSON.stringify(serializeAws_json1_1CreateFeatureGroupRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreateFlowDefinitionCommand = async ( input: CreateFlowDefinitionCommandInput, context: __SerdeContext @@ -1148,6 +1474,19 @@ export const serializeAws_json1_1CreateModelPackageCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreateModelPackageGroupCommand = async ( + input: CreateModelPackageGroupCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreateModelPackageGroup", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreateModelPackageGroupInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreateMonitoringScheduleCommand = async ( input: CreateMonitoringScheduleCommandInput, context: __SerdeContext @@ -1187,6 +1526,19 @@ export const serializeAws_json1_1CreateNotebookInstanceLifecycleConfigCommand = return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreatePipelineCommand = async ( + input: CreatePipelineCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreatePipeline", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreatePipelineRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreatePresignedDomainUrlCommand = async ( input: CreatePresignedDomainUrlCommandInput, context: __SerdeContext @@ -1226,6 +1578,19 @@ export const serializeAws_json1_1CreateProcessingJobCommand = async ( return 
buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1CreateProjectCommand = async ( + input: CreateProjectCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.CreateProject", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1CreateProjectInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1CreateTrainingJobCommand = async ( input: CreateTrainingJobCommandInput, context: __SerdeContext @@ -1317,6 +1682,19 @@ export const serializeAws_json1_1CreateWorkteamCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DeleteActionCommand = async ( + input: DeleteActionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteAction", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteActionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DeleteAlgorithmCommand = async ( input: DeleteAlgorithmCommandInput, context: __SerdeContext @@ -1356,16 +1734,55 @@ export const serializeAws_json1_1DeleteAppImageConfigCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; -export const serializeAws_json1_1DeleteCodeRepositoryCommand = async ( - input: DeleteCodeRepositoryCommandInput, +export const serializeAws_json1_1DeleteArtifactCommand = async ( + input: DeleteArtifactCommandInput, context: __SerdeContext ): Promise<__HttpRequest> => { const headers: __HeaderBag = { "Content-Type": "application/x-amz-json-1.1", - "X-Amz-Target": "SageMaker.DeleteCodeRepository", + "X-Amz-Target": 
"SageMaker.DeleteArtifact", }; let body: any; - body = JSON.stringify(serializeAws_json1_1DeleteCodeRepositoryInput(input, context)); + body = JSON.stringify(serializeAws_json1_1DeleteArtifactRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DeleteAssociationCommand = async ( + input: DeleteAssociationCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteAssociation", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteAssociationRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DeleteCodeRepositoryCommand = async ( + input: DeleteCodeRepositoryCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteCodeRepository", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteCodeRepositoryInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DeleteContextCommand = async ( + input: DeleteContextCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteContext", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteContextRequest(input, context)); return buildHttpRpcRequest(context, headers, "/", undefined, body); }; @@ -1421,6 +1838,19 @@ export const serializeAws_json1_1DeleteExperimentCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DeleteFeatureGroupCommand = async ( + input: 
DeleteFeatureGroupCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteFeatureGroup", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteFeatureGroupRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DeleteFlowDefinitionCommand = async ( input: DeleteFlowDefinitionCommandInput, context: __SerdeContext @@ -1499,6 +1929,32 @@ export const serializeAws_json1_1DeleteModelPackageCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DeleteModelPackageGroupCommand = async ( + input: DeleteModelPackageGroupCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteModelPackageGroup", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteModelPackageGroupInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DeleteModelPackageGroupPolicyCommand = async ( + input: DeleteModelPackageGroupPolicyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteModelPackageGroupPolicy", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteModelPackageGroupPolicyInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DeleteMonitoringScheduleCommand = async ( input: DeleteMonitoringScheduleCommandInput, context: __SerdeContext @@ -1538,6 +1994,32 @@ export const serializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommand = return 
buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DeletePipelineCommand = async ( + input: DeletePipelineCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeletePipeline", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeletePipelineRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DeleteProjectCommand = async ( + input: DeleteProjectCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DeleteProject", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DeleteProjectInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DeleteTagsCommand = async ( input: DeleteTagsCommandInput, context: __SerdeContext @@ -1616,6 +2098,19 @@ export const serializeAws_json1_1DeleteWorkteamCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribeActionCommand = async ( + input: DescribeActionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribeAction", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeActionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeAlgorithmCommand = async ( input: DescribeAlgorithmCommandInput, context: __SerdeContext @@ -1655,6 +2150,19 @@ export const serializeAws_json1_1DescribeAppImageConfigCommand = async ( return 
buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribeArtifactCommand = async ( + input: DescribeArtifactCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribeArtifact", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeArtifactRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeAutoMLJobCommand = async ( input: DescribeAutoMLJobCommandInput, context: __SerdeContext @@ -1694,6 +2202,19 @@ export const serializeAws_json1_1DescribeCompilationJobCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribeContextCommand = async ( + input: DescribeContextCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribeContext", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeContextRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeDomainCommand = async ( input: DescribeDomainCommandInput, context: __SerdeContext @@ -1746,6 +2267,19 @@ export const serializeAws_json1_1DescribeExperimentCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribeFeatureGroupCommand = async ( + input: DescribeFeatureGroupCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribeFeatureGroup", + }; + let body: any; + body = 
JSON.stringify(serializeAws_json1_1DescribeFeatureGroupRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeFlowDefinitionCommand = async ( input: DescribeFlowDefinitionCommandInput, context: __SerdeContext @@ -1850,6 +2384,19 @@ export const serializeAws_json1_1DescribeModelPackageCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribeModelPackageGroupCommand = async ( + input: DescribeModelPackageGroupCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribeModelPackageGroup", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeModelPackageGroupInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeMonitoringScheduleCommand = async ( input: DescribeMonitoringScheduleCommandInput, context: __SerdeContext @@ -1889,6 +2436,45 @@ export const serializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommand return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribePipelineCommand = async ( + input: DescribePipelineCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribePipeline", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribePipelineRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DescribePipelineDefinitionForExecutionCommand = async ( + input: DescribePipelineDefinitionForExecutionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: 
__HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribePipelineDefinitionForExecution", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribePipelineDefinitionForExecutionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1DescribePipelineExecutionCommand = async ( + input: DescribePipelineExecutionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribePipelineExecution", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribePipelineExecutionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeProcessingJobCommand = async ( input: DescribeProcessingJobCommandInput, context: __SerdeContext @@ -1902,6 +2488,19 @@ export const serializeAws_json1_1DescribeProcessingJobCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1DescribeProjectCommand = async ( + input: DescribeProjectCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DescribeProject", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DescribeProjectInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DescribeSubscribedWorkteamCommand = async ( input: DescribeSubscribedWorkteamCommandInput, context: __SerdeContext @@ -2006,6 +2605,19 @@ export const serializeAws_json1_1DescribeWorkteamCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const 
serializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommand = async ( + input: DisableSagemakerServicecatalogPortfolioCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.DisableSagemakerServicecatalogPortfolio", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1DisableSagemakerServicecatalogPortfolioInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1DisassociateTrialComponentCommand = async ( input: DisassociateTrialComponentCommandInput, context: __SerdeContext @@ -2019,6 +2631,45 @@ export const serializeAws_json1_1DisassociateTrialComponentCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommand = async ( + input: EnableSagemakerServicecatalogPortfolioCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.EnableSagemakerServicecatalogPortfolio", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1EnableSagemakerServicecatalogPortfolioInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1GetModelPackageGroupPolicyCommand = async ( + input: GetModelPackageGroupPolicyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.GetModelPackageGroupPolicy", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1GetModelPackageGroupPolicyInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const 
serializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommand = async ( + input: GetSagemakerServicecatalogPortfolioStatusCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.GetSagemakerServicecatalogPortfolioStatus", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1GetSearchSuggestionsCommand = async ( input: GetSearchSuggestionsCommandInput, context: __SerdeContext @@ -2032,6 +2683,19 @@ export const serializeAws_json1_1GetSearchSuggestionsCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1ListActionsCommand = async ( + input: ListActionsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListActions", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListActionsRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1ListAlgorithmsCommand = async ( input: ListAlgorithmsCommandInput, context: __SerdeContext @@ -2071,6 +2735,32 @@ export const serializeAws_json1_1ListAppsCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1ListArtifactsCommand = async ( + input: ListArtifactsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListArtifacts", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListArtifactsRequest(input, context)); + return 
buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1ListAssociationsCommand = async ( + input: ListAssociationsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListAssociations", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListAssociationsRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1ListAutoMLJobsCommand = async ( input: ListAutoMLJobsCommandInput, context: __SerdeContext @@ -2123,6 +2813,19 @@ export const serializeAws_json1_1ListCompilationJobsCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1ListContextsCommand = async ( + input: ListContextsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListContexts", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListContextsRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1ListDomainsCommand = async ( input: ListDomainsCommandInput, context: __SerdeContext @@ -2175,6 +2878,19 @@ export const serializeAws_json1_1ListExperimentsCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1ListFeatureGroupsCommand = async ( + input: ListFeatureGroupsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListFeatureGroups", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListFeatureGroupsRequest(input, context)); + return 
buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1ListFlowDefinitionsCommand = async ( input: ListFlowDefinitionsCommandInput, context: __SerdeContext @@ -2266,6 +2982,19 @@ export const serializeAws_json1_1ListLabelingJobsForWorkteamCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1ListModelPackageGroupsCommand = async ( + input: ListModelPackageGroupsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListModelPackageGroups", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListModelPackageGroupsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1ListModelPackagesCommand = async ( input: ListModelPackagesCommandInput, context: __SerdeContext @@ -2344,33 +3073,98 @@ export const serializeAws_json1_1ListNotebookInstancesCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; -export const serializeAws_json1_1ListProcessingJobsCommand = async ( - input: ListProcessingJobsCommandInput, +export const serializeAws_json1_1ListPipelineExecutionsCommand = async ( + input: ListPipelineExecutionsCommandInput, context: __SerdeContext ): Promise<__HttpRequest> => { const headers: __HeaderBag = { "Content-Type": "application/x-amz-json-1.1", - "X-Amz-Target": "SageMaker.ListProcessingJobs", + "X-Amz-Target": "SageMaker.ListPipelineExecutions", }; let body: any; - body = JSON.stringify(serializeAws_json1_1ListProcessingJobsRequest(input, context)); + body = JSON.stringify(serializeAws_json1_1ListPipelineExecutionsRequest(input, context)); return buildHttpRpcRequest(context, headers, "/", undefined, body); }; -export const serializeAws_json1_1ListSubscribedWorkteamsCommand = async ( - input: 
ListSubscribedWorkteamsCommandInput, +export const serializeAws_json1_1ListPipelineExecutionStepsCommand = async ( + input: ListPipelineExecutionStepsCommandInput, context: __SerdeContext ): Promise<__HttpRequest> => { const headers: __HeaderBag = { "Content-Type": "application/x-amz-json-1.1", - "X-Amz-Target": "SageMaker.ListSubscribedWorkteams", + "X-Amz-Target": "SageMaker.ListPipelineExecutionSteps", }; let body: any; - body = JSON.stringify(serializeAws_json1_1ListSubscribedWorkteamsRequest(input, context)); + body = JSON.stringify(serializeAws_json1_1ListPipelineExecutionStepsRequest(input, context)); return buildHttpRpcRequest(context, headers, "/", undefined, body); }; -export const serializeAws_json1_1ListTagsCommand = async ( +export const serializeAws_json1_1ListPipelineParametersForExecutionCommand = async ( + input: ListPipelineParametersForExecutionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListPipelineParametersForExecution", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListPipelineParametersForExecutionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1ListPipelinesCommand = async ( + input: ListPipelinesCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListPipelines", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListPipelinesRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1ListProcessingJobsCommand = async ( + input: ListProcessingJobsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": 
"application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListProcessingJobs", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListProcessingJobsRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1ListProjectsCommand = async ( + input: ListProjectsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListProjects", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListProjectsInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1ListSubscribedWorkteamsCommand = async ( + input: ListSubscribedWorkteamsCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.ListSubscribedWorkteams", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1ListSubscribedWorkteamsRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1ListTagsCommand = async ( input: ListTagsCommandInput, context: __SerdeContext ): Promise<__HttpRequest> => { @@ -2487,6 +3281,19 @@ export const serializeAws_json1_1ListWorkteamsCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1PutModelPackageGroupPolicyCommand = async ( + input: PutModelPackageGroupPolicyCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.PutModelPackageGroupPolicy", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1PutModelPackageGroupPolicyInput(input, context)); + return 
buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1RenderUiTemplateCommand = async ( input: RenderUiTemplateCommandInput, context: __SerdeContext @@ -2539,6 +3346,19 @@ export const serializeAws_json1_1StartNotebookInstanceCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1StartPipelineExecutionCommand = async ( + input: StartPipelineExecutionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.StartPipelineExecution", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1StartPipelineExecutionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1StopAutoMLJobCommand = async ( input: StopAutoMLJobCommandInput, context: __SerdeContext @@ -2617,6 +3437,19 @@ export const serializeAws_json1_1StopNotebookInstanceCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1StopPipelineExecutionCommand = async ( + input: StopPipelineExecutionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.StopPipelineExecution", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1StopPipelineExecutionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1StopProcessingJobCommand = async ( input: StopProcessingJobCommandInput, context: __SerdeContext @@ -2656,6 +3489,19 @@ export const serializeAws_json1_1StopTransformJobCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1UpdateActionCommand = 
async ( + input: UpdateActionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.UpdateAction", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UpdateActionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1UpdateAppImageConfigCommand = async ( input: UpdateAppImageConfigCommandInput, context: __SerdeContext @@ -2669,6 +3515,19 @@ export const serializeAws_json1_1UpdateAppImageConfigCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1UpdateArtifactCommand = async ( + input: UpdateArtifactCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.UpdateArtifact", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UpdateArtifactRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1UpdateCodeRepositoryCommand = async ( input: UpdateCodeRepositoryCommandInput, context: __SerdeContext @@ -2682,6 +3541,19 @@ export const serializeAws_json1_1UpdateCodeRepositoryCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1UpdateContextCommand = async ( + input: UpdateContextCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.UpdateContext", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UpdateContextRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1UpdateDomainCommand = 
async ( input: UpdateDomainCommandInput, context: __SerdeContext @@ -2747,6 +3619,19 @@ export const serializeAws_json1_1UpdateImageCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1UpdateModelPackageCommand = async ( + input: UpdateModelPackageCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.UpdateModelPackage", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UpdateModelPackageInput(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + export const serializeAws_json1_1UpdateMonitoringScheduleCommand = async ( input: UpdateMonitoringScheduleCommandInput, context: __SerdeContext @@ -2786,6 +3671,32 @@ export const serializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommand = return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const serializeAws_json1_1UpdatePipelineCommand = async ( + input: UpdatePipelineCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.UpdatePipeline", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UpdatePipelineRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + +export const serializeAws_json1_1UpdatePipelineExecutionCommand = async ( + input: UpdatePipelineExecutionCommandInput, + context: __SerdeContext +): Promise<__HttpRequest> => { + const headers: __HeaderBag = { + "Content-Type": "application/x-amz-json-1.1", + "X-Amz-Target": "SageMaker.UpdatePipelineExecution", + }; + let body: any; + body = JSON.stringify(serializeAws_json1_1UpdatePipelineExecutionRequest(input, context)); + return buildHttpRpcRequest(context, headers, "/", undefined, body); +}; + 
export const serializeAws_json1_1UpdateTrialCommand = async ( input: UpdateTrialCommandInput, context: __SerdeContext @@ -2851,6 +3762,69 @@ export const serializeAws_json1_1UpdateWorkteamCommand = async ( return buildHttpRpcRequest(context, headers, "/", undefined, body); }; +export const deserializeAws_json1_1AddAssociationCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1AddAssociationCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1AddAssociationResponse(data, context); + const response: AddAssociationCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1AddAssociationCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1AddTagsCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -2961,6 +3935,61 @@ const deserializeAws_json1_1AssociateTrialComponentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_json1_1CreateActionCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreateActionCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateActionResponse(data, context); + const response: CreateActionCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateActionCommandError = async ( + 
output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1CreateAlgorithmCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -3126,27 +4155,27 @@ const deserializeAws_json1_1CreateAppImageConfigCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1CreateAutoMLJobCommand = async ( +export const deserializeAws_json1_1CreateArtifactCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1CreateAutoMLJobCommandError(output, context); + return deserializeAws_json1_1CreateArtifactCommandError(output, context); } const data: any = await parseBody(output.body, context); let 
contents: any = {}; - contents = deserializeAws_json1_1CreateAutoMLJobResponse(data, context); - const response: CreateAutoMLJobCommandOutput = { + contents = deserializeAws_json1_1CreateArtifactResponse(data, context); + const response: CreateArtifactCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1CreateAutoMLJobCommandError = async ( +const deserializeAws_json1_1CreateArtifactCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -3156,14 +4185,6 @@ const deserializeAws_json1_1CreateAutoMLJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; case "ResourceLimitExceeded": case "com.amazonaws.sagemaker#ResourceLimitExceeded": response = { @@ -3189,7 +4210,70 @@ const deserializeAws_json1_1CreateAutoMLJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1CreateCodeRepositoryCommand = async ( +export const deserializeAws_json1_1CreateAutoMLJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreateAutoMLJobCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateAutoMLJobResponse(data, context); + const response: CreateAutoMLJobCommandOutput = { + $metadata: 
deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateAutoMLJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1CreateCodeRepositoryCommand = async ( output: __HttpResponse, context: __SerdeContext ): Promise => { @@ -3299,6 +4383,61 @@ const deserializeAws_json1_1CreateCompilationJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const 
deserializeAws_json1_1CreateContextCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreateContextCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateContextResponse(data, context); + const response: CreateContextCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateContextCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1CreateDomainCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -3527,6 +4666,69 @@ const deserializeAws_json1_1CreateExperimentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_json1_1CreateFeatureGroupCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreateFeatureGroupCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateFeatureGroupResponse(data, context); + const response: CreateFeatureGroupCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateFeatureGroupCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: 
string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1CreateFlowDefinitionCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -4031,6 +5233,61 @@ const deserializeAws_json1_1CreateModelPackageCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_json1_1CreateModelPackageGroupCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreateModelPackageGroupCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateModelPackageGroupOutput(data, context); + const 
response: CreateModelPackageGroupCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateModelPackageGroupCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1CreateMonitoringScheduleCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -4204,27 +5461,27 @@ const deserializeAws_json1_1CreateNotebookInstanceLifecycleConfigCommandError = return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1CreatePresignedDomainUrlCommand = async ( +export const deserializeAws_json1_1CreatePipelineCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): 
Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1CreatePresignedDomainUrlCommandError(output, context); + return deserializeAws_json1_1CreatePipelineCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1CreatePresignedDomainUrlResponse(data, context); - const response: CreatePresignedDomainUrlCommandOutput = { + contents = deserializeAws_json1_1CreatePipelineResponse(data, context); + const response: CreatePipelineCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1CreatePresignedDomainUrlCommandError = async ( +const deserializeAws_json1_1CreatePipelineCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -4234,6 +5491,14 @@ const deserializeAws_json1_1CreatePresignedDomainUrlCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -4259,27 +5524,27 @@ const deserializeAws_json1_1CreatePresignedDomainUrlCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommand = async ( +export const deserializeAws_json1_1CreatePresignedDomainUrlCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommandError(output, context); + return deserializeAws_json1_1CreatePresignedDomainUrlCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1CreatePresignedNotebookInstanceUrlOutput(data, context); - const response: CreatePresignedNotebookInstanceUrlCommandOutput = { + contents = deserializeAws_json1_1CreatePresignedDomainUrlResponse(data, context); + const response: CreatePresignedDomainUrlCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommandError = async ( +const deserializeAws_json1_1CreatePresignedDomainUrlCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -4289,6 +5554,14 @@ const deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommandError = asy const errorTypeParts: String = 
parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -4306,10 +5579,57 @@ const deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommandError = asy return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1CreateProcessingJobCommand = async ( +export const deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreatePresignedNotebookInstanceUrlOutput(data, context); + const response: CreatePresignedNotebookInstanceUrlCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreatePresignedNotebookInstanceUrlCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1CreateProcessingJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { if (output.statusCode >= 300) { return deserializeAws_json1_1CreateProcessingJobCommandError(output, context); } @@ -4377,6 +5697,61 @@ const deserializeAws_json1_1CreateProcessingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; +export const deserializeAws_json1_1CreateProjectCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1CreateProjectCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1CreateProjectOutput(data, context); + const response: CreateProjectCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1CreateProjectCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = 
errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + export const deserializeAws_json1_1CreateTrainingJobCommand = async ( output: __HttpResponse, context: __SerdeContext @@ -4810,24 +6185,27 @@ const deserializeAws_json1_1CreateWorkteamCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteAlgorithmCommand = async ( +export const deserializeAws_json1_1DeleteActionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteAlgorithmCommandError(output, context); + return deserializeAws_json1_1DeleteActionCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteAlgorithmCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteActionResponse(data, context); + const response: DeleteActionCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const 
deserializeAws_json1_1DeleteAlgorithmCommandError = async ( +const deserializeAws_json1_1DeleteActionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -4837,6 +6215,14 @@ const deserializeAws_json1_1DeleteAlgorithmCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -4854,24 +6240,24 @@ const deserializeAws_json1_1DeleteAlgorithmCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteAppCommand = async ( +export const deserializeAws_json1_1DeleteAlgorithmCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteAppCommandError(output, context); + return deserializeAws_json1_1DeleteAlgorithmCommandError(output, context); } await collectBody(output.body, context); - const response: DeleteAppCommandOutput = { + const response: DeleteAlgorithmCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteAppCommandError = async ( +const deserializeAws_json1_1DeleteAlgorithmCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -4881,22 
+6267,6 @@ const deserializeAws_json1_1DeleteAppCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -4914,24 +6284,24 @@ const deserializeAws_json1_1DeleteAppCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteAppImageConfigCommand = async ( +export const deserializeAws_json1_1DeleteAppCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteAppImageConfigCommandError(output, context); + return deserializeAws_json1_1DeleteAppCommandError(output, context); } await collectBody(output.body, context); - const response: DeleteAppImageConfigCommandOutput = { + const response: DeleteAppCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteAppImageConfigCommandError = async ( +const deserializeAws_json1_1DeleteAppCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -4941,6 +6311,14 @@ const 
deserializeAws_json1_1DeleteAppImageConfigCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -4966,24 +6344,24 @@ const deserializeAws_json1_1DeleteAppImageConfigCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteCodeRepositoryCommand = async ( +export const deserializeAws_json1_1DeleteAppImageConfigCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteCodeRepositoryCommandError(output, context); + return deserializeAws_json1_1DeleteAppImageConfigCommandError(output, context); } await collectBody(output.body, context); - const response: DeleteCodeRepositoryCommandOutput = { + const response: DeleteAppImageConfigCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteCodeRepositoryCommandError = async ( +const deserializeAws_json1_1DeleteAppImageConfigCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -4993,6 +6371,14 @@ const deserializeAws_json1_1DeleteCodeRepositoryCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5010,24 +6396,27 @@ const deserializeAws_json1_1DeleteCodeRepositoryCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteDomainCommand = async ( +export const deserializeAws_json1_1DeleteArtifactCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteDomainCommandError(output, context); + return deserializeAws_json1_1DeleteArtifactCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteDomainCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteArtifactResponse(data, context); + const response: DeleteArtifactCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteDomainCommandError = async ( +const deserializeAws_json1_1DeleteArtifactCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5037,14 +6426,6 @@ const deserializeAws_json1_1DeleteDomainCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -5070,24 +6451,27 @@ const deserializeAws_json1_1DeleteDomainCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteEndpointCommand = async ( +export const deserializeAws_json1_1DeleteAssociationCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteEndpointCommandError(output, context); + return deserializeAws_json1_1DeleteAssociationCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteEndpointCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteAssociationResponse(data, context); + const response: DeleteAssociationCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteEndpointCommandError = async ( +const deserializeAws_json1_1DeleteAssociationCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5097,6 +6481,14 @@ const deserializeAws_json1_1DeleteEndpointCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5114,24 +6506,24 @@ const deserializeAws_json1_1DeleteEndpointCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteEndpointConfigCommand = async ( +export const deserializeAws_json1_1DeleteCodeRepositoryCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteEndpointConfigCommandError(output, context); + return deserializeAws_json1_1DeleteCodeRepositoryCommandError(output, context); } await collectBody(output.body, context); - const response: DeleteEndpointConfigCommandOutput = { + const response: DeleteCodeRepositoryCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteEndpointConfigCommandError = async ( +const deserializeAws_json1_1DeleteCodeRepositoryCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5158,27 +6550,27 @@ const deserializeAws_json1_1DeleteEndpointConfigCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteExperimentCommand = async ( +export const deserializeAws_json1_1DeleteContextCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return 
deserializeAws_json1_1DeleteExperimentCommandError(output, context); + return deserializeAws_json1_1DeleteContextCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DeleteExperimentResponse(data, context); - const response: DeleteExperimentCommandOutput = { + contents = deserializeAws_json1_1DeleteContextResponse(data, context); + const response: DeleteContextCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteExperimentCommandError = async ( +const deserializeAws_json1_1DeleteContextCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5213,27 +6605,24 @@ const deserializeAws_json1_1DeleteExperimentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteFlowDefinitionCommand = async ( +export const deserializeAws_json1_1DeleteDomainCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteFlowDefinitionCommandError(output, context); + return deserializeAws_json1_1DeleteDomainCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1DeleteFlowDefinitionResponse(data, context); - const response: DeleteFlowDefinitionCommandOutput = { + await collectBody(output.body, context); + const response: DeleteDomainCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteFlowDefinitionCommandError = async ( +const deserializeAws_json1_1DeleteDomainCommandError = async ( output: 
__HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5276,10 +6665,268 @@ const deserializeAws_json1_1DeleteFlowDefinitionCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteHumanTaskUiCommand = async ( +export const deserializeAws_json1_1DeleteEndpointCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteEndpointCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteEndpointCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteEndpointCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteEndpointConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteEndpointConfigCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteEndpointConfigCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteEndpointConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteExperimentCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteExperimentCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteExperimentResponse(data, context); + const response: DeleteExperimentCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteExperimentCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteFeatureGroupCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteFeatureGroupCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteFeatureGroupCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteFeatureGroupCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteFlowDefinitionCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteFlowDefinitionCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteFlowDefinitionResponse(data, context); + const response: DeleteFlowDefinitionCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteFlowDefinitionCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteHumanTaskUiCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { if (output.statusCode >= 300) { return deserializeAws_json1_1DeleteHumanTaskUiCommandError(output, context); } @@ -5394,27 +7041,1994 @@ const deserializeAws_json1_1DeleteImageCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteImageVersionCommand = async ( +export const deserializeAws_json1_1DeleteImageVersionCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteImageVersionCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteImageVersionResponse(data, context); + const response: 
DeleteImageVersionCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteImageVersionCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteModelCommandError(output, context); + } + await collectBody(output.body, context); + const response: 
DeleteModelCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteModelPackageCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteModelPackageCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteModelPackageCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteModelPackageCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = 
parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ConflictException": + case "com.amazonaws.sagemaker#ConflictException": + response = { + ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteModelPackageGroupCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteModelPackageGroupCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteModelPackageGroupCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteModelPackageGroupCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteModelPackageGroupPolicyCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteModelPackageGroupPolicyCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteModelPackageGroupPolicyCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteModelPackageGroupPolicyCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteMonitoringScheduleCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteMonitoringScheduleCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteMonitoringScheduleCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteMonitoringScheduleCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteNotebookInstanceCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteNotebookInstanceCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteNotebookInstanceCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteNotebookInstanceCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteNotebookInstanceLifecycleConfigCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeletePipelineCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeletePipelineCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeletePipelineResponse(data, context); + const response: DeletePipelineCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeletePipelineCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteProjectCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteProjectCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteProjectCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteProjectCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteTagsCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteTagsCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteTagsOutput(data, context); + const response: DeleteTagsCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteTagsCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteTrialCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteTrialCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteTrialResponse(data, context); + const response: DeleteTrialCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteTrialCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteTrialComponentCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteTrialComponentCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteTrialComponentResponse(data, context); + const response: DeleteTrialComponentCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteTrialComponentCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteUserProfileCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteUserProfileCommandError(output, context); + } + await collectBody(output.body, context); + const response: DeleteUserProfileCommandOutput = { + $metadata: deserializeMetadata(output), + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteUserProfileCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteWorkforceCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteWorkforceCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteWorkforceResponse(data, context); + const response: DeleteWorkforceCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteWorkforceCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + 
const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DeleteWorkteamCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DeleteWorkteamCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DeleteWorkteamResponse(data, context); + const response: DeleteWorkteamCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DeleteWorkteamCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeActionCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeActionCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeActionResponse(data, context); + const response: DescribeActionCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeActionCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeAlgorithmCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeAlgorithmCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeAlgorithmOutput(data, context); + const response: DescribeAlgorithmCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeAlgorithmCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeAppCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeAppCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeAppResponse(data, context); + const response: DescribeAppCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeAppCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeAppImageConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeAppImageConfigCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeAppImageConfigResponse(data, context); + const response: DescribeAppImageConfigCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeAppImageConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeArtifactCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeArtifactCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeArtifactResponse(data, context); + const response: DescribeArtifactCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeArtifactCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeAutoMLJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeAutoMLJobCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeAutoMLJobResponse(data, context); + const response: DescribeAutoMLJobCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeAutoMLJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeCodeRepositoryCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeCodeRepositoryCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeCodeRepositoryOutput(data, context); + const response: DescribeCodeRepositoryCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeCodeRepositoryCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeCompilationJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeCompilationJobCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeCompilationJobResponse(data, context); + const response: DescribeCompilationJobCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeCompilationJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeContextCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeContextCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeContextResponse(data, context); + const response: DescribeContextCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeContextCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeDomainCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeDomainCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeDomainResponse(data, context); + const response: DescribeDomainCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeDomainCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeEndpointCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeEndpointCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeEndpointOutput(data, context); + const response: DescribeEndpointCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeEndpointCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeEndpointConfigCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeEndpointConfigCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeEndpointConfigOutput(data, context); + const response: DescribeEndpointConfigCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeEndpointConfigCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeExperimentCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeExperimentCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeExperimentResponse(data, context); + const response: DescribeExperimentCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeExperimentCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeFeatureGroupCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeFeatureGroupCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeFeatureGroupResponse(data, context); + const response: DescribeFeatureGroupCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeFeatureGroupCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeFlowDefinitionCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeFlowDefinitionCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeFlowDefinitionResponse(data, context); + const response: DescribeFlowDefinitionCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeFlowDefinitionCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeHumanTaskUiCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeHumanTaskUiCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeHumanTaskUiResponse(data, context); + const response: DescribeHumanTaskUiCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeHumanTaskUiCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeHyperParameterTuningJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeHyperParameterTuningJobCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeHyperParameterTuningJobResponse(data, context); + const response: DescribeHyperParameterTuningJobCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeHyperParameterTuningJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeImageCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeImageCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeImageResponse(data, context); + const response: DescribeImageCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeImageCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeImageVersionCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeImageVersionCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeImageVersionResponse(data, context); + const response: DescribeImageVersionCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeImageVersionCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeLabelingJobCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeLabelingJobCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeLabelingJobResponse(data, context); + const response: DescribeLabelingJobCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeLabelingJobCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeModelCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeModelCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeModelOutput(data, context); + const response: DescribeModelCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeModelCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeModelPackageCommand = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + if (output.statusCode >= 300) { + return deserializeAws_json1_1DescribeModelPackageCommandError(output, context); + } + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeModelPackageOutput(data, context); + const response: DescribeModelPackageCommandOutput = { + $metadata: deserializeMetadata(output), + ...contents, + }; + return Promise.resolve(response); +}; + +const deserializeAws_json1_1DescribeModelPackageCommandError = async ( + output: __HttpResponse, + context: __SerdeContext +): Promise => { + const parsedOutput: any = { + ...output, + body: await parseBody(output.body, context), + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { + default: + const parsedBody = parsedOutput.body; + errorCode = parsedBody.code || parsedBody.Code || errorCode; + response = { + ...parsedBody, + name: `${errorCode}`, + message: parsedBody.message || parsedBody.Message || errorCode, + $fault: "client", + $metadata: deserializeMetadata(output), + } as any; + } + const message = response.message || response.Message || errorCode; + response.message = message; + delete response.Message; + return Promise.reject(Object.assign(new Error(message), response)); +}; + +export const deserializeAws_json1_1DescribeModelPackageGroupCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteImageVersionCommandError(output, context); + return deserializeAws_json1_1DescribeModelPackageGroupCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DeleteImageVersionResponse(data, context); - const response: DeleteImageVersionCommandOutput = { + contents = deserializeAws_json1_1DescribeModelPackageGroupOutput(data, context); + const response: DescribeModelPackageGroupCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteImageVersionCommandError = async ( +const deserializeAws_json1_1DescribeModelPackageGroupCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5424,22 +9038,6 @@ const deserializeAws_json1_1DeleteImageVersionCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5457,24 +9055,27 @@ const deserializeAws_json1_1DeleteImageVersionCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteModelCommand = async ( +export const deserializeAws_json1_1DescribeMonitoringScheduleCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteModelCommandError(output, context); + return deserializeAws_json1_1DescribeMonitoringScheduleCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteModelCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeMonitoringScheduleResponse(data, context); + const response: DescribeMonitoringScheduleCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteModelCommandError = async ( +const deserializeAws_json1_1DescribeMonitoringScheduleCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5484,6 +9085,14 @@ 
const deserializeAws_json1_1DeleteModelCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5501,24 +9110,27 @@ const deserializeAws_json1_1DeleteModelCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteModelPackageCommand = async ( +export const deserializeAws_json1_1DescribeNotebookInstanceCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteModelPackageCommandError(output, context); + return deserializeAws_json1_1DescribeNotebookInstanceCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteModelPackageCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeNotebookInstanceOutput(data, context); + const response: DescribeNotebookInstanceCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteModelPackageCommandError = async ( +const deserializeAws_json1_1DescribeNotebookInstanceCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5528,14 +9140,6 @@ const 
deserializeAws_json1_1DeleteModelPackageCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ConflictException": - case "com.amazonaws.sagemaker#ConflictException": - response = { - ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5553,24 +9157,27 @@ const deserializeAws_json1_1DeleteModelPackageCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteMonitoringScheduleCommand = async ( +export const deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteMonitoringScheduleCommandError(output, context); + return deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteMonitoringScheduleCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigOutput(data, context); + const response: DescribeNotebookInstanceLifecycleConfigCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteMonitoringScheduleCommandError = async ( +const deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await 
parseBody(output.body, context), @@ -5580,14 +9187,6 @@ const deserializeAws_json1_1DeleteMonitoringScheduleCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5605,24 +9204,27 @@ const deserializeAws_json1_1DeleteMonitoringScheduleCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteNotebookInstanceCommand = async ( +export const deserializeAws_json1_1DescribePipelineCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteNotebookInstanceCommandError(output, context); + return deserializeAws_json1_1DescribePipelineCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteNotebookInstanceCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribePipelineResponse(data, context); + const response: DescribePipelineCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteNotebookInstanceCommandError = async ( +const deserializeAws_json1_1DescribePipelineCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5632,6 +9234,14 
@@ const deserializeAws_json1_1DeleteNotebookInstanceCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5649,24 +9259,27 @@ const deserializeAws_json1_1DeleteNotebookInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommand = async ( +export const deserializeAws_json1_1DescribePipelineDefinitionForExecutionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommandError(output, context); + return deserializeAws_json1_1DescribePipelineDefinitionForExecutionCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteNotebookInstanceLifecycleConfigCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribePipelineDefinitionForExecutionResponse(data, context); + const response: DescribePipelineDefinitionForExecutionCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommandError = async ( +const deserializeAws_json1_1DescribePipelineDefinitionForExecutionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): 
Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5676,6 +9289,14 @@ const deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommandError = const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5693,27 +9314,27 @@ const deserializeAws_json1_1DeleteNotebookInstanceLifecycleConfigCommandError = return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteTagsCommand = async ( +export const deserializeAws_json1_1DescribePipelineExecutionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteTagsCommandError(output, context); + return deserializeAws_json1_1DescribePipelineExecutionCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DeleteTagsOutput(data, context); - const response: DeleteTagsCommandOutput = { + contents = deserializeAws_json1_1DescribePipelineExecutionResponse(data, context); + const response: DescribePipelineExecutionCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteTagsCommandError = async ( +const deserializeAws_json1_1DescribePipelineExecutionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const 
parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5723,6 +9344,14 @@ const deserializeAws_json1_1DeleteTagsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5740,27 +9369,27 @@ const deserializeAws_json1_1DeleteTagsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteTrialCommand = async ( +export const deserializeAws_json1_1DescribeProcessingJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteTrialCommandError(output, context); + return deserializeAws_json1_1DescribeProcessingJobCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DeleteTrialResponse(data, context); - const response: DeleteTrialCommandOutput = { + contents = deserializeAws_json1_1DescribeProcessingJobResponse(data, context); + const response: DescribeProcessingJobCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteTrialCommandError = async ( +const deserializeAws_json1_1DescribeProcessingJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), 
@@ -5795,27 +9424,27 @@ const deserializeAws_json1_1DeleteTrialCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteTrialComponentCommand = async ( +export const deserializeAws_json1_1DescribeProjectCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteTrialComponentCommandError(output, context); + return deserializeAws_json1_1DescribeProjectCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DeleteTrialComponentResponse(data, context); - const response: DeleteTrialComponentCommandOutput = { + contents = deserializeAws_json1_1DescribeProjectOutput(data, context); + const response: DescribeProjectCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteTrialComponentCommandError = async ( +const deserializeAws_json1_1DescribeProjectCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5825,14 +9454,6 @@ const deserializeAws_json1_1DeleteTrialComponentCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5850,24 +9471,27 @@ const deserializeAws_json1_1DeleteTrialComponentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteUserProfileCommand = async ( +export const deserializeAws_json1_1DescribeSubscribedWorkteamCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteUserProfileCommandError(output, context); + return deserializeAws_json1_1DescribeSubscribedWorkteamCommandError(output, context); } - await collectBody(output.body, context); - const response: DeleteUserProfileCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1DescribeSubscribedWorkteamResponse(data, context); + const response: DescribeSubscribedWorkteamCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteUserProfileCommandError = async ( +const deserializeAws_json1_1DescribeSubscribedWorkteamCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5877,22 +9501,6 @@ const deserializeAws_json1_1DeleteUserProfileCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5910,27 +9518,27 @@ const deserializeAws_json1_1DeleteUserProfileCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteWorkforceCommand = async ( +export const deserializeAws_json1_1DescribeTrainingJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteWorkforceCommandError(output, context); + return deserializeAws_json1_1DescribeTrainingJobCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DeleteWorkforceResponse(data, context); - const response: DeleteWorkforceCommandOutput = { + contents = deserializeAws_json1_1DescribeTrainingJobResponse(data, context); + const response: DescribeTrainingJobCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteWorkforceCommandError = async ( +const deserializeAws_json1_1DescribeTrainingJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5940,6 +9548,14 
@@ const deserializeAws_json1_1DeleteWorkforceCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -5957,27 +9573,27 @@ const deserializeAws_json1_1DeleteWorkforceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DeleteWorkteamCommand = async ( +export const deserializeAws_json1_1DescribeTransformJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DeleteWorkteamCommandError(output, context); + return deserializeAws_json1_1DescribeTransformJobCommandError(output, context); } const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1DeleteWorkteamResponse(data, context); - const response: DeleteWorkteamCommandOutput = { + let contents: any = {}; + contents = deserializeAws_json1_1DescribeTransformJobResponse(data, context); + const response: DescribeTransformJobCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DeleteWorkteamCommandError = async ( +const deserializeAws_json1_1DescribeTransformJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -5987,10 +9603,10 @@ const 
deserializeAws_json1_1DeleteWorkteamCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -6012,27 +9628,27 @@ const deserializeAws_json1_1DeleteWorkteamCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeAlgorithmCommand = async ( +export const deserializeAws_json1_1DescribeTrialCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeAlgorithmCommandError(output, context); + return deserializeAws_json1_1DescribeTrialCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeAlgorithmOutput(data, context); - const response: DescribeAlgorithmCommandOutput = { + contents = deserializeAws_json1_1DescribeTrialResponse(data, context); + const response: DescribeTrialCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeAlgorithmCommandError = async ( +const deserializeAws_json1_1DescribeTrialCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6042,6 +9658,14 @@ const 
deserializeAws_json1_1DescribeAlgorithmCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6059,27 +9683,27 @@ const deserializeAws_json1_1DescribeAlgorithmCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeAppCommand = async ( +export const deserializeAws_json1_1DescribeTrialComponentCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeAppCommandError(output, context); + return deserializeAws_json1_1DescribeTrialComponentCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeAppResponse(data, context); - const response: DescribeAppCommandOutput = { + contents = deserializeAws_json1_1DescribeTrialComponentResponse(data, context); + const response: DescribeTrialComponentCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeAppCommandError = async ( +const deserializeAws_json1_1DescribeTrialComponentCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6114,27 +9738,27 @@ const deserializeAws_json1_1DescribeAppCommandError = async ( 
return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeAppImageConfigCommand = async ( +export const deserializeAws_json1_1DescribeUserProfileCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeAppImageConfigCommandError(output, context); + return deserializeAws_json1_1DescribeUserProfileCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeAppImageConfigResponse(data, context); - const response: DescribeAppImageConfigCommandOutput = { + contents = deserializeAws_json1_1DescribeUserProfileResponse(data, context); + const response: DescribeUserProfileCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeAppImageConfigCommandError = async ( +const deserializeAws_json1_1DescribeUserProfileCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6169,27 +9793,27 @@ const deserializeAws_json1_1DescribeAppImageConfigCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeAutoMLJobCommand = async ( +export const deserializeAws_json1_1DescribeWorkforceCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeAutoMLJobCommandError(output, context); + return deserializeAws_json1_1DescribeWorkforceCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeAutoMLJobResponse(data, 
context); - const response: DescribeAutoMLJobCommandOutput = { + contents = deserializeAws_json1_1DescribeWorkforceResponse(data, context); + const response: DescribeWorkforceCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeAutoMLJobCommandError = async ( +const deserializeAws_json1_1DescribeWorkforceCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6199,14 +9823,6 @@ const deserializeAws_json1_1DescribeAutoMLJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6224,27 +9840,27 @@ const deserializeAws_json1_1DescribeAutoMLJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeCodeRepositoryCommand = async ( +export const deserializeAws_json1_1DescribeWorkteamCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeCodeRepositoryCommandError(output, context); + return deserializeAws_json1_1DescribeWorkteamCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeCodeRepositoryOutput(data, context); - const response: 
DescribeCodeRepositoryCommandOutput = { + contents = deserializeAws_json1_1DescribeWorkteamResponse(data, context); + const response: DescribeWorkteamCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeCodeRepositoryCommandError = async ( +const deserializeAws_json1_1DescribeWorkteamCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6271,27 +9887,27 @@ const deserializeAws_json1_1DescribeCodeRepositoryCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeCompilationJobCommand = async ( +export const deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeCompilationJobCommandError(output, context); + return deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeCompilationJobResponse(data, context); - const response: DescribeCompilationJobCommandOutput = { + contents = deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioOutput(data, context); + const response: DisableSagemakerServicecatalogPortfolioCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeCompilationJobCommandError = async ( +const deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await 
parseBody(output.body, context), @@ -6301,14 +9917,6 @@ const deserializeAws_json1_1DescribeCompilationJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6326,27 +9934,27 @@ const deserializeAws_json1_1DescribeCompilationJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeDomainCommand = async ( +export const deserializeAws_json1_1DisassociateTrialComponentCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeDomainCommandError(output, context); + return deserializeAws_json1_1DisassociateTrialComponentCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeDomainResponse(data, context); - const response: DescribeDomainCommandOutput = { + contents = deserializeAws_json1_1DisassociateTrialComponentResponse(data, context); + const response: DisassociateTrialComponentCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeDomainCommandError = async ( +const deserializeAws_json1_1DisassociateTrialComponentCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await 
parseBody(output.body, context), @@ -6381,27 +9989,27 @@ const deserializeAws_json1_1DescribeDomainCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeEndpointCommand = async ( +export const deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeEndpointCommandError(output, context); + return deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeEndpointOutput(data, context); - const response: DescribeEndpointCommandOutput = { + contents = deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioOutput(data, context); + const response: EnableSagemakerServicecatalogPortfolioCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeEndpointCommandError = async ( +const deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6428,27 +10036,27 @@ const deserializeAws_json1_1DescribeEndpointCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeEndpointConfigCommand = async ( +export const deserializeAws_json1_1GetModelPackageGroupPolicyCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeEndpointConfigCommandError(output, context); + return 
deserializeAws_json1_1GetModelPackageGroupPolicyCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeEndpointConfigOutput(data, context); - const response: DescribeEndpointConfigCommandOutput = { + contents = deserializeAws_json1_1GetModelPackageGroupPolicyOutput(data, context); + const response: GetModelPackageGroupPolicyCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeEndpointConfigCommandError = async ( +const deserializeAws_json1_1GetModelPackageGroupPolicyCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6475,27 +10083,27 @@ const deserializeAws_json1_1DescribeEndpointConfigCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeExperimentCommand = async ( +export const deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeExperimentCommandError(output, context); + return deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeExperimentResponse(data, context); - const response: DescribeExperimentCommandOutput = { + contents = deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusOutput(data, context); + const response: GetSagemakerServicecatalogPortfolioStatusCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const 
deserializeAws_json1_1DescribeExperimentCommandError = async ( +const deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6505,14 +10113,6 @@ const deserializeAws_json1_1DescribeExperimentCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6530,27 +10130,27 @@ const deserializeAws_json1_1DescribeExperimentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeFlowDefinitionCommand = async ( +export const deserializeAws_json1_1GetSearchSuggestionsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeFlowDefinitionCommandError(output, context); + return deserializeAws_json1_1GetSearchSuggestionsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeFlowDefinitionResponse(data, context); - const response: DescribeFlowDefinitionCommandOutput = { + contents = deserializeAws_json1_1GetSearchSuggestionsResponse(data, context); + const response: GetSearchSuggestionsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; 
-const deserializeAws_json1_1DescribeFlowDefinitionCommandError = async ( +const deserializeAws_json1_1GetSearchSuggestionsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6560,14 +10160,6 @@ const deserializeAws_json1_1DescribeFlowDefinitionCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6585,27 +10177,27 @@ const deserializeAws_json1_1DescribeFlowDefinitionCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeHumanTaskUiCommand = async ( +export const deserializeAws_json1_1ListActionsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeHumanTaskUiCommandError(output, context); + return deserializeAws_json1_1ListActionsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeHumanTaskUiResponse(data, context); - const response: DescribeHumanTaskUiCommandOutput = { + contents = deserializeAws_json1_1ListActionsResponse(data, context); + const response: ListActionsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const 
deserializeAws_json1_1DescribeHumanTaskUiCommandError = async ( +const deserializeAws_json1_1ListActionsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6640,27 +10232,27 @@ const deserializeAws_json1_1DescribeHumanTaskUiCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeHyperParameterTuningJobCommand = async ( +export const deserializeAws_json1_1ListAlgorithmsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeHyperParameterTuningJobCommandError(output, context); + return deserializeAws_json1_1ListAlgorithmsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeHyperParameterTuningJobResponse(data, context); - const response: DescribeHyperParameterTuningJobCommandOutput = { + contents = deserializeAws_json1_1ListAlgorithmsOutput(data, context); + const response: ListAlgorithmsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeHyperParameterTuningJobCommandError = async ( +const deserializeAws_json1_1ListAlgorithmsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6670,14 +10262,6 @@ const deserializeAws_json1_1DescribeHyperParameterTuningJobCommandError = async const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6695,27 +10279,27 @@ const deserializeAws_json1_1DescribeHyperParameterTuningJobCommandError = async return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeImageCommand = async ( +export const deserializeAws_json1_1ListAppImageConfigsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeImageCommandError(output, context); + return deserializeAws_json1_1ListAppImageConfigsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeImageResponse(data, context); - const response: DescribeImageCommandOutput = { + contents = deserializeAws_json1_1ListAppImageConfigsResponse(data, context); + const response: ListAppImageConfigsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeImageCommandError = async ( +const deserializeAws_json1_1ListAppImageConfigsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6725,14 +10309,6 @@ const deserializeAws_json1_1DescribeImageCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6750,44 +10326,36 @@ const deserializeAws_json1_1DescribeImageCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeImageVersionCommand = async ( +export const deserializeAws_json1_1ListAppsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeImageVersionCommandError(output, context); + return deserializeAws_json1_1ListAppsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeImageVersionResponse(data, context); - const response: DescribeImageVersionCommandOutput = { + contents = deserializeAws_json1_1ListAppsResponse(data, context); + const response: ListAppsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeImageVersionCommandError = async ( +const deserializeAws_json1_1ListAppsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), }; let response: __SmithyException & __MetadataBearer & { [key: string]: any }; - let errorCode: string = "UnknownError"; - const errorTypeParts: String = parsedOutput.body["__type"].split("#"); - errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; - switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6805,27 +10373,27 @@ const deserializeAws_json1_1DescribeImageVersionCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeLabelingJobCommand = async ( +export const deserializeAws_json1_1ListArtifactsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeLabelingJobCommandError(output, context); + return deserializeAws_json1_1ListArtifactsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeLabelingJobResponse(data, context); - const response: DescribeLabelingJobCommandOutput = { + contents = deserializeAws_json1_1ListArtifactsResponse(data, context); + const response: ListArtifactsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeLabelingJobCommandError = async ( +const deserializeAws_json1_1ListArtifactsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6860,27 +10428,27 @@ const 
deserializeAws_json1_1DescribeLabelingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeModelCommand = async ( +export const deserializeAws_json1_1ListAssociationsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeModelCommandError(output, context); + return deserializeAws_json1_1ListAssociationsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeModelOutput(data, context); - const response: DescribeModelCommandOutput = { + contents = deserializeAws_json1_1ListAssociationsResponse(data, context); + const response: ListAssociationsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeModelCommandError = async ( +const deserializeAws_json1_1ListAssociationsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6890,6 +10458,14 @@ const deserializeAws_json1_1DescribeModelCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -6907,27 +10483,27 @@ const deserializeAws_json1_1DescribeModelCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeModelPackageCommand = async ( +export const deserializeAws_json1_1ListAutoMLJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeModelPackageCommandError(output, context); + return deserializeAws_json1_1ListAutoMLJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeModelPackageOutput(data, context); - const response: DescribeModelPackageCommandOutput = { + contents = deserializeAws_json1_1ListAutoMLJobsResponse(data, context); + const response: ListAutoMLJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeModelPackageCommandError = async ( +const deserializeAws_json1_1ListAutoMLJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -6954,27 +10530,27 @@ const deserializeAws_json1_1DescribeModelPackageCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeMonitoringScheduleCommand = async ( +export const 
deserializeAws_json1_1ListCandidatesForAutoMLJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeMonitoringScheduleCommandError(output, context); + return deserializeAws_json1_1ListCandidatesForAutoMLJobCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeMonitoringScheduleResponse(data, context); - const response: DescribeMonitoringScheduleCommandOutput = { + contents = deserializeAws_json1_1ListCandidatesForAutoMLJobResponse(data, context); + const response: ListCandidatesForAutoMLJobCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeMonitoringScheduleCommandError = async ( +const deserializeAws_json1_1ListCandidatesForAutoMLJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7009,27 +10585,27 @@ const deserializeAws_json1_1DescribeMonitoringScheduleCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeNotebookInstanceCommand = async ( +export const deserializeAws_json1_1ListCodeRepositoriesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeNotebookInstanceCommandError(output, context); + return deserializeAws_json1_1ListCodeRepositoriesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeNotebookInstanceOutput(data, context); - const response: DescribeNotebookInstanceCommandOutput = { + contents = 
deserializeAws_json1_1ListCodeRepositoriesOutput(data, context); + const response: ListCodeRepositoriesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeNotebookInstanceCommandError = async ( +const deserializeAws_json1_1ListCodeRepositoriesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7056,27 +10632,27 @@ const deserializeAws_json1_1DescribeNotebookInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommand = async ( +export const deserializeAws_json1_1ListCompilationJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommandError(output, context); + return deserializeAws_json1_1ListCompilationJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigOutput(data, context); - const response: DescribeNotebookInstanceLifecycleConfigCommandOutput = { + contents = deserializeAws_json1_1ListCompilationJobsResponse(data, context); + const response: ListCompilationJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommandError = async ( +const deserializeAws_json1_1ListCompilationJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7103,27 +10679,27 @@ 
const deserializeAws_json1_1DescribeNotebookInstanceLifecycleConfigCommandError return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeProcessingJobCommand = async ( +export const deserializeAws_json1_1ListContextsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeProcessingJobCommandError(output, context); + return deserializeAws_json1_1ListContextsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeProcessingJobResponse(data, context); - const response: DescribeProcessingJobCommandOutput = { + contents = deserializeAws_json1_1ListContextsResponse(data, context); + const response: ListContextsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeProcessingJobCommandError = async ( +const deserializeAws_json1_1ListContextsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7158,74 +10734,27 @@ const deserializeAws_json1_1DescribeProcessingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeSubscribedWorkteamCommand = async ( - output: __HttpResponse, - context: __SerdeContext -): Promise => { - if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeSubscribedWorkteamCommandError(output, context); - } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1DescribeSubscribedWorkteamResponse(data, context); - const response: DescribeSubscribedWorkteamCommandOutput = { - $metadata: 
deserializeMetadata(output), - ...contents, - }; - return Promise.resolve(response); -}; - -const deserializeAws_json1_1DescribeSubscribedWorkteamCommandError = async ( - output: __HttpResponse, - context: __SerdeContext -): Promise => { - const parsedOutput: any = { - ...output, - body: await parseBody(output.body, context), - }; - let response: __SmithyException & __MetadataBearer & { [key: string]: any }; - let errorCode: string = "UnknownError"; - const errorTypeParts: String = parsedOutput.body["__type"].split("#"); - errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; - switch (errorCode) { - default: - const parsedBody = parsedOutput.body; - errorCode = parsedBody.code || parsedBody.Code || errorCode; - response = { - ...parsedBody, - name: `${errorCode}`, - message: parsedBody.message || parsedBody.Message || errorCode, - $fault: "client", - $metadata: deserializeMetadata(output), - } as any; - } - const message = response.message || response.Message || errorCode; - response.message = message; - delete response.Message; - return Promise.reject(Object.assign(new Error(message), response)); -}; - -export const deserializeAws_json1_1DescribeTrainingJobCommand = async ( +export const deserializeAws_json1_1ListDomainsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeTrainingJobCommandError(output, context); + return deserializeAws_json1_1ListDomainsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeTrainingJobResponse(data, context); - const response: DescribeTrainingJobCommandOutput = { + contents = deserializeAws_json1_1ListDomainsResponse(data, context); + const response: ListDomainsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const 
deserializeAws_json1_1DescribeTrainingJobCommandError = async ( +const deserializeAws_json1_1ListDomainsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7235,14 +10764,6 @@ const deserializeAws_json1_1DescribeTrainingJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7260,27 +10781,27 @@ const deserializeAws_json1_1DescribeTrainingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeTransformJobCommand = async ( +export const deserializeAws_json1_1ListEndpointConfigsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeTransformJobCommandError(output, context); + return deserializeAws_json1_1ListEndpointConfigsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeTransformJobResponse(data, context); - const response: DescribeTransformJobCommandOutput = { + contents = deserializeAws_json1_1ListEndpointConfigsOutput(data, context); + const response: ListEndpointConfigsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const 
deserializeAws_json1_1DescribeTransformJobCommandError = async ( +const deserializeAws_json1_1ListEndpointConfigsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7290,14 +10811,6 @@ const deserializeAws_json1_1DescribeTransformJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7315,27 +10828,27 @@ const deserializeAws_json1_1DescribeTransformJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeTrialCommand = async ( +export const deserializeAws_json1_1ListEndpointsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeTrialCommandError(output, context); + return deserializeAws_json1_1ListEndpointsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeTrialResponse(data, context); - const response: DescribeTrialCommandOutput = { + contents = deserializeAws_json1_1ListEndpointsOutput(data, context); + const response: ListEndpointsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeTrialCommandError = async ( +const 
deserializeAws_json1_1ListEndpointsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7345,14 +10858,6 @@ const deserializeAws_json1_1DescribeTrialCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7370,27 +10875,27 @@ const deserializeAws_json1_1DescribeTrialCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeTrialComponentCommand = async ( +export const deserializeAws_json1_1ListExperimentsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeTrialComponentCommandError(output, context); + return deserializeAws_json1_1ListExperimentsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeTrialComponentResponse(data, context); - const response: DescribeTrialComponentCommandOutput = { + contents = deserializeAws_json1_1ListExperimentsResponse(data, context); + const response: ListExperimentsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeTrialComponentCommandError = async ( +const 
deserializeAws_json1_1ListExperimentsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7400,14 +10905,6 @@ const deserializeAws_json1_1DescribeTrialComponentCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7425,27 +10922,27 @@ const deserializeAws_json1_1DescribeTrialComponentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeUserProfileCommand = async ( +export const deserializeAws_json1_1ListFeatureGroupsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeUserProfileCommandError(output, context); + return deserializeAws_json1_1ListFeatureGroupsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeUserProfileResponse(data, context); - const response: DescribeUserProfileCommandOutput = { + contents = deserializeAws_json1_1ListFeatureGroupsResponse(data, context); + const response: ListFeatureGroupsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeUserProfileCommandError = async ( +const 
deserializeAws_json1_1ListFeatureGroupsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7455,14 +10952,6 @@ const deserializeAws_json1_1DescribeUserProfileCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7480,27 +10969,27 @@ const deserializeAws_json1_1DescribeUserProfileCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeWorkforceCommand = async ( +export const deserializeAws_json1_1ListFlowDefinitionsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeWorkforceCommandError(output, context); + return deserializeAws_json1_1ListFlowDefinitionsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeWorkforceResponse(data, context); - const response: DescribeWorkforceCommandOutput = { + contents = deserializeAws_json1_1ListFlowDefinitionsResponse(data, context); + const response: ListFlowDefinitionsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeWorkforceCommandError = async ( +const 
deserializeAws_json1_1ListFlowDefinitionsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7527,27 +11016,27 @@ const deserializeAws_json1_1DescribeWorkforceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DescribeWorkteamCommand = async ( +export const deserializeAws_json1_1ListHumanTaskUisCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1DescribeWorkteamCommandError(output, context); + return deserializeAws_json1_1ListHumanTaskUisCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DescribeWorkteamResponse(data, context); - const response: DescribeWorkteamCommandOutput = { + contents = deserializeAws_json1_1ListHumanTaskUisResponse(data, context); + const response: ListHumanTaskUisCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DescribeWorkteamCommandError = async ( +const deserializeAws_json1_1ListHumanTaskUisCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7574,27 +11063,27 @@ const deserializeAws_json1_1DescribeWorkteamCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1DisassociateTrialComponentCommand = async ( +export const deserializeAws_json1_1ListHyperParameterTuningJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return 
deserializeAws_json1_1DisassociateTrialComponentCommandError(output, context); + return deserializeAws_json1_1ListHyperParameterTuningJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1DisassociateTrialComponentResponse(data, context); - const response: DisassociateTrialComponentCommandOutput = { + contents = deserializeAws_json1_1ListHyperParameterTuningJobsResponse(data, context); + const response: ListHyperParameterTuningJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1DisassociateTrialComponentCommandError = async ( +const deserializeAws_json1_1ListHyperParameterTuningJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7604,14 +11093,6 @@ const deserializeAws_json1_1DisassociateTrialComponentCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7629,27 +11110,27 @@ const deserializeAws_json1_1DisassociateTrialComponentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1GetSearchSuggestionsCommand = async ( +export const deserializeAws_json1_1ListImagesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1GetSearchSuggestionsCommandError(output, context); + return deserializeAws_json1_1ListImagesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1GetSearchSuggestionsResponse(data, context); - const response: GetSearchSuggestionsCommandOutput = { + contents = deserializeAws_json1_1ListImagesResponse(data, context); + const response: ListImagesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1GetSearchSuggestionsCommandError = async ( +const deserializeAws_json1_1ListImagesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7676,27 +11157,27 @@ const deserializeAws_json1_1GetSearchSuggestionsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListAlgorithmsCommand = async ( +export const 
deserializeAws_json1_1ListImageVersionsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListAlgorithmsCommandError(output, context); + return deserializeAws_json1_1ListImageVersionsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListAlgorithmsOutput(data, context); - const response: ListAlgorithmsCommandOutput = { + contents = deserializeAws_json1_1ListImageVersionsResponse(data, context); + const response: ListImageVersionsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListAlgorithmsCommandError = async ( +const deserializeAws_json1_1ListImageVersionsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7706,6 +11187,14 @@ const deserializeAws_json1_1ListAlgorithmsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7723,27 +11212,27 @@ const deserializeAws_json1_1ListAlgorithmsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListAppImageConfigsCommand = async ( +export const deserializeAws_json1_1ListLabelingJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListAppImageConfigsCommandError(output, context); + return deserializeAws_json1_1ListLabelingJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListAppImageConfigsResponse(data, context); - const response: ListAppImageConfigsCommandOutput = { + contents = deserializeAws_json1_1ListLabelingJobsResponse(data, context); + const response: ListLabelingJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListAppImageConfigsCommandError = async ( +const deserializeAws_json1_1ListLabelingJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7770,27 +11259,27 @@ const deserializeAws_json1_1ListAppImageConfigsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListAppsCommand = async ( +export const 
deserializeAws_json1_1ListLabelingJobsForWorkteamCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListAppsCommandError(output, context); + return deserializeAws_json1_1ListLabelingJobsForWorkteamCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListAppsResponse(data, context); - const response: ListAppsCommandOutput = { + contents = deserializeAws_json1_1ListLabelingJobsForWorkteamResponse(data, context); + const response: ListLabelingJobsForWorkteamCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListAppsCommandError = async ( +const deserializeAws_json1_1ListLabelingJobsForWorkteamCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7800,6 +11289,14 @@ const deserializeAws_json1_1ListAppsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7817,27 +11314,27 @@ const deserializeAws_json1_1ListAppsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListAutoMLJobsCommand = async ( +export const deserializeAws_json1_1ListModelPackageGroupsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListAutoMLJobsCommandError(output, context); + return deserializeAws_json1_1ListModelPackageGroupsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListAutoMLJobsResponse(data, context); - const response: ListAutoMLJobsCommandOutput = { + contents = deserializeAws_json1_1ListModelPackageGroupsOutput(data, context); + const response: ListModelPackageGroupsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListAutoMLJobsCommandError = async ( +const deserializeAws_json1_1ListModelPackageGroupsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7864,27 +11361,27 @@ const deserializeAws_json1_1ListAutoMLJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListCandidatesForAutoMLJobCommand = async ( +export const 
deserializeAws_json1_1ListModelPackagesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListCandidatesForAutoMLJobCommandError(output, context); + return deserializeAws_json1_1ListModelPackagesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListCandidatesForAutoMLJobResponse(data, context); - const response: ListCandidatesForAutoMLJobCommandOutput = { + contents = deserializeAws_json1_1ListModelPackagesOutput(data, context); + const response: ListModelPackagesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListCandidatesForAutoMLJobCommandError = async ( +const deserializeAws_json1_1ListModelPackagesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7894,14 +11391,6 @@ const deserializeAws_json1_1ListCandidatesForAutoMLJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -7919,27 +11408,27 @@ const deserializeAws_json1_1ListCandidatesForAutoMLJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListCodeRepositoriesCommand = async ( +export const deserializeAws_json1_1ListModelsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListCodeRepositoriesCommandError(output, context); + return deserializeAws_json1_1ListModelsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListCodeRepositoriesOutput(data, context); - const response: ListCodeRepositoriesCommandOutput = { + contents = deserializeAws_json1_1ListModelsOutput(data, context); + const response: ListModelsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListCodeRepositoriesCommandError = async ( +const deserializeAws_json1_1ListModelsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -7966,27 +11455,27 @@ const deserializeAws_json1_1ListCodeRepositoriesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListCompilationJobsCommand = async ( +export const 
deserializeAws_json1_1ListMonitoringExecutionsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListCompilationJobsCommandError(output, context); + return deserializeAws_json1_1ListMonitoringExecutionsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListCompilationJobsResponse(data, context); - const response: ListCompilationJobsCommandOutput = { + contents = deserializeAws_json1_1ListMonitoringExecutionsResponse(data, context); + const response: ListMonitoringExecutionsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListCompilationJobsCommandError = async ( +const deserializeAws_json1_1ListMonitoringExecutionsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8013,27 +11502,27 @@ const deserializeAws_json1_1ListCompilationJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListDomainsCommand = async ( +export const deserializeAws_json1_1ListMonitoringSchedulesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListDomainsCommandError(output, context); + return deserializeAws_json1_1ListMonitoringSchedulesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListDomainsResponse(data, context); - const response: ListDomainsCommandOutput = { + contents = deserializeAws_json1_1ListMonitoringSchedulesResponse(data, context); + const response: 
ListMonitoringSchedulesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListDomainsCommandError = async ( +const deserializeAws_json1_1ListMonitoringSchedulesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8060,27 +11549,27 @@ const deserializeAws_json1_1ListDomainsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListEndpointConfigsCommand = async ( +export const deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListEndpointConfigsCommandError(output, context); + return deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListEndpointConfigsOutput(data, context); - const response: ListEndpointConfigsCommandOutput = { + contents = deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsOutput(data, context); + const response: ListNotebookInstanceLifecycleConfigsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListEndpointConfigsCommandError = async ( +const deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8107,27 +11596,27 @@ const deserializeAws_json1_1ListEndpointConfigsCommandError = async ( return Promise.reject(Object.assign(new 
Error(message), response)); }; -export const deserializeAws_json1_1ListEndpointsCommand = async ( +export const deserializeAws_json1_1ListNotebookInstancesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListEndpointsCommandError(output, context); + return deserializeAws_json1_1ListNotebookInstancesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListEndpointsOutput(data, context); - const response: ListEndpointsCommandOutput = { + contents = deserializeAws_json1_1ListNotebookInstancesOutput(data, context); + const response: ListNotebookInstancesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListEndpointsCommandError = async ( +const deserializeAws_json1_1ListNotebookInstancesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8154,27 +11643,27 @@ const deserializeAws_json1_1ListEndpointsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListExperimentsCommand = async ( +export const deserializeAws_json1_1ListPipelineExecutionsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListExperimentsCommandError(output, context); + return deserializeAws_json1_1ListPipelineExecutionsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListExperimentsResponse(data, context); - const response: ListExperimentsCommandOutput = { + contents = 
deserializeAws_json1_1ListPipelineExecutionsResponse(data, context); + const response: ListPipelineExecutionsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListExperimentsCommandError = async ( +const deserializeAws_json1_1ListPipelineExecutionsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8184,6 +11673,14 @@ const deserializeAws_json1_1ListExperimentsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8201,27 +11698,27 @@ const deserializeAws_json1_1ListExperimentsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListFlowDefinitionsCommand = async ( +export const deserializeAws_json1_1ListPipelineExecutionStepsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListFlowDefinitionsCommandError(output, context); + return deserializeAws_json1_1ListPipelineExecutionStepsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListFlowDefinitionsResponse(data, context); - const response: ListFlowDefinitionsCommandOutput = { + contents = 
deserializeAws_json1_1ListPipelineExecutionStepsResponse(data, context); + const response: ListPipelineExecutionStepsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListFlowDefinitionsCommandError = async ( +const deserializeAws_json1_1ListPipelineExecutionStepsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8231,6 +11728,14 @@ const deserializeAws_json1_1ListFlowDefinitionsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8248,27 +11753,27 @@ const deserializeAws_json1_1ListFlowDefinitionsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListHumanTaskUisCommand = async ( +export const deserializeAws_json1_1ListPipelineParametersForExecutionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListHumanTaskUisCommandError(output, context); + return deserializeAws_json1_1ListPipelineParametersForExecutionCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListHumanTaskUisResponse(data, context); - const response: ListHumanTaskUisCommandOutput = { + 
contents = deserializeAws_json1_1ListPipelineParametersForExecutionResponse(data, context); + const response: ListPipelineParametersForExecutionCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListHumanTaskUisCommandError = async ( +const deserializeAws_json1_1ListPipelineParametersForExecutionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8278,6 +11783,14 @@ const deserializeAws_json1_1ListHumanTaskUisCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8295,27 +11808,27 @@ const deserializeAws_json1_1ListHumanTaskUisCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListHyperParameterTuningJobsCommand = async ( +export const deserializeAws_json1_1ListPipelinesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListHyperParameterTuningJobsCommandError(output, context); + return deserializeAws_json1_1ListPipelinesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListHyperParameterTuningJobsResponse(data, context); - const response: 
ListHyperParameterTuningJobsCommandOutput = { + contents = deserializeAws_json1_1ListPipelinesResponse(data, context); + const response: ListPipelinesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListHyperParameterTuningJobsCommandError = async ( +const deserializeAws_json1_1ListPipelinesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8342,27 +11855,27 @@ const deserializeAws_json1_1ListHyperParameterTuningJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListImagesCommand = async ( +export const deserializeAws_json1_1ListProcessingJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListImagesCommandError(output, context); + return deserializeAws_json1_1ListProcessingJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListImagesResponse(data, context); - const response: ListImagesCommandOutput = { + contents = deserializeAws_json1_1ListProcessingJobsResponse(data, context); + const response: ListProcessingJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListImagesCommandError = async ( +const deserializeAws_json1_1ListProcessingJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8389,27 +11902,27 @@ const deserializeAws_json1_1ListImagesCommandError = async ( return Promise.reject(Object.assign(new 
Error(message), response)); }; -export const deserializeAws_json1_1ListImageVersionsCommand = async ( +export const deserializeAws_json1_1ListProjectsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListImageVersionsCommandError(output, context); + return deserializeAws_json1_1ListProjectsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListImageVersionsResponse(data, context); - const response: ListImageVersionsCommandOutput = { + contents = deserializeAws_json1_1ListProjectsOutput(data, context); + const response: ListProjectsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListImageVersionsCommandError = async ( +const deserializeAws_json1_1ListProjectsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8419,14 +11932,6 @@ const deserializeAws_json1_1ListImageVersionsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8444,27 +11949,27 @@ const deserializeAws_json1_1ListImageVersionsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListLabelingJobsCommand = async ( +export const deserializeAws_json1_1ListSubscribedWorkteamsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListLabelingJobsCommandError(output, context); + return deserializeAws_json1_1ListSubscribedWorkteamsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListLabelingJobsResponse(data, context); - const response: ListLabelingJobsCommandOutput = { + contents = deserializeAws_json1_1ListSubscribedWorkteamsResponse(data, context); + const response: ListSubscribedWorkteamsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListLabelingJobsCommandError = async ( +const deserializeAws_json1_1ListSubscribedWorkteamsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8491,27 +11996,27 @@ const deserializeAws_json1_1ListLabelingJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListLabelingJobsForWorkteamCommand = 
async ( +export const deserializeAws_json1_1ListTagsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListLabelingJobsForWorkteamCommandError(output, context); + return deserializeAws_json1_1ListTagsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListLabelingJobsForWorkteamResponse(data, context); - const response: ListLabelingJobsForWorkteamCommandOutput = { + contents = deserializeAws_json1_1ListTagsOutput(data, context); + const response: ListTagsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListLabelingJobsForWorkteamCommandError = async ( +const deserializeAws_json1_1ListTagsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8521,14 +12026,6 @@ const deserializeAws_json1_1ListLabelingJobsForWorkteamCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8546,27 +12043,27 @@ const deserializeAws_json1_1ListLabelingJobsForWorkteamCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListModelPackagesCommand = async ( +export const deserializeAws_json1_1ListTrainingJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListModelPackagesCommandError(output, context); + return deserializeAws_json1_1ListTrainingJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListModelPackagesOutput(data, context); - const response: ListModelPackagesCommandOutput = { + contents = deserializeAws_json1_1ListTrainingJobsResponse(data, context); + const response: ListTrainingJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListModelPackagesCommandError = async ( +const deserializeAws_json1_1ListTrainingJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8593,27 +12090,27 @@ const deserializeAws_json1_1ListModelPackagesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListModelsCommand = async ( +export const 
deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListModelsCommandError(output, context); + return deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListModelsOutput(data, context); - const response: ListModelsCommandOutput = { + contents = deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobResponse(data, context); + const response: ListTrainingJobsForHyperParameterTuningJobCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListModelsCommandError = async ( +const deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8623,6 +12120,14 @@ const deserializeAws_json1_1ListModelsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8640,27 +12145,27 @@ const deserializeAws_json1_1ListModelsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListMonitoringExecutionsCommand = async ( +export const deserializeAws_json1_1ListTransformJobsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListMonitoringExecutionsCommandError(output, context); + return deserializeAws_json1_1ListTransformJobsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListMonitoringExecutionsResponse(data, context); - const response: ListMonitoringExecutionsCommandOutput = { + contents = deserializeAws_json1_1ListTransformJobsResponse(data, context); + const response: ListTransformJobsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListMonitoringExecutionsCommandError = async ( +const deserializeAws_json1_1ListTransformJobsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8687,27 +12192,27 @@ const deserializeAws_json1_1ListMonitoringExecutionsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const 
deserializeAws_json1_1ListMonitoringSchedulesCommand = async ( +export const deserializeAws_json1_1ListTrialComponentsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListMonitoringSchedulesCommandError(output, context); + return deserializeAws_json1_1ListTrialComponentsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListMonitoringSchedulesResponse(data, context); - const response: ListMonitoringSchedulesCommandOutput = { + contents = deserializeAws_json1_1ListTrialComponentsResponse(data, context); + const response: ListTrialComponentsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListMonitoringSchedulesCommandError = async ( +const deserializeAws_json1_1ListTrialComponentsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8717,6 +12222,14 @@ const deserializeAws_json1_1ListMonitoringSchedulesCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8734,27 +12247,27 @@ const deserializeAws_json1_1ListMonitoringSchedulesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommand = async ( +export const deserializeAws_json1_1ListTrialsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommandError(output, context); + return deserializeAws_json1_1ListTrialsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsOutput(data, context); - const response: ListNotebookInstanceLifecycleConfigsCommandOutput = { + contents = deserializeAws_json1_1ListTrialsResponse(data, context); + const response: ListTrialsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommandError = async ( +const deserializeAws_json1_1ListTrialsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8764,6 +12277,14 @@ const deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommandError = a const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = 
errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -8781,27 +12302,27 @@ const deserializeAws_json1_1ListNotebookInstanceLifecycleConfigsCommandError = a return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListNotebookInstancesCommand = async ( +export const deserializeAws_json1_1ListUserProfilesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListNotebookInstancesCommandError(output, context); + return deserializeAws_json1_1ListUserProfilesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListNotebookInstancesOutput(data, context); - const response: ListNotebookInstancesCommandOutput = { + contents = deserializeAws_json1_1ListUserProfilesResponse(data, context); + const response: ListUserProfilesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListNotebookInstancesCommandError = async ( +const deserializeAws_json1_1ListUserProfilesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8828,27 +12349,27 @@ const deserializeAws_json1_1ListNotebookInstancesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const 
deserializeAws_json1_1ListProcessingJobsCommand = async ( +export const deserializeAws_json1_1ListWorkforcesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListProcessingJobsCommandError(output, context); + return deserializeAws_json1_1ListWorkforcesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListProcessingJobsResponse(data, context); - const response: ListProcessingJobsCommandOutput = { + contents = deserializeAws_json1_1ListWorkforcesResponse(data, context); + const response: ListWorkforcesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListProcessingJobsCommandError = async ( +const deserializeAws_json1_1ListWorkforcesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8875,27 +12396,27 @@ const deserializeAws_json1_1ListProcessingJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListSubscribedWorkteamsCommand = async ( +export const deserializeAws_json1_1ListWorkteamsCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListSubscribedWorkteamsCommandError(output, context); + return deserializeAws_json1_1ListWorkteamsCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListSubscribedWorkteamsResponse(data, context); - const response: ListSubscribedWorkteamsCommandOutput = { + contents = 
deserializeAws_json1_1ListWorkteamsResponse(data, context); + const response: ListWorkteamsCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListSubscribedWorkteamsCommandError = async ( +const deserializeAws_json1_1ListWorkteamsCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8922,27 +12443,27 @@ const deserializeAws_json1_1ListSubscribedWorkteamsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListTagsCommand = async ( +export const deserializeAws_json1_1PutModelPackageGroupPolicyCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListTagsCommandError(output, context); + return deserializeAws_json1_1PutModelPackageGroupPolicyCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListTagsOutput(data, context); - const response: ListTagsCommandOutput = { + contents = deserializeAws_json1_1PutModelPackageGroupPolicyOutput(data, context); + const response: PutModelPackageGroupPolicyCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListTagsCommandError = async ( +const deserializeAws_json1_1PutModelPackageGroupPolicyCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8969,27 +12490,27 @@ const deserializeAws_json1_1ListTagsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const 
deserializeAws_json1_1ListTrainingJobsCommand = async ( +export const deserializeAws_json1_1RenderUiTemplateCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListTrainingJobsCommandError(output, context); + return deserializeAws_json1_1RenderUiTemplateCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListTrainingJobsResponse(data, context); - const response: ListTrainingJobsCommandOutput = { + contents = deserializeAws_json1_1RenderUiTemplateResponse(data, context); + const response: RenderUiTemplateCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListTrainingJobsCommandError = async ( +const deserializeAws_json1_1RenderUiTemplateCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -8999,6 +12520,14 @@ const deserializeAws_json1_1ListTrainingJobsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9016,44 +12545,36 @@ const deserializeAws_json1_1ListTrainingJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommand = async ( +export const deserializeAws_json1_1SearchCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommandError(output, context); + return deserializeAws_json1_1SearchCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobResponse(data, context); - const response: ListTrainingJobsForHyperParameterTuningJobCommandOutput = { + contents = deserializeAws_json1_1SearchResponse(data, context); + const response: SearchCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommandError = async ( +const deserializeAws_json1_1SearchCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), }; let response: __SmithyException & __MetadataBearer & { [key: string]: any }; - let errorCode: string = "UnknownError"; - const errorTypeParts: String = 
parsedOutput.body["__type"].split("#"); - errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; - switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9071,27 +12592,24 @@ const deserializeAws_json1_1ListTrainingJobsForHyperParameterTuningJobCommandErr return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListTransformJobsCommand = async ( +export const deserializeAws_json1_1StartMonitoringScheduleCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListTransformJobsCommandError(output, context); + return deserializeAws_json1_1StartMonitoringScheduleCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1ListTransformJobsResponse(data, context); - const response: ListTransformJobsCommandOutput = { + await collectBody(output.body, context); + const response: StartMonitoringScheduleCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListTransformJobsCommandError = async ( +const deserializeAws_json1_1StartMonitoringScheduleCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: 
any = { ...output, body: await parseBody(output.body, context), @@ -9101,6 +12619,14 @@ const deserializeAws_json1_1ListTransformJobsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9118,27 +12644,24 @@ const deserializeAws_json1_1ListTransformJobsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListTrialComponentsCommand = async ( +export const deserializeAws_json1_1StartNotebookInstanceCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListTrialComponentsCommandError(output, context); + return deserializeAws_json1_1StartNotebookInstanceCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1ListTrialComponentsResponse(data, context); - const response: ListTrialComponentsCommandOutput = { + await collectBody(output.body, context); + const response: StartNotebookInstanceCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListTrialComponentsCommandError = async ( +const deserializeAws_json1_1StartNotebookInstanceCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, 
context), @@ -9148,10 +12671,10 @@ const deserializeAws_json1_1ListTrialComponentsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -9173,27 +12696,27 @@ const deserializeAws_json1_1ListTrialComponentsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListTrialsCommand = async ( +export const deserializeAws_json1_1StartPipelineExecutionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListTrialsCommandError(output, context); + return deserializeAws_json1_1StartPipelineExecutionCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1ListTrialsResponse(data, context); - const response: ListTrialsCommandOutput = { + contents = deserializeAws_json1_1StartPipelineExecutionResponse(data, context); + const response: StartPipelineExecutionCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListTrialsCommandError = async ( +const deserializeAws_json1_1StartPipelineExecutionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, 
context), @@ -9203,6 +12726,14 @@ const deserializeAws_json1_1ListTrialsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -9228,27 +12759,24 @@ const deserializeAws_json1_1ListTrialsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListUserProfilesCommand = async ( +export const deserializeAws_json1_1StopAutoMLJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListUserProfilesCommandError(output, context); + return deserializeAws_json1_1StopAutoMLJobCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1ListUserProfilesResponse(data, context); - const response: ListUserProfilesCommandOutput = { + await collectBody(output.body, context); + const response: StopAutoMLJobCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListUserProfilesCommandError = async ( +const deserializeAws_json1_1StopAutoMLJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9258,6 +12786,14 @@ const deserializeAws_json1_1ListUserProfilesCommandError = async ( const 
errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9275,27 +12811,24 @@ const deserializeAws_json1_1ListUserProfilesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListWorkforcesCommand = async ( +export const deserializeAws_json1_1StopCompilationJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListWorkforcesCommandError(output, context); + return deserializeAws_json1_1StopCompilationJobCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1ListWorkforcesResponse(data, context); - const response: ListWorkforcesCommandOutput = { + await collectBody(output.body, context); + const response: StopCompilationJobCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListWorkforcesCommandError = async ( +const deserializeAws_json1_1StopCompilationJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9305,6 +12838,14 @@ const deserializeAws_json1_1ListWorkforcesCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] 
=== undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9322,27 +12863,24 @@ const deserializeAws_json1_1ListWorkforcesCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1ListWorkteamsCommand = async ( +export const deserializeAws_json1_1StopHyperParameterTuningJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1ListWorkteamsCommandError(output, context); + return deserializeAws_json1_1StopHyperParameterTuningJobCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1ListWorkteamsResponse(data, context); - const response: ListWorkteamsCommandOutput = { + await collectBody(output.body, context); + const response: StopHyperParameterTuningJobCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1ListWorkteamsCommandError = async ( +const deserializeAws_json1_1StopHyperParameterTuningJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9352,6 +12890,14 @@ const deserializeAws_json1_1ListWorkteamsCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9369,27 +12915,24 @@ const deserializeAws_json1_1ListWorkteamsCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1RenderUiTemplateCommand = async ( +export const deserializeAws_json1_1StopLabelingJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1RenderUiTemplateCommandError(output, context); + return deserializeAws_json1_1StopLabelingJobCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1RenderUiTemplateResponse(data, context); - const response: RenderUiTemplateCommandOutput = { + await collectBody(output.body, context); + const response: StopLabelingJobCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1RenderUiTemplateCommandError = async ( +const deserializeAws_json1_1StopLabelingJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9424,27 +12967,24 @@ const deserializeAws_json1_1RenderUiTemplateCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1SearchCommand = async ( +export const deserializeAws_json1_1StopMonitoringScheduleCommand = async ( output: 
__HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1SearchCommandError(output, context); + return deserializeAws_json1_1StopMonitoringScheduleCommandError(output, context); } - const data: any = await parseBody(output.body, context); - let contents: any = {}; - contents = deserializeAws_json1_1SearchResponse(data, context); - const response: SearchCommandOutput = { + await collectBody(output.body, context); + const response: StopMonitoringScheduleCommandOutput = { $metadata: deserializeMetadata(output), - ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1SearchCommandError = async ( +const deserializeAws_json1_1StopMonitoringScheduleCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9454,6 +12994,14 @@ const deserializeAws_json1_1SearchCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9471,24 +13019,24 @@ const deserializeAws_json1_1SearchCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StartMonitoringScheduleCommand = async ( +export const deserializeAws_json1_1StopNotebookInstanceCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StartMonitoringScheduleCommandError(output, context); + return deserializeAws_json1_1StopNotebookInstanceCommandError(output, context); } await collectBody(output.body, context); - const response: StartMonitoringScheduleCommandOutput = { + const response: StopNotebookInstanceCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1StartMonitoringScheduleCommandError = async ( +const deserializeAws_json1_1StopNotebookInstanceCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9498,14 +13046,6 @@ const deserializeAws_json1_1StartMonitoringScheduleCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9523,24 +13063,27 @@ const deserializeAws_json1_1StartMonitoringScheduleCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StartNotebookInstanceCommand = async ( +export const deserializeAws_json1_1StopPipelineExecutionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StartNotebookInstanceCommandError(output, context); + return deserializeAws_json1_1StopPipelineExecutionCommandError(output, context); } - await collectBody(output.body, context); - const response: StartNotebookInstanceCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1StopPipelineExecutionResponse(data, context); + const response: StopPipelineExecutionCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StartNotebookInstanceCommandError = async ( +const deserializeAws_json1_1StopPipelineExecutionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9550,10 +13093,10 @@ const deserializeAws_json1_1StartNotebookInstanceCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -9575,24 +13118,24 @@ const deserializeAws_json1_1StartNotebookInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopAutoMLJobCommand = async ( +export const deserializeAws_json1_1StopProcessingJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopAutoMLJobCommandError(output, context); + return deserializeAws_json1_1StopProcessingJobCommandError(output, context); } await collectBody(output.body, context); - const response: StopAutoMLJobCommandOutput = { + const response: StopProcessingJobCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopAutoMLJobCommandError = async ( +const deserializeAws_json1_1StopProcessingJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9627,24 +13170,24 @@ const deserializeAws_json1_1StopAutoMLJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopCompilationJobCommand = async ( +export const deserializeAws_json1_1StopTrainingJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return 
deserializeAws_json1_1StopCompilationJobCommandError(output, context); + return deserializeAws_json1_1StopTrainingJobCommandError(output, context); } await collectBody(output.body, context); - const response: StopCompilationJobCommandOutput = { + const response: StopTrainingJobCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopCompilationJobCommandError = async ( +const deserializeAws_json1_1StopTrainingJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9679,24 +13222,24 @@ const deserializeAws_json1_1StopCompilationJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopHyperParameterTuningJobCommand = async ( +export const deserializeAws_json1_1StopTransformJobCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopHyperParameterTuningJobCommandError(output, context); + return deserializeAws_json1_1StopTransformJobCommandError(output, context); } await collectBody(output.body, context); - const response: StopHyperParameterTuningJobCommandOutput = { + const response: StopTransformJobCommandOutput = { $metadata: deserializeMetadata(output), }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopHyperParameterTuningJobCommandError = async ( +const deserializeAws_json1_1StopTransformJobCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9731,24 +13274,27 @@ const deserializeAws_json1_1StopHyperParameterTuningJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); 
}; -export const deserializeAws_json1_1StopLabelingJobCommand = async ( +export const deserializeAws_json1_1UpdateActionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopLabelingJobCommandError(output, context); + return deserializeAws_json1_1UpdateActionCommandError(output, context); } - await collectBody(output.body, context); - const response: StopLabelingJobCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UpdateActionResponse(data, context); + const response: UpdateActionCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopLabelingJobCommandError = async ( +const deserializeAws_json1_1UpdateActionCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9758,6 +13304,14 @@ const deserializeAws_json1_1StopLabelingJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ConflictException": + case "com.amazonaws.sagemaker#ConflictException": + response = { + ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -9783,24 +13337,27 @@ const deserializeAws_json1_1StopLabelingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopMonitoringScheduleCommand = async ( +export const deserializeAws_json1_1UpdateAppImageConfigCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopMonitoringScheduleCommandError(output, context); + return deserializeAws_json1_1UpdateAppImageConfigCommandError(output, context); } - await collectBody(output.body, context); - const response: StopMonitoringScheduleCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UpdateAppImageConfigResponse(data, context); + const response: UpdateAppImageConfigCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopMonitoringScheduleCommandError = async ( +const deserializeAws_json1_1UpdateAppImageConfigCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9835,24 +13392,27 @@ const deserializeAws_json1_1StopMonitoringScheduleCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopNotebookInstanceCommand = async ( +export const 
deserializeAws_json1_1UpdateArtifactCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopNotebookInstanceCommandError(output, context); + return deserializeAws_json1_1UpdateArtifactCommandError(output, context); } - await collectBody(output.body, context); - const response: StopNotebookInstanceCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UpdateArtifactResponse(data, context); + const response: UpdateArtifactCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopNotebookInstanceCommandError = async ( +const deserializeAws_json1_1UpdateArtifactCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9862,6 +13422,22 @@ const deserializeAws_json1_1StopNotebookInstanceCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ConflictException": + case "com.amazonaws.sagemaker#ConflictException": + response = { + ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9879,24 +13455,27 @@ const deserializeAws_json1_1StopNotebookInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopProcessingJobCommand = async ( +export const deserializeAws_json1_1UpdateCodeRepositoryCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopProcessingJobCommandError(output, context); + return deserializeAws_json1_1UpdateCodeRepositoryCommandError(output, context); } - await collectBody(output.body, context); - const response: StopProcessingJobCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UpdateCodeRepositoryOutput(data, context); + const response: UpdateCodeRepositoryCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopProcessingJobCommandError = async ( +const deserializeAws_json1_1UpdateCodeRepositoryCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9906,14 
+13485,6 @@ const deserializeAws_json1_1StopProcessingJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -9931,24 +13502,27 @@ const deserializeAws_json1_1StopProcessingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopTrainingJobCommand = async ( +export const deserializeAws_json1_1UpdateContextCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopTrainingJobCommandError(output, context); + return deserializeAws_json1_1UpdateContextCommandError(output, context); } - await collectBody(output.body, context); - const response: StopTrainingJobCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UpdateContextResponse(data, context); + const response: UpdateContextCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopTrainingJobCommandError = async ( +const deserializeAws_json1_1UpdateContextCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -9958,6 +13532,14 @@ const deserializeAws_json1_1StopTrainingJobCommandError = async ( const errorTypeParts: String = 
parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ConflictException": + case "com.amazonaws.sagemaker#ConflictException": + response = { + ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -9983,24 +13565,27 @@ const deserializeAws_json1_1StopTrainingJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1StopTransformJobCommand = async ( +export const deserializeAws_json1_1UpdateDomainCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1StopTransformJobCommandError(output, context); + return deserializeAws_json1_1UpdateDomainCommandError(output, context); } - await collectBody(output.body, context); - const response: StopTransformJobCommandOutput = { + const data: any = await parseBody(output.body, context); + let contents: any = {}; + contents = deserializeAws_json1_1UpdateDomainResponse(data, context); + const response: UpdateDomainCommandOutput = { $metadata: deserializeMetadata(output), + ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1StopTransformJobCommandError = async ( +const deserializeAws_json1_1UpdateDomainCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10010,6 +13595,22 @@ const deserializeAws_json1_1StopTransformJobCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": + response = { + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; case "ResourceNotFound": case "com.amazonaws.sagemaker#ResourceNotFound": response = { @@ -10035,27 +13636,27 @@ const deserializeAws_json1_1StopTransformJobCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateAppImageConfigCommand = async ( +export const deserializeAws_json1_1UpdateEndpointCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateAppImageConfigCommandError(output, context); + return deserializeAws_json1_1UpdateEndpointCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateAppImageConfigResponse(data, context); - const response: UpdateAppImageConfigCommandOutput = { + contents = deserializeAws_json1_1UpdateEndpointOutput(data, context); + const response: UpdateEndpointCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateAppImageConfigCommandError = async ( +const deserializeAws_json1_1UpdateEndpointCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10065,10 +13666,10 @@ 
const deserializeAws_json1_1UpdateAppImageConfigCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10090,27 +13691,27 @@ const deserializeAws_json1_1UpdateAppImageConfigCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateCodeRepositoryCommand = async ( +export const deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateCodeRepositoryCommandError(output, context); + return deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateCodeRepositoryOutput(data, context); - const response: UpdateCodeRepositoryCommandOutput = { + contents = deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesOutput(data, context); + const response: UpdateEndpointWeightsAndCapacitiesCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateCodeRepositoryCommandError = async ( +const deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const 
parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10120,6 +13721,14 @@ const deserializeAws_json1_1UpdateCodeRepositoryCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": + response = { + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -10137,27 +13746,27 @@ const deserializeAws_json1_1UpdateCodeRepositoryCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateDomainCommand = async ( +export const deserializeAws_json1_1UpdateExperimentCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateDomainCommandError(output, context); + return deserializeAws_json1_1UpdateExperimentCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateDomainResponse(data, context); - const response: UpdateDomainCommandOutput = { + contents = deserializeAws_json1_1UpdateExperimentResponse(data, context); + const response: UpdateExperimentCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateDomainCommandError = async ( +const deserializeAws_json1_1UpdateExperimentCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await 
parseBody(output.body, context), @@ -10167,18 +13776,10 @@ const deserializeAws_json1_1UpdateDomainCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": + case "ConflictException": + case "com.amazonaws.sagemaker#ConflictException": response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10208,27 +13809,27 @@ const deserializeAws_json1_1UpdateDomainCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateEndpointCommand = async ( +export const deserializeAws_json1_1UpdateImageCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateEndpointCommandError(output, context); + return deserializeAws_json1_1UpdateImageCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateEndpointOutput(data, context); - const response: UpdateEndpointCommandOutput = { + contents = deserializeAws_json1_1UpdateImageResponse(data, context); + const response: UpdateImageCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateEndpointCommandError = async ( 
+const deserializeAws_json1_1UpdateImageCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10238,10 +13839,18 @@ const deserializeAws_json1_1UpdateEndpointCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": + case "ResourceInUse": + case "com.amazonaws.sagemaker#ResourceInUse": response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), + name: errorCode, + $metadata: deserializeMetadata(output), + }; + break; + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": + response = { + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10263,44 +13872,36 @@ const deserializeAws_json1_1UpdateEndpointCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommand = async ( +export const deserializeAws_json1_1UpdateModelPackageCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommandError(output, context); + return deserializeAws_json1_1UpdateModelPackageCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesOutput(data, context); - const response: 
UpdateEndpointWeightsAndCapacitiesCommandOutput = { + contents = deserializeAws_json1_1UpdateModelPackageOutput(data, context); + const response: UpdateModelPackageCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommandError = async ( +const deserializeAws_json1_1UpdateModelPackageCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), - }; - let response: __SmithyException & __MetadataBearer & { [key: string]: any }; - let errorCode: string = "UnknownError"; - const errorTypeParts: String = parsedOutput.body["__type"].split("#"); - errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; - switch (errorCode) { - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": - response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; + }; + let response: __SmithyException & __MetadataBearer & { [key: string]: any }; + let errorCode: string = "UnknownError"; + const errorTypeParts: String = parsedOutput.body["__type"].split("#"); + errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; + switch (errorCode) { default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -10318,27 +13919,27 @@ const deserializeAws_json1_1UpdateEndpointWeightsAndCapacitiesCommandError = asy return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateExperimentCommand = async ( +export const deserializeAws_json1_1UpdateMonitoringScheduleCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateExperimentCommandError(output, context); + return deserializeAws_json1_1UpdateMonitoringScheduleCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateExperimentResponse(data, context); - const response: UpdateExperimentCommandOutput = { + contents = deserializeAws_json1_1UpdateMonitoringScheduleResponse(data, context); + const response: UpdateMonitoringScheduleCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateExperimentCommandError = async ( +const deserializeAws_json1_1UpdateMonitoringScheduleCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10348,10 +13949,10 @@ const deserializeAws_json1_1UpdateExperimentCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ConflictException": - case "com.amazonaws.sagemaker#ConflictException": + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": response = { - ...(await deserializeAws_json1_1ConflictExceptionResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10381,27 +13982,27 @@ const deserializeAws_json1_1UpdateExperimentCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateImageCommand = async ( +export const deserializeAws_json1_1UpdateNotebookInstanceCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateImageCommandError(output, context); + return deserializeAws_json1_1UpdateNotebookInstanceCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateImageResponse(data, context); - const response: UpdateImageCommandOutput = { + contents = deserializeAws_json1_1UpdateNotebookInstanceOutput(data, context); + const response: UpdateNotebookInstanceCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateImageCommandError = async ( +const deserializeAws_json1_1UpdateNotebookInstanceCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10411,18 +14012,10 @@ const deserializeAws_json1_1UpdateImageCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? 
errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceInUse": - case "com.amazonaws.sagemaker#ResourceInUse": - response = { - ...(await deserializeAws_json1_1ResourceInUseResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": + case "ResourceLimitExceeded": + case "com.amazonaws.sagemaker#ResourceLimitExceeded": response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10444,27 +14037,27 @@ const deserializeAws_json1_1UpdateImageCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateMonitoringScheduleCommand = async ( +export const deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateMonitoringScheduleCommandError(output, context); + return deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateMonitoringScheduleResponse(data, context); - const response: UpdateMonitoringScheduleCommandOutput = { + contents = deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigOutput(data, context); + const response: UpdateNotebookInstanceLifecycleConfigCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateMonitoringScheduleCommandError = async ( +const deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommandError = async ( 
output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10482,14 +14075,6 @@ const deserializeAws_json1_1UpdateMonitoringScheduleCommandError = async ( $metadata: deserializeMetadata(output), }; break; - case "ResourceNotFound": - case "com.amazonaws.sagemaker#ResourceNotFound": - response = { - ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), - name: errorCode, - $metadata: deserializeMetadata(output), - }; - break; default: const parsedBody = parsedOutput.body; errorCode = parsedBody.code || parsedBody.Code || errorCode; @@ -10507,27 +14092,27 @@ const deserializeAws_json1_1UpdateMonitoringScheduleCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateNotebookInstanceCommand = async ( +export const deserializeAws_json1_1UpdatePipelineCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateNotebookInstanceCommandError(output, context); + return deserializeAws_json1_1UpdatePipelineCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateNotebookInstanceOutput(data, context); - const response: UpdateNotebookInstanceCommandOutput = { + contents = deserializeAws_json1_1UpdatePipelineResponse(data, context); + const response: UpdatePipelineCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateNotebookInstanceCommandError = async ( +const deserializeAws_json1_1UpdatePipelineCommandError = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, 
context), @@ -10537,10 +14122,10 @@ const deserializeAws_json1_1UpdateNotebookInstanceCommandError = async ( const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10562,27 +14147,27 @@ const deserializeAws_json1_1UpdateNotebookInstanceCommandError = async ( return Promise.reject(Object.assign(new Error(message), response)); }; -export const deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommand = async ( +export const deserializeAws_json1_1UpdatePipelineExecutionCommand = async ( output: __HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { if (output.statusCode >= 300) { - return deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommandError(output, context); + return deserializeAws_json1_1UpdatePipelineExecutionCommandError(output, context); } const data: any = await parseBody(output.body, context); let contents: any = {}; - contents = deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigOutput(data, context); - const response: UpdateNotebookInstanceLifecycleConfigCommandOutput = { + contents = deserializeAws_json1_1UpdatePipelineExecutionResponse(data, context); + const response: UpdatePipelineExecutionCommandOutput = { $metadata: deserializeMetadata(output), ...contents, }; return Promise.resolve(response); }; -const deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommandError = async ( +const deserializeAws_json1_1UpdatePipelineExecutionCommandError = async ( output: 
__HttpResponse, context: __SerdeContext -): Promise => { +): Promise => { const parsedOutput: any = { ...output, body: await parseBody(output.body, context), @@ -10592,10 +14177,10 @@ const deserializeAws_json1_1UpdateNotebookInstanceLifecycleConfigCommandError = const errorTypeParts: String = parsedOutput.body["__type"].split("#"); errorCode = errorTypeParts[1] === undefined ? errorTypeParts[0] : errorTypeParts[1]; switch (errorCode) { - case "ResourceLimitExceeded": - case "com.amazonaws.sagemaker#ResourceLimitExceeded": + case "ResourceNotFound": + case "com.amazonaws.sagemaker#ResourceNotFound": response = { - ...(await deserializeAws_json1_1ResourceLimitExceededResponse(parsedOutput, context)), + ...(await deserializeAws_json1_1ResourceNotFoundResponse(parsedOutput, context)), name: errorCode, $metadata: deserializeMetadata(output), }; @@ -10976,6 +14561,22 @@ const deserializeAws_json1_1ResourceNotFoundResponse = async ( return contents; }; +const serializeAws_json1_1ActionSource = (input: ActionSource, context: __SerdeContext): any => { + return { + ...(input.SourceId !== undefined && { SourceId: input.SourceId }), + ...(input.SourceType !== undefined && { SourceType: input.SourceType }), + ...(input.SourceUri !== undefined && { SourceUri: input.SourceUri }), + }; +}; + +const serializeAws_json1_1AddAssociationRequest = (input: AddAssociationRequest, context: __SerdeContext): any => { + return { + ...(input.AssociationType !== undefined && { AssociationType: input.AssociationType }), + ...(input.DestinationArn !== undefined && { DestinationArn: input.DestinationArn }), + ...(input.SourceArn !== undefined && { SourceArn: input.SourceArn }), + }; +}; + const serializeAws_json1_1AdditionalCodeRepositoryNamesOrUrls = (input: string[], context: __SerdeContext): any => { return input.map((entry) => entry); }; @@ -10987,6 +14588,16 @@ const serializeAws_json1_1AddTagsInput = (input: AddTagsInput, context: __SerdeC }; }; +const serializeAws_json1_1Alarm = (input: 
Alarm, context: __SerdeContext): any => { + return { + ...(input.AlarmName !== undefined && { AlarmName: input.AlarmName }), + }; +}; + +const serializeAws_json1_1AlarmList = (input: Alarm[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_json1_1Alarm(entry, context)); +}; + const serializeAws_json1_1AlgorithmSpecification = (input: AlgorithmSpecification, context: __SerdeContext): any => { return { ...(input.AlgorithmName !== undefined && { AlgorithmName: input.AlgorithmName }), @@ -11058,6 +14669,26 @@ const serializeAws_json1_1AppSpecification = (input: AppSpecification, context: }; }; +const serializeAws_json1_1ArtifactSource = (input: ArtifactSource, context: __SerdeContext): any => { + return { + ...(input.SourceTypes !== undefined && { + SourceTypes: serializeAws_json1_1ArtifactSourceTypes(input.SourceTypes, context), + }), + ...(input.SourceUri !== undefined && { SourceUri: input.SourceUri }), + }; +}; + +const serializeAws_json1_1ArtifactSourceType = (input: ArtifactSourceType, context: __SerdeContext): any => { + return { + ...(input.SourceIdType !== undefined && { SourceIdType: input.SourceIdType }), + ...(input.Value !== undefined && { Value: input.Value }), + }; +}; + +const serializeAws_json1_1ArtifactSourceTypes = (input: ArtifactSourceType[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_json1_1ArtifactSourceType(entry, context)); +}; + const serializeAws_json1_1AssociateTrialComponentRequest = ( input: AssociateTrialComponentRequest, context: __SerdeContext @@ -11068,6 +14699,19 @@ const serializeAws_json1_1AssociateTrialComponentRequest = ( }; }; +const serializeAws_json1_1AthenaDatasetDefinition = (input: AthenaDatasetDefinition, context: __SerdeContext): any => { + return { + ...(input.Catalog !== undefined && { Catalog: input.Catalog }), + ...(input.Database !== undefined && { Database: input.Database }), + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), + 
...(input.OutputCompression !== undefined && { OutputCompression: input.OutputCompression }), + ...(input.OutputFormat !== undefined && { OutputFormat: input.OutputFormat }), + ...(input.OutputS3Uri !== undefined && { OutputS3Uri: input.OutputS3Uri }), + ...(input.QueryString !== undefined && { QueryString: input.QueryString }), + ...(input.WorkGroup !== undefined && { WorkGroup: input.WorkGroup }), + }; +}; + const serializeAws_json1_1AttributeNames = (input: string[], context: __SerdeContext): any => { return input.map((entry) => entry); }; @@ -11150,6 +14794,37 @@ const serializeAws_json1_1AutoMLSecurityConfig = (input: AutoMLSecurityConfig, c }; }; +const serializeAws_json1_1AutoRollbackConfig = (input: AutoRollbackConfig, context: __SerdeContext): any => { + return { + ...(input.Alarms !== undefined && { Alarms: serializeAws_json1_1AlarmList(input.Alarms, context) }), + }; +}; + +const serializeAws_json1_1Bias = (input: Bias, context: __SerdeContext): any => { + return { + ...(input.Report !== undefined && { Report: serializeAws_json1_1MetricsSource(input.Report, context) }), + }; +}; + +const serializeAws_json1_1BlueGreenUpdatePolicy = (input: BlueGreenUpdatePolicy, context: __SerdeContext): any => { + return { + ...(input.MaximumExecutionTimeoutInSeconds !== undefined && { + MaximumExecutionTimeoutInSeconds: input.MaximumExecutionTimeoutInSeconds, + }), + ...(input.TerminationWaitInSeconds !== undefined && { TerminationWaitInSeconds: input.TerminationWaitInSeconds }), + ...(input.TrafficRoutingConfiguration !== undefined && { + TrafficRoutingConfiguration: serializeAws_json1_1TrafficRoutingConfig(input.TrafficRoutingConfiguration, context), + }), + }; +}; + +const serializeAws_json1_1CapacitySize = (input: CapacitySize, context: __SerdeContext): any => { + return { + ...(input.Type !== undefined && { Type: input.Type }), + ...(input.Value !== undefined && { Value: input.Value }), + }; +}; + const serializeAws_json1_1CaptureContentTypeHeader = ( input: 
CaptureContentTypeHeader, context: __SerdeContext @@ -11330,6 +15005,14 @@ const serializeAws_json1_1ContentTypes = (input: string[], context: __SerdeConte return input.map((entry) => entry); }; +const serializeAws_json1_1ContextSource = (input: ContextSource, context: __SerdeContext): any => { + return { + ...(input.SourceId !== undefined && { SourceId: input.SourceId }), + ...(input.SourceType !== undefined && { SourceType: input.SourceType }), + ...(input.SourceUri !== undefined && { SourceUri: input.SourceUri }), + }; +}; + const serializeAws_json1_1ContinuousParameterRange = ( input: ContinuousParameterRange, context: __SerdeContext @@ -11359,6 +15042,23 @@ const serializeAws_json1_1ContinuousParameterRangeSpecification = ( }; }; +const serializeAws_json1_1CreateActionRequest = (input: CreateActionRequest, context: __SerdeContext): any => { + return { + ...(input.ActionName !== undefined && { ActionName: input.ActionName }), + ...(input.ActionType !== undefined && { ActionType: input.ActionType }), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.MetadataProperties !== undefined && { + MetadataProperties: serializeAws_json1_1MetadataProperties(input.MetadataProperties, context), + }), + ...(input.Properties !== undefined && { + Properties: serializeAws_json1_1LineageEntityParameters(input.Properties, context), + }), + ...(input.Source !== undefined && { Source: serializeAws_json1_1ActionSource(input.Source, context) }), + ...(input.Status !== undefined && { Status: input.Status }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreateAlgorithmInput = (input: CreateAlgorithmInput, context: __SerdeContext): any => { return { ...(input.AlgorithmDescription !== undefined && { AlgorithmDescription: input.AlgorithmDescription }), @@ -11367,6 +15067,7 @@ const serializeAws_json1_1CreateAlgorithmInput = (input: CreateAlgorithmInput, c 
...(input.InferenceSpecification !== undefined && { InferenceSpecification: serializeAws_json1_1InferenceSpecification(input.InferenceSpecification, context), }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), ...(input.TrainingSpecification !== undefined && { TrainingSpecification: serializeAws_json1_1TrainingSpecification(input.TrainingSpecification, context), }), @@ -11405,6 +15106,21 @@ const serializeAws_json1_1CreateAppRequest = (input: CreateAppRequest, context: }; }; +const serializeAws_json1_1CreateArtifactRequest = (input: CreateArtifactRequest, context: __SerdeContext): any => { + return { + ...(input.ArtifactName !== undefined && { ArtifactName: input.ArtifactName }), + ...(input.ArtifactType !== undefined && { ArtifactType: input.ArtifactType }), + ...(input.MetadataProperties !== undefined && { + MetadataProperties: serializeAws_json1_1MetadataProperties(input.MetadataProperties, context), + }), + ...(input.Properties !== undefined && { + Properties: serializeAws_json1_1LineageEntityParameters(input.Properties, context), + }), + ...(input.Source !== undefined && { Source: serializeAws_json1_1ArtifactSource(input.Source, context) }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreateAutoMLJobRequest = (input: CreateAutoMLJobRequest, context: __SerdeContext): any => { return { ...(input.AutoMLJobConfig !== undefined && { @@ -11436,6 +15152,7 @@ const serializeAws_json1_1CreateCodeRepositoryInput = ( return { ...(input.CodeRepositoryName !== undefined && { CodeRepositoryName: input.CodeRepositoryName }), ...(input.GitConfig !== undefined && { GitConfig: serializeAws_json1_1GitConfig(input.GitConfig, context) }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), }; }; @@ -11459,6 +15176,19 @@ const serializeAws_json1_1CreateCompilationJobRequest = ( }; }; +const 
serializeAws_json1_1CreateContextRequest = (input: CreateContextRequest, context: __SerdeContext): any => { + return { + ...(input.ContextName !== undefined && { ContextName: input.ContextName }), + ...(input.ContextType !== undefined && { ContextType: input.ContextType }), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.Properties !== undefined && { + Properties: serializeAws_json1_1LineageEntityParameters(input.Properties, context), + }), + ...(input.Source !== undefined && { Source: serializeAws_json1_1ContextSource(input.Source, context) }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreateDomainRequest = (input: CreateDomainRequest, context: __SerdeContext): any => { return { ...(input.AppNetworkAccessType !== undefined && { AppNetworkAccessType: input.AppNetworkAccessType }), @@ -11511,6 +15241,31 @@ const serializeAws_json1_1CreateExperimentRequest = (input: CreateExperimentRequ }; }; +const serializeAws_json1_1CreateFeatureGroupRequest = ( + input: CreateFeatureGroupRequest, + context: __SerdeContext +): any => { + return { + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.EventTimeFeatureName !== undefined && { EventTimeFeatureName: input.EventTimeFeatureName }), + ...(input.FeatureDefinitions !== undefined && { + FeatureDefinitions: serializeAws_json1_1FeatureDefinitions(input.FeatureDefinitions, context), + }), + ...(input.FeatureGroupName !== undefined && { FeatureGroupName: input.FeatureGroupName }), + ...(input.OfflineStoreConfig !== undefined && { + OfflineStoreConfig: serializeAws_json1_1OfflineStoreConfig(input.OfflineStoreConfig, context), + }), + ...(input.OnlineStoreConfig !== undefined && { + OnlineStoreConfig: serializeAws_json1_1OnlineStoreConfig(input.OnlineStoreConfig, context), + }), + ...(input.RecordIdentifierFeatureName !== undefined && { + RecordIdentifierFeatureName: 
input.RecordIdentifierFeatureName, + }), + ...(input.RoleArn !== undefined && { RoleArn: input.RoleArn }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreateFlowDefinitionRequest = ( input: CreateFlowDefinitionRequest, context: __SerdeContext @@ -11649,13 +15404,35 @@ const serializeAws_json1_1CreateModelInput = (input: CreateModelInput, context: }; }; +const serializeAws_json1_1CreateModelPackageGroupInput = ( + input: CreateModelPackageGroupInput, + context: __SerdeContext +): any => { + return { + ...(input.ModelPackageGroupDescription !== undefined && { + ModelPackageGroupDescription: input.ModelPackageGroupDescription, + }), + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreateModelPackageInput = (input: CreateModelPackageInput, context: __SerdeContext): any => { return { ...(input.CertifyForMarketplace !== undefined && { CertifyForMarketplace: input.CertifyForMarketplace }), + ClientToken: input.ClientToken ?? 
generateIdempotencyToken(), ...(input.InferenceSpecification !== undefined && { InferenceSpecification: serializeAws_json1_1InferenceSpecification(input.InferenceSpecification, context), }), + ...(input.MetadataProperties !== undefined && { + MetadataProperties: serializeAws_json1_1MetadataProperties(input.MetadataProperties, context), + }), + ...(input.ModelApprovalStatus !== undefined && { ModelApprovalStatus: input.ModelApprovalStatus }), + ...(input.ModelMetrics !== undefined && { + ModelMetrics: serializeAws_json1_1ModelMetrics(input.ModelMetrics, context), + }), ...(input.ModelPackageDescription !== undefined && { ModelPackageDescription: input.ModelPackageDescription }), + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), ...(input.ModelPackageName !== undefined && { ModelPackageName: input.ModelPackageName }), ...(input.SourceAlgorithmSpecification !== undefined && { SourceAlgorithmSpecification: serializeAws_json1_1SourceAlgorithmSpecification( @@ -11663,6 +15440,7 @@ const serializeAws_json1_1CreateModelPackageInput = (input: CreateModelPackageIn context ), }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), ...(input.ValidationSpecification !== undefined && { ValidationSpecification: serializeAws_json1_1ModelPackageValidationSpecification( input.ValidationSpecification, @@ -11733,6 +15511,18 @@ const serializeAws_json1_1CreateNotebookInstanceLifecycleConfigInput = ( }; }; +const serializeAws_json1_1CreatePipelineRequest = (input: CreatePipelineRequest, context: __SerdeContext): any => { + return { + ClientRequestToken: input.ClientRequestToken ?? 
generateIdempotencyToken(), + ...(input.PipelineDefinition !== undefined && { PipelineDefinition: input.PipelineDefinition }), + ...(input.PipelineDescription !== undefined && { PipelineDescription: input.PipelineDescription }), + ...(input.PipelineDisplayName !== undefined && { PipelineDisplayName: input.PipelineDisplayName }), + ...(input.PipelineName !== undefined && { PipelineName: input.PipelineName }), + ...(input.RoleArn !== undefined && { RoleArn: input.RoleArn }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreatePresignedDomainUrlRequest = ( input: CreatePresignedDomainUrlRequest, context: __SerdeContext @@ -11793,6 +15583,20 @@ const serializeAws_json1_1CreateProcessingJobRequest = ( }; }; +const serializeAws_json1_1CreateProjectInput = (input: CreateProjectInput, context: __SerdeContext): any => { + return { + ...(input.ProjectDescription !== undefined && { ProjectDescription: input.ProjectDescription }), + ...(input.ProjectName !== undefined && { ProjectName: input.ProjectName }), + ...(input.ServiceCatalogProvisioningDetails !== undefined && { + ServiceCatalogProvisioningDetails: serializeAws_json1_1ServiceCatalogProvisioningDetails( + input.ServiceCatalogProvisioningDetails, + context + ), + }), + ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), + }; +}; + const serializeAws_json1_1CreateTrainingJobRequest = ( input: CreateTrainingJobRequest, context: __SerdeContext @@ -11890,6 +15694,9 @@ const serializeAws_json1_1CreateTrialComponentRequest = ( ...(input.InputArtifacts !== undefined && { InputArtifacts: serializeAws_json1_1TrialComponentArtifacts(input.InputArtifacts, context), }), + ...(input.MetadataProperties !== undefined && { + MetadataProperties: serializeAws_json1_1MetadataProperties(input.MetadataProperties, context), + }), ...(input.OutputArtifacts !== undefined && { OutputArtifacts: 
serializeAws_json1_1TrialComponentArtifacts(input.OutputArtifacts, context), }), @@ -11907,6 +15714,9 @@ const serializeAws_json1_1CreateTrialRequest = (input: CreateTrialRequest, conte return { ...(input.DisplayName !== undefined && { DisplayName: input.DisplayName }), ...(input.ExperimentName !== undefined && { ExperimentName: input.ExperimentName }), + ...(input.MetadataProperties !== undefined && { + MetadataProperties: serializeAws_json1_1MetadataProperties(input.MetadataProperties, context), + }), ...(input.Tags !== undefined && { Tags: serializeAws_json1_1TagList(input.Tags, context) }), ...(input.TrialName !== undefined && { TrialName: input.TrialName }), }; @@ -11995,6 +15805,14 @@ const serializeAws_json1_1DataCaptureConfig = (input: DataCaptureConfig, context }; }; +const serializeAws_json1_1DataCatalogConfig = (input: DataCatalogConfig, context: __SerdeContext): any => { + return { + ...(input.Catalog !== undefined && { Catalog: input.Catalog }), + ...(input.Database !== undefined && { Database: input.Database }), + ...(input.TableName !== undefined && { TableName: input.TableName }), + }; +}; + const serializeAws_json1_1DataProcessing = (input: DataProcessing, context: __SerdeContext): any => { return { ...(input.InputFilter !== undefined && { InputFilter: input.InputFilter }), @@ -12003,6 +15821,23 @@ const serializeAws_json1_1DataProcessing = (input: DataProcessing, context: __Se }; }; +const serializeAws_json1_1DatasetDefinition = (input: DatasetDefinition, context: __SerdeContext): any => { + return { + ...(input.AthenaDatasetDefinition !== undefined && { + AthenaDatasetDefinition: serializeAws_json1_1AthenaDatasetDefinition(input.AthenaDatasetDefinition, context), + }), + ...(input.DataDistributionType !== undefined && { DataDistributionType: input.DataDistributionType }), + ...(input.InputMode !== undefined && { InputMode: input.InputMode }), + ...(input.LocalPath !== undefined && { LocalPath: input.LocalPath }), + 
...(input.RedshiftDatasetDefinition !== undefined && { + RedshiftDatasetDefinition: serializeAws_json1_1RedshiftDatasetDefinition( + input.RedshiftDatasetDefinition, + context + ), + }), + }; +}; + const serializeAws_json1_1DataSource = (input: DataSource, context: __SerdeContext): any => { return { ...(input.FileSystemDataSource !== undefined && { @@ -12045,6 +15880,12 @@ const serializeAws_json1_1DebugRuleConfigurations = (input: DebugRuleConfigurati return input.map((entry) => serializeAws_json1_1DebugRuleConfiguration(entry, context)); }; +const serializeAws_json1_1DeleteActionRequest = (input: DeleteActionRequest, context: __SerdeContext): any => { + return { + ...(input.ActionName !== undefined && { ActionName: input.ActionName }), + }; +}; + const serializeAws_json1_1DeleteAlgorithmInput = (input: DeleteAlgorithmInput, context: __SerdeContext): any => { return { ...(input.AlgorithmName !== undefined && { AlgorithmName: input.AlgorithmName }), @@ -12069,6 +15910,23 @@ const serializeAws_json1_1DeleteAppRequest = (input: DeleteAppRequest, context: }; }; +const serializeAws_json1_1DeleteArtifactRequest = (input: DeleteArtifactRequest, context: __SerdeContext): any => { + return { + ...(input.ArtifactArn !== undefined && { ArtifactArn: input.ArtifactArn }), + ...(input.Source !== undefined && { Source: serializeAws_json1_1ArtifactSource(input.Source, context) }), + }; +}; + +const serializeAws_json1_1DeleteAssociationRequest = ( + input: DeleteAssociationRequest, + context: __SerdeContext +): any => { + return { + ...(input.DestinationArn !== undefined && { DestinationArn: input.DestinationArn }), + ...(input.SourceArn !== undefined && { SourceArn: input.SourceArn }), + }; +}; + const serializeAws_json1_1DeleteCodeRepositoryInput = ( input: DeleteCodeRepositoryInput, context: __SerdeContext @@ -12078,6 +15936,12 @@ const serializeAws_json1_1DeleteCodeRepositoryInput = ( }; }; +const serializeAws_json1_1DeleteContextRequest = (input: DeleteContextRequest, 
context: __SerdeContext): any => { + return { + ...(input.ContextName !== undefined && { ContextName: input.ContextName }), + }; +}; + const serializeAws_json1_1DeleteDomainRequest = (input: DeleteDomainRequest, context: __SerdeContext): any => { return { ...(input.DomainId !== undefined && { DomainId: input.DomainId }), @@ -12108,6 +15972,15 @@ const serializeAws_json1_1DeleteExperimentRequest = (input: DeleteExperimentRequ }; }; +const serializeAws_json1_1DeleteFeatureGroupRequest = ( + input: DeleteFeatureGroupRequest, + context: __SerdeContext +): any => { + return { + ...(input.FeatureGroupName !== undefined && { FeatureGroupName: input.FeatureGroupName }), + }; +}; + const serializeAws_json1_1DeleteFlowDefinitionRequest = ( input: DeleteFlowDefinitionRequest, context: __SerdeContext @@ -12148,6 +16021,24 @@ const serializeAws_json1_1DeleteModelInput = (input: DeleteModelInput, context: }; }; +const serializeAws_json1_1DeleteModelPackageGroupInput = ( + input: DeleteModelPackageGroupInput, + context: __SerdeContext +): any => { + return { + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + }; +}; + +const serializeAws_json1_1DeleteModelPackageGroupPolicyInput = ( + input: DeleteModelPackageGroupPolicyInput, + context: __SerdeContext +): any => { + return { + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + }; +}; + const serializeAws_json1_1DeleteModelPackageInput = (input: DeleteModelPackageInput, context: __SerdeContext): any => { return { ...(input.ModelPackageName !== undefined && { ModelPackageName: input.ModelPackageName }), @@ -12183,6 +16074,19 @@ const serializeAws_json1_1DeleteNotebookInstanceLifecycleConfigInput = ( }; }; +const serializeAws_json1_1DeletePipelineRequest = (input: DeletePipelineRequest, context: __SerdeContext): any => { + return { + ClientRequestToken: input.ClientRequestToken ?? 
generateIdempotencyToken(), + ...(input.PipelineName !== undefined && { PipelineName: input.PipelineName }), + }; +}; + +const serializeAws_json1_1DeleteProjectInput = (input: DeleteProjectInput, context: __SerdeContext): any => { + return { + ...(input.ProjectName !== undefined && { ProjectName: input.ProjectName }), + }; +}; + const serializeAws_json1_1DeleteTagsInput = (input: DeleteTagsInput, context: __SerdeContext): any => { return { ...(input.ResourceArn !== undefined && { ResourceArn: input.ResourceArn }), @@ -12227,6 +16131,23 @@ const serializeAws_json1_1DeleteWorkteamRequest = (input: DeleteWorkteamRequest, }; }; +const serializeAws_json1_1DeploymentConfig = (input: DeploymentConfig, context: __SerdeContext): any => { + return { + ...(input.AutoRollbackConfiguration !== undefined && { + AutoRollbackConfiguration: serializeAws_json1_1AutoRollbackConfig(input.AutoRollbackConfiguration, context), + }), + ...(input.BlueGreenUpdatePolicy !== undefined && { + BlueGreenUpdatePolicy: serializeAws_json1_1BlueGreenUpdatePolicy(input.BlueGreenUpdatePolicy, context), + }), + }; +}; + +const serializeAws_json1_1DescribeActionRequest = (input: DescribeActionRequest, context: __SerdeContext): any => { + return { + ...(input.ActionName !== undefined && { ActionName: input.ActionName }), + }; +}; + const serializeAws_json1_1DescribeAlgorithmInput = (input: DescribeAlgorithmInput, context: __SerdeContext): any => { return { ...(input.AlgorithmName !== undefined && { AlgorithmName: input.AlgorithmName }), @@ -12251,6 +16172,12 @@ const serializeAws_json1_1DescribeAppRequest = (input: DescribeAppRequest, conte }; }; +const serializeAws_json1_1DescribeArtifactRequest = (input: DescribeArtifactRequest, context: __SerdeContext): any => { + return { + ...(input.ArtifactArn !== undefined && { ArtifactArn: input.ArtifactArn }), + }; +}; + const serializeAws_json1_1DescribeAutoMLJobRequest = ( input: DescribeAutoMLJobRequest, context: __SerdeContext @@ -12278,6 +16205,12 @@ const 
serializeAws_json1_1DescribeCompilationJobRequest = ( }; }; +const serializeAws_json1_1DescribeContextRequest = (input: DescribeContextRequest, context: __SerdeContext): any => { + return { + ...(input.ContextName !== undefined && { ContextName: input.ContextName }), + }; +}; + const serializeAws_json1_1DescribeDomainRequest = (input: DescribeDomainRequest, context: __SerdeContext): any => { return { ...(input.DomainId !== undefined && { DomainId: input.DomainId }), @@ -12308,6 +16241,16 @@ const serializeAws_json1_1DescribeExperimentRequest = ( }; }; +const serializeAws_json1_1DescribeFeatureGroupRequest = ( + input: DescribeFeatureGroupRequest, + context: __SerdeContext +): any => { + return { + ...(input.FeatureGroupName !== undefined && { FeatureGroupName: input.FeatureGroupName }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + }; +}; + const serializeAws_json1_1DescribeFlowDefinitionRequest = ( input: DescribeFlowDefinitionRequest, context: __SerdeContext @@ -12368,6 +16311,15 @@ const serializeAws_json1_1DescribeModelInput = (input: DescribeModelInput, conte }; }; +const serializeAws_json1_1DescribeModelPackageGroupInput = ( + input: DescribeModelPackageGroupInput, + context: __SerdeContext +): any => { + return { + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + }; +}; + const serializeAws_json1_1DescribeModelPackageInput = ( input: DescribeModelPackageInput, context: __SerdeContext @@ -12406,6 +16358,30 @@ const serializeAws_json1_1DescribeNotebookInstanceLifecycleConfigInput = ( }; }; +const serializeAws_json1_1DescribePipelineDefinitionForExecutionRequest = ( + input: DescribePipelineDefinitionForExecutionRequest, + context: __SerdeContext +): any => { + return { + ...(input.PipelineExecutionArn !== undefined && { PipelineExecutionArn: input.PipelineExecutionArn }), + }; +}; + +const serializeAws_json1_1DescribePipelineExecutionRequest = ( + input: 
DescribePipelineExecutionRequest, + context: __SerdeContext +): any => { + return { + ...(input.PipelineExecutionArn !== undefined && { PipelineExecutionArn: input.PipelineExecutionArn }), + }; +}; + +const serializeAws_json1_1DescribePipelineRequest = (input: DescribePipelineRequest, context: __SerdeContext): any => { + return { + ...(input.PipelineName !== undefined && { PipelineName: input.PipelineName }), + }; +}; + const serializeAws_json1_1DescribeProcessingJobRequest = ( input: DescribeProcessingJobRequest, context: __SerdeContext @@ -12415,6 +16391,12 @@ const serializeAws_json1_1DescribeProcessingJobRequest = ( }; }; +const serializeAws_json1_1DescribeProjectInput = (input: DescribeProjectInput, context: __SerdeContext): any => { + return { + ...(input.ProjectName !== undefined && { ProjectName: input.ProjectName }), + }; +}; + const serializeAws_json1_1DescribeSubscribedWorkteamRequest = ( input: DescribeSubscribedWorkteamRequest, context: __SerdeContext @@ -12500,6 +16482,13 @@ const serializeAws_json1_1DesiredWeightAndCapacityList = ( return input.map((entry) => serializeAws_json1_1DesiredWeightAndCapacity(entry, context)); }; +const serializeAws_json1_1DisableSagemakerServicecatalogPortfolioInput = ( + input: DisableSagemakerServicecatalogPortfolioInput, + context: __SerdeContext +): any => { + return {}; +}; + const serializeAws_json1_1DisassociateTrialComponentRequest = ( input: DisassociateTrialComponentRequest, context: __SerdeContext @@ -12510,6 +16499,13 @@ const serializeAws_json1_1DisassociateTrialComponentRequest = ( }; }; +const serializeAws_json1_1EnableSagemakerServicecatalogPortfolioInput = ( + input: EnableSagemakerServicecatalogPortfolioInput, + context: __SerdeContext +): any => { + return {}; +}; + const serializeAws_json1_1EndpointInput = (input: EndpointInput, context: __SerdeContext): any => { return { ...(input.EndpointName !== undefined && { EndpointName: input.EndpointName }), @@ -12539,6 +16535,23 @@ const 
serializeAws_json1_1ExperimentConfig = (input: ExperimentConfig, context: }; }; +const serializeAws_json1_1Explainability = (input: Explainability, context: __SerdeContext): any => { + return { + ...(input.Report !== undefined && { Report: serializeAws_json1_1MetricsSource(input.Report, context) }), + }; +}; + +const serializeAws_json1_1FeatureDefinition = (input: FeatureDefinition, context: __SerdeContext): any => { + return { + ...(input.FeatureName !== undefined && { FeatureName: input.FeatureName }), + ...(input.FeatureType !== undefined && { FeatureType: input.FeatureType }), + }; +}; + +const serializeAws_json1_1FeatureDefinitions = (input: FeatureDefinition[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_json1_1FeatureDefinition(entry, context)); +}; + const serializeAws_json1_1FileSystemConfig = (input: FileSystemConfig, context: __SerdeContext): any => { return { ...(input.DefaultGid !== undefined && { DefaultGid: input.DefaultGid }), @@ -12582,6 +16595,22 @@ const serializeAws_json1_1FlowDefinitionTaskKeywords = (input: string[], context return input.map((entry) => entry); }; +const serializeAws_json1_1GetModelPackageGroupPolicyInput = ( + input: GetModelPackageGroupPolicyInput, + context: __SerdeContext +): any => { + return { + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + }; +}; + +const serializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusInput = ( + input: GetSagemakerServicecatalogPortfolioStatusInput, + context: __SerdeContext +): any => { + return {}; +}; + const serializeAws_json1_1GetSearchSuggestionsRequest = ( input: GetSearchSuggestionsRequest, context: __SerdeContext @@ -13096,6 +17125,32 @@ const serializeAws_json1_1LabelingJobStoppingConditions = ( }; }; +const serializeAws_json1_1LineageEntityParameters = ( + input: { [key: string]: string }, + context: __SerdeContext +): any => { + return Object.entries(input).reduce( + (acc: { [key: 
string]: string }, [key, value]: [string, any]) => ({ + ...acc, + [key]: value, + }), + {} + ); +}; + +const serializeAws_json1_1ListActionsRequest = (input: ListActionsRequest, context: __SerdeContext): any => { + return { + ...(input.ActionType !== undefined && { ActionType: input.ActionType }), + ...(input.CreatedAfter !== undefined && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }), + ...(input.CreatedBefore !== undefined && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 1000) }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + ...(input.SourceUri !== undefined && { SourceUri: input.SourceUri }), + }; +}; + const serializeAws_json1_1ListAlgorithmsInput = (input: ListAlgorithmsInput, context: __SerdeContext): any => { return { ...(input.CreationTimeAfter !== undefined && { @@ -13148,6 +17203,35 @@ const serializeAws_json1_1ListAppsRequest = (input: ListAppsRequest, context: __ }; }; +const serializeAws_json1_1ListArtifactsRequest = (input: ListArtifactsRequest, context: __SerdeContext): any => { + return { + ...(input.ArtifactType !== undefined && { ArtifactType: input.ArtifactType }), + ...(input.CreatedAfter !== undefined && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }), + ...(input.CreatedBefore !== undefined && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 1000) }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + ...(input.SourceUri !== undefined && { SourceUri: input.SourceUri }), + }; +}; + +const 
serializeAws_json1_1ListAssociationsRequest = (input: ListAssociationsRequest, context: __SerdeContext): any => { + return { + ...(input.AssociationType !== undefined && { AssociationType: input.AssociationType }), + ...(input.CreatedAfter !== undefined && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }), + ...(input.CreatedBefore !== undefined && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 1000) }), + ...(input.DestinationArn !== undefined && { DestinationArn: input.DestinationArn }), + ...(input.DestinationType !== undefined && { DestinationType: input.DestinationType }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + ...(input.SourceArn !== undefined && { SourceArn: input.SourceArn }), + ...(input.SourceType !== undefined && { SourceType: input.SourceType }), + }; +}; + const serializeAws_json1_1ListAutoMLJobsRequest = (input: ListAutoMLJobsRequest, context: __SerdeContext): any => { return { ...(input.CreationTimeAfter !== undefined && { @@ -13237,6 +17321,19 @@ const serializeAws_json1_1ListCompilationJobsRequest = ( }; }; +const serializeAws_json1_1ListContextsRequest = (input: ListContextsRequest, context: __SerdeContext): any => { + return { + ...(input.ContextType !== undefined && { ContextType: input.ContextType }), + ...(input.CreatedAfter !== undefined && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }), + ...(input.CreatedBefore !== undefined && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 1000) }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { 
SortOrder: input.SortOrder }), + ...(input.SourceUri !== undefined && { SourceUri: input.SourceUri }), + }; +}; + const serializeAws_json1_1ListDomainsRequest = (input: ListDomainsRequest, context: __SerdeContext): any => { return { ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), @@ -13297,6 +17394,27 @@ const serializeAws_json1_1ListExperimentsRequest = (input: ListExperimentsReques }; }; +const serializeAws_json1_1ListFeatureGroupsRequest = ( + input: ListFeatureGroupsRequest, + context: __SerdeContext +): any => { + return { + ...(input.CreationTimeAfter !== undefined && { + CreationTimeAfter: Math.round(input.CreationTimeAfter.getTime() / 1000), + }), + ...(input.CreationTimeBefore !== undefined && { + CreationTimeBefore: Math.round(input.CreationTimeBefore.getTime() / 1000), + }), + ...(input.FeatureGroupStatusEquals !== undefined && { FeatureGroupStatusEquals: input.FeatureGroupStatusEquals }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NameContains !== undefined && { NameContains: input.NameContains }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.OfflineStoreStatusEquals !== undefined && { OfflineStoreStatusEquals: input.OfflineStoreStatusEquals }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + }; +}; + const serializeAws_json1_1ListFlowDefinitionsRequest = ( input: ListFlowDefinitionsRequest, context: __SerdeContext @@ -13444,6 +17562,29 @@ const serializeAws_json1_1ListLabelingJobsRequest = (input: ListLabelingJobsRequ }; }; +const serializeAws_json1_1ListLineageEntityParameterKey = (input: string[], context: __SerdeContext): any => { + return input.map((entry) => entry); +}; + +const serializeAws_json1_1ListModelPackageGroupsInput = ( + input: ListModelPackageGroupsInput, + context: __SerdeContext +): any => { + return { + ...(input.CreationTimeAfter !== 
undefined && { + CreationTimeAfter: Math.round(input.CreationTimeAfter.getTime() / 1000), + }), + ...(input.CreationTimeBefore !== undefined && { + CreationTimeBefore: Math.round(input.CreationTimeBefore.getTime() / 1000), + }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NameContains !== undefined && { NameContains: input.NameContains }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + }; +}; + const serializeAws_json1_1ListModelPackagesInput = (input: ListModelPackagesInput, context: __SerdeContext): any => { return { ...(input.CreationTimeAfter !== undefined && { @@ -13453,6 +17594,9 @@ const serializeAws_json1_1ListModelPackagesInput = (input: ListModelPackagesInpu CreationTimeBefore: Math.round(input.CreationTimeBefore.getTime() / 1000), }), ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.ModelApprovalStatus !== undefined && { ModelApprovalStatus: input.ModelApprovalStatus }), + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + ...(input.ModelPackageType !== undefined && { ModelPackageType: input.ModelPackageType }), ...(input.NameContains !== undefined && { NameContains: input.NameContains }), ...(input.NextToken !== undefined && { NextToken: input.NextToken }), ...(input.SortBy !== undefined && { SortBy: input.SortBy }), @@ -13596,6 +17740,56 @@ const serializeAws_json1_1ListNotebookInstancesInput = ( }; }; +const serializeAws_json1_1ListPipelineExecutionsRequest = ( + input: ListPipelineExecutionsRequest, + context: __SerdeContext +): any => { + return { + ...(input.CreatedAfter !== undefined && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }), + ...(input.CreatedBefore !== undefined && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 
1000) }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.PipelineName !== undefined && { PipelineName: input.PipelineName }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + }; +}; + +const serializeAws_json1_1ListPipelineExecutionStepsRequest = ( + input: ListPipelineExecutionStepsRequest, + context: __SerdeContext +): any => { + return { + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.PipelineExecutionArn !== undefined && { PipelineExecutionArn: input.PipelineExecutionArn }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + }; +}; + +const serializeAws_json1_1ListPipelineParametersForExecutionRequest = ( + input: ListPipelineParametersForExecutionRequest, + context: __SerdeContext +): any => { + return { + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.PipelineExecutionArn !== undefined && { PipelineExecutionArn: input.PipelineExecutionArn }), + }; +}; + +const serializeAws_json1_1ListPipelinesRequest = (input: ListPipelinesRequest, context: __SerdeContext): any => { + return { + ...(input.CreatedAfter !== undefined && { CreatedAfter: Math.round(input.CreatedAfter.getTime() / 1000) }), + ...(input.CreatedBefore !== undefined && { CreatedBefore: Math.round(input.CreatedBefore.getTime() / 1000) }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.PipelineNamePrefix !== undefined && { PipelineNamePrefix: input.PipelineNamePrefix }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== 
undefined && { SortOrder: input.SortOrder }), + }; +}; + const serializeAws_json1_1ListProcessingJobsRequest = ( input: ListProcessingJobsRequest, context: __SerdeContext @@ -13607,18 +17801,34 @@ const serializeAws_json1_1ListProcessingJobsRequest = ( ...(input.CreationTimeBefore !== undefined && { CreationTimeBefore: Math.round(input.CreationTimeBefore.getTime() / 1000), }), - ...(input.LastModifiedTimeAfter !== undefined && { - LastModifiedTimeAfter: Math.round(input.LastModifiedTimeAfter.getTime() / 1000), - }), - ...(input.LastModifiedTimeBefore !== undefined && { - LastModifiedTimeBefore: Math.round(input.LastModifiedTimeBefore.getTime() / 1000), - }), + ...(input.LastModifiedTimeAfter !== undefined && { + LastModifiedTimeAfter: Math.round(input.LastModifiedTimeAfter.getTime() / 1000), + }), + ...(input.LastModifiedTimeBefore !== undefined && { + LastModifiedTimeBefore: Math.round(input.LastModifiedTimeBefore.getTime() / 1000), + }), + ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), + ...(input.NameContains !== undefined && { NameContains: input.NameContains }), + ...(input.NextToken !== undefined && { NextToken: input.NextToken }), + ...(input.SortBy !== undefined && { SortBy: input.SortBy }), + ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), + ...(input.StatusEquals !== undefined && { StatusEquals: input.StatusEquals }), + }; +}; + +const serializeAws_json1_1ListProjectsInput = (input: ListProjectsInput, context: __SerdeContext): any => { + return { + ...(input.CreationTimeAfter !== undefined && { + CreationTimeAfter: Math.round(input.CreationTimeAfter.getTime() / 1000), + }), + ...(input.CreationTimeBefore !== undefined && { + CreationTimeBefore: Math.round(input.CreationTimeBefore.getTime() / 1000), + }), ...(input.MaxResults !== undefined && { MaxResults: input.MaxResults }), ...(input.NameContains !== undefined && { NameContains: input.NameContains }), ...(input.NextToken !== undefined && { NextToken: 
input.NextToken }), ...(input.SortBy !== undefined && { SortBy: input.SortBy }), ...(input.SortOrder !== undefined && { SortOrder: input.SortOrder }), - ...(input.StatusEquals !== undefined && { StatusEquals: input.StatusEquals }), }; }; @@ -13786,6 +17996,15 @@ const serializeAws_json1_1MemberDefinitions = (input: MemberDefinition[], contex return input.map((entry) => serializeAws_json1_1MemberDefinition(entry, context)); }; +const serializeAws_json1_1MetadataProperties = (input: MetadataProperties, context: __SerdeContext): any => { + return { + ...(input.CommitId !== undefined && { CommitId: input.CommitId }), + ...(input.GeneratedBy !== undefined && { GeneratedBy: input.GeneratedBy }), + ...(input.ProjectId !== undefined && { ProjectId: input.ProjectId }), + ...(input.Repository !== undefined && { Repository: input.Repository }), + }; +}; + const serializeAws_json1_1MetricDefinition = (input: MetricDefinition, context: __SerdeContext): any => { return { ...(input.Name !== undefined && { Name: input.Name }), @@ -13797,6 +18016,14 @@ const serializeAws_json1_1MetricDefinitionList = (input: MetricDefinition[], con return input.map((entry) => serializeAws_json1_1MetricDefinition(entry, context)); }; +const serializeAws_json1_1MetricsSource = (input: MetricsSource, context: __SerdeContext): any => { + return { + ...(input.ContentDigest !== undefined && { ContentDigest: input.ContentDigest }), + ...(input.ContentType !== undefined && { ContentType: input.ContentType }), + ...(input.S3Uri !== undefined && { S3Uri: input.S3Uri }), + }; +}; + const serializeAws_json1_1ModelClientConfig = (input: ModelClientConfig, context: __SerdeContext): any => { return { ...(input.InvocationsMaxRetries !== undefined && { InvocationsMaxRetries: input.InvocationsMaxRetries }), @@ -13806,6 +18033,30 @@ const serializeAws_json1_1ModelClientConfig = (input: ModelClientConfig, context }; }; +const serializeAws_json1_1ModelDataQuality = (input: ModelDataQuality, context: __SerdeContext): 
any => { + return { + ...(input.Constraints !== undefined && { + Constraints: serializeAws_json1_1MetricsSource(input.Constraints, context), + }), + ...(input.Statistics !== undefined && { Statistics: serializeAws_json1_1MetricsSource(input.Statistics, context) }), + }; +}; + +const serializeAws_json1_1ModelMetrics = (input: ModelMetrics, context: __SerdeContext): any => { + return { + ...(input.Bias !== undefined && { Bias: serializeAws_json1_1Bias(input.Bias, context) }), + ...(input.Explainability !== undefined && { + Explainability: serializeAws_json1_1Explainability(input.Explainability, context), + }), + ...(input.ModelDataQuality !== undefined && { + ModelDataQuality: serializeAws_json1_1ModelDataQuality(input.ModelDataQuality, context), + }), + ...(input.ModelQuality !== undefined && { + ModelQuality: serializeAws_json1_1ModelQuality(input.ModelQuality, context), + }), + }; +}; + const serializeAws_json1_1ModelPackageContainerDefinition = ( input: ModelPackageContainerDefinition, context: __SerdeContext @@ -13857,6 +18108,15 @@ const serializeAws_json1_1ModelPackageValidationSpecification = ( }; }; +const serializeAws_json1_1ModelQuality = (input: ModelQuality, context: __SerdeContext): any => { + return { + ...(input.Constraints !== undefined && { + Constraints: serializeAws_json1_1MetricsSource(input.Constraints, context), + }), + ...(input.Statistics !== undefined && { Statistics: serializeAws_json1_1MetricsSource(input.Statistics, context) }), + }; +}; + const serializeAws_json1_1MonitoringAppSpecification = ( input: MonitoringAppSpecification, context: __SerdeContext @@ -14092,6 +18352,18 @@ const serializeAws_json1_1NotificationConfiguration = ( }; }; +const serializeAws_json1_1OfflineStoreConfig = (input: OfflineStoreConfig, context: __SerdeContext): any => { + return { + ...(input.DataCatalogConfig !== undefined && { + DataCatalogConfig: serializeAws_json1_1DataCatalogConfig(input.DataCatalogConfig, context), + }), + 
...(input.DisableGlueTableCreation !== undefined && { DisableGlueTableCreation: input.DisableGlueTableCreation }), + ...(input.S3StorageConfig !== undefined && { + S3StorageConfig: serializeAws_json1_1S3StorageConfig(input.S3StorageConfig, context), + }), + }; +}; + const serializeAws_json1_1OidcConfig = (input: OidcConfig, context: __SerdeContext): any => { return { ...(input.AuthorizationEndpoint !== undefined && { AuthorizationEndpoint: input.AuthorizationEndpoint }), @@ -14111,9 +18383,28 @@ const serializeAws_json1_1OidcMemberDefinition = (input: OidcMemberDefinition, c }; }; +const serializeAws_json1_1OnlineStoreConfig = (input: OnlineStoreConfig, context: __SerdeContext): any => { + return { + ...(input.EnableOnlineStore !== undefined && { EnableOnlineStore: input.EnableOnlineStore }), + ...(input.SecurityConfig !== undefined && { + SecurityConfig: serializeAws_json1_1OnlineStoreSecurityConfig(input.SecurityConfig, context), + }), + }; +}; + +const serializeAws_json1_1OnlineStoreSecurityConfig = ( + input: OnlineStoreSecurityConfig, + context: __SerdeContext +): any => { + return { + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), + }; +}; + const serializeAws_json1_1OutputConfig = (input: OutputConfig, context: __SerdeContext): any => { return { ...(input.CompilerOptions !== undefined && { CompilerOptions: input.CompilerOptions }), + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), ...(input.S3OutputLocation !== undefined && { S3OutputLocation: input.S3OutputLocation }), ...(input.TargetDevice !== undefined && { TargetDevice: input.TargetDevice }), ...(input.TargetPlatform !== undefined && { @@ -14129,6 +18420,17 @@ const serializeAws_json1_1OutputDataConfig = (input: OutputDataConfig, context: }; }; +const serializeAws_json1_1Parameter = (input: Parameter, context: __SerdeContext): any => { + return { + ...(input.Name !== undefined && { Name: input.Name }), + ...(input.Value !== undefined && { Value: input.Value }), + 
}; +}; + +const serializeAws_json1_1ParameterList = (input: Parameter[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_json1_1Parameter(entry, context)); +}; + const serializeAws_json1_1ParameterRange = (input: ParameterRange, context: __SerdeContext): any => { return { ...(input.CategoricalParameterRangeSpecification !== undefined && { @@ -14216,8 +18518,21 @@ const serializeAws_json1_1ProcessingEnvironmentMap = ( ); }; +const serializeAws_json1_1ProcessingFeatureStoreOutput = ( + input: ProcessingFeatureStoreOutput, + context: __SerdeContext +): any => { + return { + ...(input.FeatureGroupName !== undefined && { FeatureGroupName: input.FeatureGroupName }), + }; +}; + const serializeAws_json1_1ProcessingInput = (input: ProcessingInput, context: __SerdeContext): any => { return { + ...(input.AppManaged !== undefined && { AppManaged: input.AppManaged }), + ...(input.DatasetDefinition !== undefined && { + DatasetDefinition: serializeAws_json1_1DatasetDefinition(input.DatasetDefinition, context), + }), ...(input.InputName !== undefined && { InputName: input.InputName }), ...(input.S3Input !== undefined && { S3Input: serializeAws_json1_1ProcessingS3Input(input.S3Input, context) }), }; @@ -14229,6 +18544,10 @@ const serializeAws_json1_1ProcessingInputs = (input: ProcessingInput[], context: const serializeAws_json1_1ProcessingOutput = (input: ProcessingOutput, context: __SerdeContext): any => { return { + ...(input.AppManaged !== undefined && { AppManaged: input.AppManaged }), + ...(input.FeatureStoreOutput !== undefined && { + FeatureStoreOutput: serializeAws_json1_1ProcessingFeatureStoreOutput(input.FeatureStoreOutput, context), + }), ...(input.OutputName !== undefined && { OutputName: input.OutputName }), ...(input.S3Output !== undefined && { S3Output: serializeAws_json1_1ProcessingS3Output(input.S3Output, context) }), }; @@ -14302,6 +18621,17 @@ const serializeAws_json1_1PropertyNameQuery = (input: PropertyNameQuery, context }; }; 
+const serializeAws_json1_1ProvisioningParameter = (input: ProvisioningParameter, context: __SerdeContext): any => { + return { + ...(input.Key !== undefined && { Key: input.Key }), + ...(input.Value !== undefined && { Value: input.Value }), + }; +}; + +const serializeAws_json1_1ProvisioningParameters = (input: ProvisioningParameter[], context: __SerdeContext): any => { + return input.map((entry) => serializeAws_json1_1ProvisioningParameter(entry, context)); +}; + const serializeAws_json1_1PublicWorkforceTaskPrice = ( input: PublicWorkforceTaskPrice, context: __SerdeContext @@ -14311,6 +18641,16 @@ const serializeAws_json1_1PublicWorkforceTaskPrice = ( }; }; +const serializeAws_json1_1PutModelPackageGroupPolicyInput = ( + input: PutModelPackageGroupPolicyInput, + context: __SerdeContext +): any => { + return { + ...(input.ModelPackageGroupName !== undefined && { ModelPackageGroupName: input.ModelPackageGroupName }), + ...(input.ResourcePolicy !== undefined && { ResourcePolicy: input.ResourcePolicy }), + }; +}; + const serializeAws_json1_1RealtimeInferenceInstanceTypes = ( input: (ProductionVariantInstanceType | string)[], context: __SerdeContext @@ -14318,6 +18658,23 @@ const serializeAws_json1_1RealtimeInferenceInstanceTypes = ( return input.map((entry) => entry); }; +const serializeAws_json1_1RedshiftDatasetDefinition = ( + input: RedshiftDatasetDefinition, + context: __SerdeContext +): any => { + return { + ...(input.ClusterId !== undefined && { ClusterId: input.ClusterId }), + ...(input.ClusterRoleArn !== undefined && { ClusterRoleArn: input.ClusterRoleArn }), + ...(input.Database !== undefined && { Database: input.Database }), + ...(input.DbUser !== undefined && { DbUser: input.DbUser }), + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), + ...(input.OutputCompression !== undefined && { OutputCompression: input.OutputCompression }), + ...(input.OutputFormat !== undefined && { OutputFormat: input.OutputFormat }), + ...(input.OutputS3Uri !== 
undefined && { OutputS3Uri: input.OutputS3Uri }), + ...(input.QueryString !== undefined && { QueryString: input.QueryString }), + }; +}; + const serializeAws_json1_1RenderableTask = (input: RenderableTask, context: __SerdeContext): any => { return { ...(input.Input !== undefined && { Input: input.Input }), @@ -14388,6 +18745,13 @@ const serializeAws_json1_1S3DataSource = (input: S3DataSource, context: __SerdeC }; }; +const serializeAws_json1_1S3StorageConfig = (input: S3StorageConfig, context: __SerdeContext): any => { + return { + ...(input.KmsKeyId !== undefined && { KmsKeyId: input.KmsKeyId }), + ...(input.S3Uri !== undefined && { S3Uri: input.S3Uri }), + }; +}; + const serializeAws_json1_1ScheduleConfig = (input: ScheduleConfig, context: __SerdeContext): any => { return { ...(input.ScheduleExpression !== undefined && { ScheduleExpression: input.ScheduleExpression }), @@ -14428,6 +18792,20 @@ const serializeAws_json1_1SecurityGroupIds = (input: string[], context: __SerdeC return input.map((entry) => entry); }; +const serializeAws_json1_1ServiceCatalogProvisioningDetails = ( + input: ServiceCatalogProvisioningDetails, + context: __SerdeContext +): any => { + return { + ...(input.PathId !== undefined && { PathId: input.PathId }), + ...(input.ProductId !== undefined && { ProductId: input.ProductId }), + ...(input.ProvisioningArtifactId !== undefined && { ProvisioningArtifactId: input.ProvisioningArtifactId }), + ...(input.ProvisioningParameters !== undefined && { + ProvisioningParameters: serializeAws_json1_1ProvisioningParameters(input.ProvisioningParameters, context), + }), + }; +}; + const serializeAws_json1_1SharingSettings = (input: SharingSettings, context: __SerdeContext): any => { return { ...(input.NotebookOutputOption !== undefined && { NotebookOutputOption: input.NotebookOutputOption }), @@ -14488,6 +18866,25 @@ const serializeAws_json1_1StartNotebookInstanceInput = ( }; }; +const serializeAws_json1_1StartPipelineExecutionRequest = ( + input: 
StartPipelineExecutionRequest, + context: __SerdeContext +): any => { + return { + ClientRequestToken: input.ClientRequestToken ?? generateIdempotencyToken(), + ...(input.PipelineExecutionDescription !== undefined && { + PipelineExecutionDescription: input.PipelineExecutionDescription, + }), + ...(input.PipelineExecutionDisplayName !== undefined && { + PipelineExecutionDisplayName: input.PipelineExecutionDisplayName, + }), + ...(input.PipelineName !== undefined && { PipelineName: input.PipelineName }), + ...(input.PipelineParameters !== undefined && { + PipelineParameters: serializeAws_json1_1ParameterList(input.PipelineParameters, context), + }), + }; +}; + const serializeAws_json1_1StopAutoMLJobRequest = (input: StopAutoMLJobRequest, context: __SerdeContext): any => { return { ...(input.AutoMLJobName !== undefined && { AutoMLJobName: input.AutoMLJobName }), @@ -14545,6 +18942,16 @@ const serializeAws_json1_1StoppingCondition = (input: StoppingCondition, context }; }; +const serializeAws_json1_1StopPipelineExecutionRequest = ( + input: StopPipelineExecutionRequest, + context: __SerdeContext +): any => { + return { + ClientRequestToken: input.ClientRequestToken ?? 
generateIdempotencyToken(), + ...(input.PipelineExecutionArn !== undefined && { PipelineExecutionArn: input.PipelineExecutionArn }), + }; +}; + const serializeAws_json1_1StopProcessingJobRequest = ( input: StopProcessingJobRequest, context: __SerdeContext @@ -14620,6 +19027,14 @@ const serializeAws_json1_1TensorBoardOutputConfig = (input: TensorBoardOutputCon }; }; +const serializeAws_json1_1TrafficRoutingConfig = (input: TrafficRoutingConfig, context: __SerdeContext): any => { + return { + ...(input.CanarySize !== undefined && { CanarySize: serializeAws_json1_1CapacitySize(input.CanarySize, context) }), + ...(input.Type !== undefined && { Type: input.Type }), + ...(input.WaitIntervalInSeconds !== undefined && { WaitIntervalInSeconds: input.WaitIntervalInSeconds }), + }; +}; + const serializeAws_json1_1TrainingInstanceTypes = ( input: (TrainingInstanceType | string)[], context: __SerdeContext @@ -14802,10 +19217,11 @@ const serializeAws_json1_1TrialComponentParameterValue = ( input: TrialComponentParameterValue, context: __SerdeContext ): any => { - return { - ...(input.NumberValue !== undefined && { NumberValue: input.NumberValue }), - ...(input.StringValue !== undefined && { StringValue: input.StringValue }), - }; + return TrialComponentParameterValue.visit(input, { + NumberValue: (value) => ({ NumberValue: value }), + StringValue: (value) => ({ StringValue: value }), + _: (name, value) => ({ name: value } as any), + }); }; const serializeAws_json1_1TrialComponentStatus = (input: TrialComponentStatus, context: __SerdeContext): any => { @@ -14839,6 +19255,20 @@ const serializeAws_json1_1UiTemplate = (input: UiTemplate, context: __SerdeConte }; }; +const serializeAws_json1_1UpdateActionRequest = (input: UpdateActionRequest, context: __SerdeContext): any => { + return { + ...(input.ActionName !== undefined && { ActionName: input.ActionName }), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.Properties !== undefined && { + 
Properties: serializeAws_json1_1LineageEntityParameters(input.Properties, context), + }), + ...(input.PropertiesToRemove !== undefined && { + PropertiesToRemove: serializeAws_json1_1ListLineageEntityParameterKey(input.PropertiesToRemove, context), + }), + ...(input.Status !== undefined && { Status: input.Status }), + }; +}; + const serializeAws_json1_1UpdateAppImageConfigRequest = ( input: UpdateAppImageConfigRequest, context: __SerdeContext @@ -14851,6 +19281,19 @@ const serializeAws_json1_1UpdateAppImageConfigRequest = ( }; }; +const serializeAws_json1_1UpdateArtifactRequest = (input: UpdateArtifactRequest, context: __SerdeContext): any => { + return { + ...(input.ArtifactArn !== undefined && { ArtifactArn: input.ArtifactArn }), + ...(input.ArtifactName !== undefined && { ArtifactName: input.ArtifactName }), + ...(input.Properties !== undefined && { + Properties: serializeAws_json1_1LineageEntityParameters(input.Properties, context), + }), + ...(input.PropertiesToRemove !== undefined && { + PropertiesToRemove: serializeAws_json1_1ListLineageEntityParameterKey(input.PropertiesToRemove, context), + }), + }; +}; + const serializeAws_json1_1UpdateCodeRepositoryInput = ( input: UpdateCodeRepositoryInput, context: __SerdeContext @@ -14863,6 +19306,19 @@ const serializeAws_json1_1UpdateCodeRepositoryInput = ( }; }; +const serializeAws_json1_1UpdateContextRequest = (input: UpdateContextRequest, context: __SerdeContext): any => { + return { + ...(input.ContextName !== undefined && { ContextName: input.ContextName }), + ...(input.Description !== undefined && { Description: input.Description }), + ...(input.Properties !== undefined && { + Properties: serializeAws_json1_1LineageEntityParameters(input.Properties, context), + }), + ...(input.PropertiesToRemove !== undefined && { + PropertiesToRemove: serializeAws_json1_1ListLineageEntityParameterKey(input.PropertiesToRemove, context), + }), + }; +}; + const serializeAws_json1_1UpdateDomainRequest = (input: UpdateDomainRequest, 
context: __SerdeContext): any => { return { ...(input.DefaultUserSettings !== undefined && { @@ -14874,6 +19330,9 @@ const serializeAws_json1_1UpdateDomainRequest = (input: UpdateDomainRequest, con const serializeAws_json1_1UpdateEndpointInput = (input: UpdateEndpointInput, context: __SerdeContext): any => { return { + ...(input.DeploymentConfig !== undefined && { + DeploymentConfig: serializeAws_json1_1DeploymentConfig(input.DeploymentConfig, context), + }), ...(input.EndpointConfigName !== undefined && { EndpointConfigName: input.EndpointConfigName }), ...(input.EndpointName !== undefined && { EndpointName: input.EndpointName }), ...(input.ExcludeRetainedVariantProperties !== undefined && { @@ -14923,6 +19382,14 @@ const serializeAws_json1_1UpdateImageRequest = (input: UpdateImageRequest, conte }; }; +const serializeAws_json1_1UpdateModelPackageInput = (input: UpdateModelPackageInput, context: __SerdeContext): any => { + return { + ...(input.ApprovalDescription !== undefined && { ApprovalDescription: input.ApprovalDescription }), + ...(input.ModelApprovalStatus !== undefined && { ModelApprovalStatus: input.ModelApprovalStatus }), + ...(input.ModelPackageArn !== undefined && { ModelPackageArn: input.ModelPackageArn }), + }; +}; + const serializeAws_json1_1UpdateMonitoringScheduleRequest = ( input: UpdateMonitoringScheduleRequest, context: __SerdeContext @@ -14988,6 +19455,31 @@ const serializeAws_json1_1UpdateNotebookInstanceLifecycleConfigInput = ( }; }; +const serializeAws_json1_1UpdatePipelineExecutionRequest = ( + input: UpdatePipelineExecutionRequest, + context: __SerdeContext +): any => { + return { + ...(input.PipelineExecutionArn !== undefined && { PipelineExecutionArn: input.PipelineExecutionArn }), + ...(input.PipelineExecutionDescription !== undefined && { + PipelineExecutionDescription: input.PipelineExecutionDescription, + }), + ...(input.PipelineExecutionDisplayName !== undefined && { + PipelineExecutionDisplayName: 
input.PipelineExecutionDisplayName, + }), + }; +}; + +const serializeAws_json1_1UpdatePipelineRequest = (input: UpdatePipelineRequest, context: __SerdeContext): any => { + return { + ...(input.PipelineDefinition !== undefined && { PipelineDefinition: input.PipelineDefinition }), + ...(input.PipelineDescription !== undefined && { PipelineDescription: input.PipelineDescription }), + ...(input.PipelineDisplayName !== undefined && { PipelineDisplayName: input.PipelineDisplayName }), + ...(input.PipelineName !== undefined && { PipelineName: input.PipelineName }), + ...(input.RoleArn !== undefined && { RoleArn: input.RoleArn }), + }; +}; + const serializeAws_json1_1UpdateTrialComponentRequest = ( input: UpdateTrialComponentRequest, context: __SerdeContext @@ -15117,6 +19609,47 @@ const serializeAws_json1_1VpcSecurityGroupIds = (input: string[], context: __Ser return input.map((entry) => entry); }; +const deserializeAws_json1_1ActionSource = (output: any, context: __SerdeContext): ActionSource => { + return { + SourceId: output.SourceId !== undefined && output.SourceId !== null ? output.SourceId : undefined, + SourceType: output.SourceType !== undefined && output.SourceType !== null ? output.SourceType : undefined, + SourceUri: output.SourceUri !== undefined && output.SourceUri !== null ? output.SourceUri : undefined, + } as any; +}; + +const deserializeAws_json1_1ActionSummaries = (output: any, context: __SerdeContext): ActionSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ActionSummary(entry, context)); +}; + +const deserializeAws_json1_1ActionSummary = (output: any, context: __SerdeContext): ActionSummary => { + return { + ActionArn: output.ActionArn !== undefined && output.ActionArn !== null ? output.ActionArn : undefined, + ActionName: output.ActionName !== undefined && output.ActionName !== null ? output.ActionName : undefined, + ActionType: output.ActionType !== undefined && output.ActionType !== null ? 
output.ActionType : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + Source: + output.Source !== undefined && output.Source !== null + ? deserializeAws_json1_1ActionSource(output.Source, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? output.Status : undefined, + } as any; +}; + +const deserializeAws_json1_1AddAssociationResponse = (output: any, context: __SerdeContext): AddAssociationResponse => { + return { + DestinationArn: + output.DestinationArn !== undefined && output.DestinationArn !== null ? output.DestinationArn : undefined, + SourceArn: output.SourceArn !== undefined && output.SourceArn !== null ? output.SourceArn : undefined, + } as any; +}; + const deserializeAws_json1_1AdditionalCodeRepositoryNamesOrUrls = (output: any, context: __SerdeContext): string[] => { return (output || []).map((entry: any) => entry); }; @@ -15130,6 +19663,16 @@ const deserializeAws_json1_1AddTagsOutput = (output: any, context: __SerdeContex } as any; }; +const deserializeAws_json1_1Alarm = (output: any, context: __SerdeContext): Alarm => { + return { + AlarmName: output.AlarmName !== undefined && output.AlarmName !== null ? 
output.AlarmName : undefined, + } as any; +}; + +const deserializeAws_json1_1AlarmList = (output: any, context: __SerdeContext): Alarm[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1Alarm(entry, context)); +}; + const deserializeAws_json1_1AlgorithmSpecification = (output: any, context: __SerdeContext): AlgorithmSpecification => { return { AlgorithmName: @@ -15289,38 +19832,130 @@ const deserializeAws_json1_1AppImageConfigDetails = (output: any, context: __Ser } as any; }; -const deserializeAws_json1_1AppImageConfigList = (output: any, context: __SerdeContext): AppImageConfigDetails[] => { - return (output || []).map((entry: any) => deserializeAws_json1_1AppImageConfigDetails(entry, context)); -}; - -const deserializeAws_json1_1AppList = (output: any, context: __SerdeContext): AppDetails[] => { - return (output || []).map((entry: any) => deserializeAws_json1_1AppDetails(entry, context)); +const deserializeAws_json1_1AppImageConfigList = (output: any, context: __SerdeContext): AppImageConfigDetails[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1AppImageConfigDetails(entry, context)); +}; + +const deserializeAws_json1_1AppList = (output: any, context: __SerdeContext): AppDetails[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1AppDetails(entry, context)); +}; + +const deserializeAws_json1_1AppSpecification = (output: any, context: __SerdeContext): AppSpecification => { + return { + ContainerArguments: + output.ContainerArguments !== undefined && output.ContainerArguments !== null + ? deserializeAws_json1_1ContainerArguments(output.ContainerArguments, context) + : undefined, + ContainerEntrypoint: + output.ContainerEntrypoint !== undefined && output.ContainerEntrypoint !== null + ? deserializeAws_json1_1ContainerEntrypoint(output.ContainerEntrypoint, context) + : undefined, + ImageUri: output.ImageUri !== undefined && output.ImageUri !== null ? 
output.ImageUri : undefined, + } as any; +}; + +const deserializeAws_json1_1ArtifactSource = (output: any, context: __SerdeContext): ArtifactSource => { + return { + SourceTypes: + output.SourceTypes !== undefined && output.SourceTypes !== null + ? deserializeAws_json1_1ArtifactSourceTypes(output.SourceTypes, context) + : undefined, + SourceUri: output.SourceUri !== undefined && output.SourceUri !== null ? output.SourceUri : undefined, + } as any; +}; + +const deserializeAws_json1_1ArtifactSourceType = (output: any, context: __SerdeContext): ArtifactSourceType => { + return { + SourceIdType: output.SourceIdType !== undefined && output.SourceIdType !== null ? output.SourceIdType : undefined, + Value: output.Value !== undefined && output.Value !== null ? output.Value : undefined, + } as any; +}; + +const deserializeAws_json1_1ArtifactSourceTypes = (output: any, context: __SerdeContext): ArtifactSourceType[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ArtifactSourceType(entry, context)); +}; + +const deserializeAws_json1_1ArtifactSummaries = (output: any, context: __SerdeContext): ArtifactSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ArtifactSummary(entry, context)); +}; + +const deserializeAws_json1_1ArtifactSummary = (output: any, context: __SerdeContext): ArtifactSummary => { + return { + ArtifactArn: output.ArtifactArn !== undefined && output.ArtifactArn !== null ? output.ArtifactArn : undefined, + ArtifactName: output.ArtifactName !== undefined && output.ArtifactName !== null ? output.ArtifactName : undefined, + ArtifactType: output.ArtifactType !== undefined && output.ArtifactType !== null ? output.ArtifactType : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? 
new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + Source: + output.Source !== undefined && output.Source !== null + ? deserializeAws_json1_1ArtifactSource(output.Source, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1AssociateTrialComponentResponse = ( + output: any, + context: __SerdeContext +): AssociateTrialComponentResponse => { + return { + TrialArn: output.TrialArn !== undefined && output.TrialArn !== null ? output.TrialArn : undefined, + TrialComponentArn: + output.TrialComponentArn !== undefined && output.TrialComponentArn !== null + ? output.TrialComponentArn + : undefined, + } as any; +}; + +const deserializeAws_json1_1AssociationSummaries = (output: any, context: __SerdeContext): AssociationSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1AssociationSummary(entry, context)); }; -const deserializeAws_json1_1AppSpecification = (output: any, context: __SerdeContext): AppSpecification => { +const deserializeAws_json1_1AssociationSummary = (output: any, context: __SerdeContext): AssociationSummary => { return { - ContainerArguments: - output.ContainerArguments !== undefined && output.ContainerArguments !== null - ? deserializeAws_json1_1ContainerArguments(output.ContainerArguments, context) + AssociationType: + output.AssociationType !== undefined && output.AssociationType !== null ? output.AssociationType : undefined, + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) : undefined, - ContainerEntrypoint: - output.ContainerEntrypoint !== undefined && output.ContainerEntrypoint !== null - ? deserializeAws_json1_1ContainerEntrypoint(output.ContainerEntrypoint, context) + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) : undefined, - ImageUri: output.ImageUri !== undefined && output.ImageUri !== null ? 
output.ImageUri : undefined, + DestinationArn: + output.DestinationArn !== undefined && output.DestinationArn !== null ? output.DestinationArn : undefined, + DestinationName: + output.DestinationName !== undefined && output.DestinationName !== null ? output.DestinationName : undefined, + DestinationType: + output.DestinationType !== undefined && output.DestinationType !== null ? output.DestinationType : undefined, + SourceArn: output.SourceArn !== undefined && output.SourceArn !== null ? output.SourceArn : undefined, + SourceName: output.SourceName !== undefined && output.SourceName !== null ? output.SourceName : undefined, + SourceType: output.SourceType !== undefined && output.SourceType !== null ? output.SourceType : undefined, } as any; }; -const deserializeAws_json1_1AssociateTrialComponentResponse = ( +const deserializeAws_json1_1AthenaDatasetDefinition = ( output: any, context: __SerdeContext -): AssociateTrialComponentResponse => { +): AthenaDatasetDefinition => { return { - TrialArn: output.TrialArn !== undefined && output.TrialArn !== null ? output.TrialArn : undefined, - TrialComponentArn: - output.TrialComponentArn !== undefined && output.TrialComponentArn !== null - ? output.TrialComponentArn + Catalog: output.Catalog !== undefined && output.Catalog !== null ? output.Catalog : undefined, + Database: output.Database !== undefined && output.Database !== null ? output.Database : undefined, + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, + OutputCompression: + output.OutputCompression !== undefined && output.OutputCompression !== null + ? output.OutputCompression : undefined, + OutputFormat: output.OutputFormat !== undefined && output.OutputFormat !== null ? output.OutputFormat : undefined, + OutputS3Uri: output.OutputS3Uri !== undefined && output.OutputS3Uri !== null ? output.OutputS3Uri : undefined, + QueryString: output.QueryString !== undefined && output.QueryString !== null ? 
output.QueryString : undefined, + WorkGroup: output.WorkGroup !== undefined && output.WorkGroup !== null ? output.WorkGroup : undefined, } as any; }; @@ -15541,10 +20176,61 @@ const deserializeAws_json1_1AutoMLSecurityConfig = (output: any, context: __Serd } as any; }; +const deserializeAws_json1_1AutoRollbackConfig = (output: any, context: __SerdeContext): AutoRollbackConfig => { + return { + Alarms: + output.Alarms !== undefined && output.Alarms !== null + ? deserializeAws_json1_1AlarmList(output.Alarms, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1Bias = (output: any, context: __SerdeContext): Bias => { + return { + Report: + output.Report !== undefined && output.Report !== null + ? deserializeAws_json1_1MetricsSource(output.Report, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1BlueGreenUpdatePolicy = (output: any, context: __SerdeContext): BlueGreenUpdatePolicy => { + return { + MaximumExecutionTimeoutInSeconds: + output.MaximumExecutionTimeoutInSeconds !== undefined && output.MaximumExecutionTimeoutInSeconds !== null + ? output.MaximumExecutionTimeoutInSeconds + : undefined, + TerminationWaitInSeconds: + output.TerminationWaitInSeconds !== undefined && output.TerminationWaitInSeconds !== null + ? output.TerminationWaitInSeconds + : undefined, + TrafficRoutingConfiguration: + output.TrafficRoutingConfiguration !== undefined && output.TrafficRoutingConfiguration !== null + ? deserializeAws_json1_1TrafficRoutingConfig(output.TrafficRoutingConfiguration, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1CacheHitResult = (output: any, context: __SerdeContext): CacheHitResult => { + return { + SourcePipelineExecutionArn: + output.SourcePipelineExecutionArn !== undefined && output.SourcePipelineExecutionArn !== null + ? 
output.SourcePipelineExecutionArn + : undefined, + } as any; +}; + const deserializeAws_json1_1CandidateSteps = (output: any, context: __SerdeContext): AutoMLCandidateStep[] => { return (output || []).map((entry: any) => deserializeAws_json1_1AutoMLCandidateStep(entry, context)); }; +const deserializeAws_json1_1CapacitySize = (output: any, context: __SerdeContext): CapacitySize => { + return { + Type: output.Type !== undefined && output.Type !== null ? output.Type : undefined, + Value: output.Value !== undefined && output.Value !== null ? output.Value : undefined, + } as any; +}; + const deserializeAws_json1_1CaptureContentTypeHeader = ( output: any, context: __SerdeContext @@ -15804,6 +20490,12 @@ const deserializeAws_json1_1CompressionTypes = (output: any, context: __SerdeCon return (output || []).map((entry: any) => entry); }; +const deserializeAws_json1_1ConditionStepMetadata = (output: any, context: __SerdeContext): ConditionStepMetadata => { + return { + Outcome: output.Outcome !== undefined && output.Outcome !== null ? output.Outcome : undefined, + } as any; +}; + const deserializeAws_json1_1ConflictException = (output: any, context: __SerdeContext): ConflictException => { return { Message: output.Message !== undefined && output.Message !== null ? output.Message : undefined, @@ -15855,6 +20547,38 @@ const deserializeAws_json1_1ContentTypes = (output: any, context: __SerdeContext return (output || []).map((entry: any) => entry); }; +const deserializeAws_json1_1ContextSource = (output: any, context: __SerdeContext): ContextSource => { + return { + SourceId: output.SourceId !== undefined && output.SourceId !== null ? output.SourceId : undefined, + SourceType: output.SourceType !== undefined && output.SourceType !== null ? output.SourceType : undefined, + SourceUri: output.SourceUri !== undefined && output.SourceUri !== null ? 
output.SourceUri : undefined, + } as any; +}; + +const deserializeAws_json1_1ContextSummaries = (output: any, context: __SerdeContext): ContextSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ContextSummary(entry, context)); +}; + +const deserializeAws_json1_1ContextSummary = (output: any, context: __SerdeContext): ContextSummary => { + return { + ContextArn: output.ContextArn !== undefined && output.ContextArn !== null ? output.ContextArn : undefined, + ContextName: output.ContextName !== undefined && output.ContextName !== null ? output.ContextName : undefined, + ContextType: output.ContextType !== undefined && output.ContextType !== null ? output.ContextType : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + Source: + output.Source !== undefined && output.Source !== null + ? deserializeAws_json1_1ContextSource(output.Source, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1ContinuousParameterRange = ( output: any, context: __SerdeContext @@ -15884,6 +20608,12 @@ const deserializeAws_json1_1ContinuousParameterRangeSpecification = ( } as any; }; +const deserializeAws_json1_1CreateActionResponse = (output: any, context: __SerdeContext): CreateActionResponse => { + return { + ActionArn: output.ActionArn !== undefined && output.ActionArn !== null ? output.ActionArn : undefined, + } as any; +}; + const deserializeAws_json1_1CreateAlgorithmOutput = (output: any, context: __SerdeContext): CreateAlgorithmOutput => { return { AlgorithmArn: output.AlgorithmArn !== undefined && output.AlgorithmArn !== null ? 
output.AlgorithmArn : undefined, @@ -15908,6 +20638,12 @@ const deserializeAws_json1_1CreateAppResponse = (output: any, context: __SerdeCo } as any; }; +const deserializeAws_json1_1CreateArtifactResponse = (output: any, context: __SerdeContext): CreateArtifactResponse => { + return { + ArtifactArn: output.ArtifactArn !== undefined && output.ArtifactArn !== null ? output.ArtifactArn : undefined, + } as any; +}; + const deserializeAws_json1_1CreateAutoMLJobResponse = ( output: any, context: __SerdeContext @@ -15941,6 +20677,12 @@ const deserializeAws_json1_1CreateCompilationJobResponse = ( } as any; }; +const deserializeAws_json1_1CreateContextResponse = (output: any, context: __SerdeContext): CreateContextResponse => { + return { + ContextArn: output.ContextArn !== undefined && output.ContextArn !== null ? output.ContextArn : undefined, + } as any; +}; + const deserializeAws_json1_1CreateDomainResponse = (output: any, context: __SerdeContext): CreateDomainResponse => { return { DomainArn: output.DomainArn !== undefined && output.DomainArn !== null ? output.DomainArn : undefined, @@ -15976,6 +20718,16 @@ const deserializeAws_json1_1CreateExperimentResponse = ( } as any; }; +const deserializeAws_json1_1CreateFeatureGroupResponse = ( + output: any, + context: __SerdeContext +): CreateFeatureGroupResponse => { + return { + FeatureGroupArn: + output.FeatureGroupArn !== undefined && output.FeatureGroupArn !== null ? output.FeatureGroupArn : undefined, + } as any; +}; + const deserializeAws_json1_1CreateFlowDefinitionResponse = ( output: any, context: __SerdeContext @@ -16042,6 +20794,18 @@ const deserializeAws_json1_1CreateModelOutput = (output: any, context: __SerdeCo } as any; }; +const deserializeAws_json1_1CreateModelPackageGroupOutput = ( + output: any, + context: __SerdeContext +): CreateModelPackageGroupOutput => { + return { + ModelPackageGroupArn: + output.ModelPackageGroupArn !== undefined && output.ModelPackageGroupArn !== null + ? 
output.ModelPackageGroupArn + : undefined, + } as any; +}; + const deserializeAws_json1_1CreateModelPackageOutput = ( output: any, context: __SerdeContext @@ -16088,6 +20852,12 @@ const deserializeAws_json1_1CreateNotebookInstanceOutput = ( } as any; }; +const deserializeAws_json1_1CreatePipelineResponse = (output: any, context: __SerdeContext): CreatePipelineResponse => { + return { + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + } as any; +}; + const deserializeAws_json1_1CreatePresignedDomainUrlResponse = ( output: any, context: __SerdeContext @@ -16118,6 +20888,13 @@ const deserializeAws_json1_1CreateProcessingJobResponse = ( } as any; }; +const deserializeAws_json1_1CreateProjectOutput = (output: any, context: __SerdeContext): CreateProjectOutput => { + return { + ProjectArn: output.ProjectArn !== undefined && output.ProjectArn !== null ? output.ProjectArn : undefined, + ProjectId: output.ProjectId !== undefined && output.ProjectId !== null ? output.ProjectId : undefined, + } as any; +}; + const deserializeAws_json1_1CreateTrainingJobResponse = ( output: any, context: __SerdeContext @@ -16244,6 +21021,14 @@ const deserializeAws_json1_1DataCaptureConfigSummary = ( } as any; }; +const deserializeAws_json1_1DataCatalogConfig = (output: any, context: __SerdeContext): DataCatalogConfig => { + return { + Catalog: output.Catalog !== undefined && output.Catalog !== null ? output.Catalog : undefined, + Database: output.Database !== undefined && output.Database !== null ? output.Database : undefined, + TableName: output.TableName !== undefined && output.TableName !== null ? output.TableName : undefined, + } as any; +}; + const deserializeAws_json1_1DataProcessing = (output: any, context: __SerdeContext): DataProcessing => { return { InputFilter: output.InputFilter !== undefined && output.InputFilter !== null ? 
output.InputFilter : undefined, @@ -16252,6 +21037,25 @@ const deserializeAws_json1_1DataProcessing = (output: any, context: __SerdeConte } as any; }; +const deserializeAws_json1_1DatasetDefinition = (output: any, context: __SerdeContext): DatasetDefinition => { + return { + AthenaDatasetDefinition: + output.AthenaDatasetDefinition !== undefined && output.AthenaDatasetDefinition !== null + ? deserializeAws_json1_1AthenaDatasetDefinition(output.AthenaDatasetDefinition, context) + : undefined, + DataDistributionType: + output.DataDistributionType !== undefined && output.DataDistributionType !== null + ? output.DataDistributionType + : undefined, + InputMode: output.InputMode !== undefined && output.InputMode !== null ? output.InputMode : undefined, + LocalPath: output.LocalPath !== undefined && output.LocalPath !== null ? output.LocalPath : undefined, + RedshiftDatasetDefinition: + output.RedshiftDatasetDefinition !== undefined && output.RedshiftDatasetDefinition !== null + ? deserializeAws_json1_1RedshiftDatasetDefinition(output.RedshiftDatasetDefinition, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1DataSource = (output: any, context: __SerdeContext): DataSource => { return { FileSystemDataSource: @@ -16342,6 +21146,35 @@ const deserializeAws_json1_1DebugRuleEvaluationStatuses = ( return (output || []).map((entry: any) => deserializeAws_json1_1DebugRuleEvaluationStatus(entry, context)); }; +const deserializeAws_json1_1DeleteActionResponse = (output: any, context: __SerdeContext): DeleteActionResponse => { + return { + ActionArn: output.ActionArn !== undefined && output.ActionArn !== null ? output.ActionArn : undefined, + } as any; +}; + +const deserializeAws_json1_1DeleteArtifactResponse = (output: any, context: __SerdeContext): DeleteArtifactResponse => { + return { + ArtifactArn: output.ArtifactArn !== undefined && output.ArtifactArn !== null ? 
output.ArtifactArn : undefined, + } as any; +}; + +const deserializeAws_json1_1DeleteAssociationResponse = ( + output: any, + context: __SerdeContext +): DeleteAssociationResponse => { + return { + DestinationArn: + output.DestinationArn !== undefined && output.DestinationArn !== null ? output.DestinationArn : undefined, + SourceArn: output.SourceArn !== undefined && output.SourceArn !== null ? output.SourceArn : undefined, + } as any; +}; + +const deserializeAws_json1_1DeleteContextResponse = (output: any, context: __SerdeContext): DeleteContextResponse => { + return { + ContextArn: output.ContextArn !== undefined && output.ContextArn !== null ? output.ContextArn : undefined, + } as any; +}; + const deserializeAws_json1_1DeleteExperimentResponse = ( output: any, context: __SerdeContext @@ -16377,6 +21210,12 @@ const deserializeAws_json1_1DeleteImageVersionResponse = ( return {} as any; }; +const deserializeAws_json1_1DeletePipelineResponse = (output: any, context: __SerdeContext): DeletePipelineResponse => { + return { + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + } as any; +}; + const deserializeAws_json1_1DeleteTagsOutput = (output: any, context: __SerdeContext): DeleteTagsOutput => { return {} as any; }; @@ -16429,6 +21268,57 @@ const deserializeAws_json1_1DeployedImages = (output: any, context: __SerdeConte return (output || []).map((entry: any) => deserializeAws_json1_1DeployedImage(entry, context)); }; +const deserializeAws_json1_1DeploymentConfig = (output: any, context: __SerdeContext): DeploymentConfig => { + return { + AutoRollbackConfiguration: + output.AutoRollbackConfiguration !== undefined && output.AutoRollbackConfiguration !== null + ? deserializeAws_json1_1AutoRollbackConfig(output.AutoRollbackConfiguration, context) + : undefined, + BlueGreenUpdatePolicy: + output.BlueGreenUpdatePolicy !== undefined && output.BlueGreenUpdatePolicy !== null + ? 
deserializeAws_json1_1BlueGreenUpdatePolicy(output.BlueGreenUpdatePolicy, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1DescribeActionResponse = (output: any, context: __SerdeContext): DescribeActionResponse => { + return { + ActionArn: output.ActionArn !== undefined && output.ActionArn !== null ? output.ActionArn : undefined, + ActionName: output.ActionName !== undefined && output.ActionName !== null ? output.ActionName : undefined, + ActionType: output.ActionType !== undefined && output.ActionType !== null ? output.ActionType : undefined, + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, + Properties: + output.Properties !== undefined && output.Properties !== null + ? deserializeAws_json1_1LineageEntityParameters(output.Properties, context) + : undefined, + Source: + output.Source !== undefined && output.Source !== null + ? deserializeAws_json1_1ActionSource(output.Source, context) + : undefined, + Status: output.Status !== undefined && output.Status !== null ? 
output.Status : undefined, + } as any; +}; + const deserializeAws_json1_1DescribeAlgorithmOutput = ( output: any, context: __SerdeContext @@ -16529,6 +21419,45 @@ const deserializeAws_json1_1DescribeAppResponse = (output: any, context: __Serde } as any; }; +const deserializeAws_json1_1DescribeArtifactResponse = ( + output: any, + context: __SerdeContext +): DescribeArtifactResponse => { + return { + ArtifactArn: output.ArtifactArn !== undefined && output.ArtifactArn !== null ? output.ArtifactArn : undefined, + ArtifactName: output.ArtifactName !== undefined && output.ArtifactName !== null ? output.ArtifactName : undefined, + ArtifactType: output.ArtifactType !== undefined && output.ArtifactType !== null ? output.ArtifactType : undefined, + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, + Properties: + output.Properties !== undefined && output.Properties !== null + ? deserializeAws_json1_1LineageEntityParameters(output.Properties, context) + : undefined, + Source: + output.Source !== undefined && output.Source !== null + ? 
deserializeAws_json1_1ArtifactSource(output.Source, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1DescribeAutoMLJobResponse = ( output: any, context: __SerdeContext @@ -16659,18 +21588,58 @@ const deserializeAws_json1_1DescribeCompilationJobResponse = ( output.LastModifiedTime !== undefined && output.LastModifiedTime !== null ? new Date(Math.round(output.LastModifiedTime * 1000)) : undefined, - ModelArtifacts: - output.ModelArtifacts !== undefined && output.ModelArtifacts !== null - ? deserializeAws_json1_1ModelArtifacts(output.ModelArtifacts, context) - : undefined, - OutputConfig: - output.OutputConfig !== undefined && output.OutputConfig !== null - ? deserializeAws_json1_1OutputConfig(output.OutputConfig, context) + ModelArtifacts: + output.ModelArtifacts !== undefined && output.ModelArtifacts !== null + ? deserializeAws_json1_1ModelArtifacts(output.ModelArtifacts, context) + : undefined, + ModelDigests: + output.ModelDigests !== undefined && output.ModelDigests !== null + ? deserializeAws_json1_1ModelDigests(output.ModelDigests, context) + : undefined, + OutputConfig: + output.OutputConfig !== undefined && output.OutputConfig !== null + ? deserializeAws_json1_1OutputConfig(output.OutputConfig, context) + : undefined, + RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? output.RoleArn : undefined, + StoppingCondition: + output.StoppingCondition !== undefined && output.StoppingCondition !== null + ? deserializeAws_json1_1StoppingCondition(output.StoppingCondition, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1DescribeContextResponse = ( + output: any, + context: __SerdeContext +): DescribeContextResponse => { + return { + ContextArn: output.ContextArn !== undefined && output.ContextArn !== null ? output.ContextArn : undefined, + ContextName: output.ContextName !== undefined && output.ContextName !== null ? 
output.ContextName : undefined, + ContextType: output.ContextType !== undefined && output.ContextType !== null ? output.ContextType : undefined, + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + Properties: + output.Properties !== undefined && output.Properties !== null + ? deserializeAws_json1_1LineageEntityParameters(output.Properties, context) : undefined, - RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? output.RoleArn : undefined, - StoppingCondition: - output.StoppingCondition !== undefined && output.StoppingCondition !== null - ? deserializeAws_json1_1StoppingCondition(output.StoppingCondition, context) + Source: + output.Source !== undefined && output.Source !== null + ? deserializeAws_json1_1ContextSource(output.Source, context) : undefined, } as any; }; @@ -16772,6 +21741,10 @@ const deserializeAws_json1_1DescribeEndpointOutput = (output: any, context: __Se output.EndpointStatus !== undefined && output.EndpointStatus !== null ? output.EndpointStatus : undefined, FailureReason: output.FailureReason !== undefined && output.FailureReason !== null ? output.FailureReason : undefined, + LastDeploymentConfig: + output.LastDeploymentConfig !== undefined && output.LastDeploymentConfig !== null + ? 
deserializeAws_json1_1DeploymentConfig(output.LastDeploymentConfig, context) + : undefined, LastModifiedTime: output.LastModifiedTime !== undefined && output.LastModifiedTime !== null ? new Date(Math.round(output.LastModifiedTime * 1000)) @@ -16817,6 +21790,55 @@ const deserializeAws_json1_1DescribeExperimentResponse = ( } as any; }; +const deserializeAws_json1_1DescribeFeatureGroupResponse = ( + output: any, + context: __SerdeContext +): DescribeFeatureGroupResponse => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + EventTimeFeatureName: + output.EventTimeFeatureName !== undefined && output.EventTimeFeatureName !== null + ? output.EventTimeFeatureName + : undefined, + FailureReason: + output.FailureReason !== undefined && output.FailureReason !== null ? output.FailureReason : undefined, + FeatureDefinitions: + output.FeatureDefinitions !== undefined && output.FeatureDefinitions !== null + ? deserializeAws_json1_1FeatureDefinitions(output.FeatureDefinitions, context) + : undefined, + FeatureGroupArn: + output.FeatureGroupArn !== undefined && output.FeatureGroupArn !== null ? output.FeatureGroupArn : undefined, + FeatureGroupName: + output.FeatureGroupName !== undefined && output.FeatureGroupName !== null ? output.FeatureGroupName : undefined, + FeatureGroupStatus: + output.FeatureGroupStatus !== undefined && output.FeatureGroupStatus !== null + ? output.FeatureGroupStatus + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + OfflineStoreConfig: + output.OfflineStoreConfig !== undefined && output.OfflineStoreConfig !== null + ? 
deserializeAws_json1_1OfflineStoreConfig(output.OfflineStoreConfig, context) + : undefined, + OfflineStoreStatus: + output.OfflineStoreStatus !== undefined && output.OfflineStoreStatus !== null + ? deserializeAws_json1_1OfflineStoreStatus(output.OfflineStoreStatus, context) + : undefined, + OnlineStoreConfig: + output.OnlineStoreConfig !== undefined && output.OnlineStoreConfig !== null + ? deserializeAws_json1_1OnlineStoreConfig(output.OnlineStoreConfig, context) + : undefined, + RecordIdentifierFeatureName: + output.RecordIdentifierFeatureName !== undefined && output.RecordIdentifierFeatureName !== null + ? output.RecordIdentifierFeatureName + : undefined, + RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? output.RoleArn : undefined, + } as any; +}; + const deserializeAws_json1_1DescribeFlowDefinitionResponse = ( output: any, context: __SerdeContext @@ -17098,15 +22120,55 @@ const deserializeAws_json1_1DescribeModelOutput = (output: any, context: __Serde } as any; }; +const deserializeAws_json1_1DescribeModelPackageGroupOutput = ( + output: any, + context: __SerdeContext +): DescribeModelPackageGroupOutput => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + ModelPackageGroupArn: + output.ModelPackageGroupArn !== undefined && output.ModelPackageGroupArn !== null + ? output.ModelPackageGroupArn + : undefined, + ModelPackageGroupDescription: + output.ModelPackageGroupDescription !== undefined && output.ModelPackageGroupDescription !== null + ? output.ModelPackageGroupDescription + : undefined, + ModelPackageGroupName: + output.ModelPackageGroupName !== undefined && output.ModelPackageGroupName !== null + ? 
output.ModelPackageGroupName + : undefined, + ModelPackageGroupStatus: + output.ModelPackageGroupStatus !== undefined && output.ModelPackageGroupStatus !== null + ? output.ModelPackageGroupStatus + : undefined, + } as any; +}; + const deserializeAws_json1_1DescribeModelPackageOutput = ( output: any, context: __SerdeContext ): DescribeModelPackageOutput => { return { + ApprovalDescription: + output.ApprovalDescription !== undefined && output.ApprovalDescription !== null + ? output.ApprovalDescription + : undefined, CertifyForMarketplace: output.CertifyForMarketplace !== undefined && output.CertifyForMarketplace !== null ? output.CertifyForMarketplace : undefined, + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, CreationTime: output.CreationTime !== undefined && output.CreationTime !== null ? new Date(Math.round(output.CreationTime * 1000)) @@ -17115,12 +22177,36 @@ const deserializeAws_json1_1DescribeModelPackageOutput = ( output.InferenceSpecification !== undefined && output.InferenceSpecification !== null ? deserializeAws_json1_1InferenceSpecification(output.InferenceSpecification, context) : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, + ModelApprovalStatus: + output.ModelApprovalStatus !== undefined && output.ModelApprovalStatus !== null + ? 
output.ModelApprovalStatus + : undefined, + ModelMetrics: + output.ModelMetrics !== undefined && output.ModelMetrics !== null + ? deserializeAws_json1_1ModelMetrics(output.ModelMetrics, context) + : undefined, ModelPackageArn: output.ModelPackageArn !== undefined && output.ModelPackageArn !== null ? output.ModelPackageArn : undefined, ModelPackageDescription: output.ModelPackageDescription !== undefined && output.ModelPackageDescription !== null ? output.ModelPackageDescription : undefined, + ModelPackageGroupName: + output.ModelPackageGroupName !== undefined && output.ModelPackageGroupName !== null + ? output.ModelPackageGroupName + : undefined, ModelPackageName: output.ModelPackageName !== undefined && output.ModelPackageName !== null ? output.ModelPackageName : undefined, ModelPackageStatus: @@ -17131,6 +22217,10 @@ const deserializeAws_json1_1DescribeModelPackageOutput = ( output.ModelPackageStatusDetails !== undefined && output.ModelPackageStatusDetails !== null ? deserializeAws_json1_1ModelPackageStatusDetails(output.ModelPackageStatusDetails, context) : undefined, + ModelPackageVersion: + output.ModelPackageVersion !== undefined && output.ModelPackageVersion !== null + ? output.ModelPackageVersion + : undefined, SourceAlgorithmSpecification: output.SourceAlgorithmSpecification !== undefined && output.SourceAlgorithmSpecification !== null ? deserializeAws_json1_1SourceAlgorithmSpecification(output.SourceAlgorithmSpecification, context) @@ -17279,6 +22369,108 @@ const deserializeAws_json1_1DescribeNotebookInstanceOutput = ( } as any; }; +const deserializeAws_json1_1DescribePipelineDefinitionForExecutionResponse = ( + output: any, + context: __SerdeContext +): DescribePipelineDefinitionForExecutionResponse => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? 
new Date(Math.round(output.CreationTime * 1000)) + : undefined, + PipelineDefinition: + output.PipelineDefinition !== undefined && output.PipelineDefinition !== null + ? output.PipelineDefinition + : undefined, + } as any; +}; + +const deserializeAws_json1_1DescribePipelineExecutionResponse = ( + output: any, + context: __SerdeContext +): DescribePipelineExecutionResponse => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + PipelineExecutionArn: + output.PipelineExecutionArn !== undefined && output.PipelineExecutionArn !== null + ? output.PipelineExecutionArn + : undefined, + PipelineExecutionDescription: + output.PipelineExecutionDescription !== undefined && output.PipelineExecutionDescription !== null + ? output.PipelineExecutionDescription + : undefined, + PipelineExecutionDisplayName: + output.PipelineExecutionDisplayName !== undefined && output.PipelineExecutionDisplayName !== null + ? output.PipelineExecutionDisplayName + : undefined, + PipelineExecutionStatus: + output.PipelineExecutionStatus !== undefined && output.PipelineExecutionStatus !== null + ? 
output.PipelineExecutionStatus + : undefined, + } as any; +}; + +const deserializeAws_json1_1DescribePipelineResponse = ( + output: any, + context: __SerdeContext +): DescribePipelineResponse => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + LastRunTime: + output.LastRunTime !== undefined && output.LastRunTime !== null + ? new Date(Math.round(output.LastRunTime * 1000)) + : undefined, + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + PipelineDefinition: + output.PipelineDefinition !== undefined && output.PipelineDefinition !== null + ? output.PipelineDefinition + : undefined, + PipelineDescription: + output.PipelineDescription !== undefined && output.PipelineDescription !== null + ? output.PipelineDescription + : undefined, + PipelineDisplayName: + output.PipelineDisplayName !== undefined && output.PipelineDisplayName !== null + ? output.PipelineDisplayName + : undefined, + PipelineName: output.PipelineName !== undefined && output.PipelineName !== null ? output.PipelineName : undefined, + PipelineStatus: + output.PipelineStatus !== undefined && output.PipelineStatus !== null ? output.PipelineStatus : undefined, + RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? 
output.RoleArn : undefined, + } as any; +}; + const deserializeAws_json1_1DescribeProcessingJobResponse = ( output: any, context: __SerdeContext @@ -17356,6 +22548,40 @@ const deserializeAws_json1_1DescribeProcessingJobResponse = ( } as any; }; +const deserializeAws_json1_1DescribeProjectOutput = (output: any, context: __SerdeContext): DescribeProjectOutput => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + ProjectArn: output.ProjectArn !== undefined && output.ProjectArn !== null ? output.ProjectArn : undefined, + ProjectDescription: + output.ProjectDescription !== undefined && output.ProjectDescription !== null + ? output.ProjectDescription + : undefined, + ProjectId: output.ProjectId !== undefined && output.ProjectId !== null ? output.ProjectId : undefined, + ProjectName: output.ProjectName !== undefined && output.ProjectName !== null ? output.ProjectName : undefined, + ProjectStatus: + output.ProjectStatus !== undefined && output.ProjectStatus !== null ? output.ProjectStatus : undefined, + ServiceCatalogProvisionedProductDetails: + output.ServiceCatalogProvisionedProductDetails !== undefined && + output.ServiceCatalogProvisionedProductDetails !== null + ? deserializeAws_json1_1ServiceCatalogProvisionedProductDetails( + output.ServiceCatalogProvisionedProductDetails, + context + ) + : undefined, + ServiceCatalogProvisioningDetails: + output.ServiceCatalogProvisioningDetails !== undefined && output.ServiceCatalogProvisioningDetails !== null + ? 
deserializeAws_json1_1ServiceCatalogProvisioningDetails(output.ServiceCatalogProvisioningDetails, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1DescribeSubscribedWorkteamResponse = ( output: any, context: __SerdeContext @@ -17592,6 +22818,10 @@ const deserializeAws_json1_1DescribeTrialComponentResponse = ( output.LastModifiedTime !== undefined && output.LastModifiedTime !== null ? new Date(Math.round(output.LastModifiedTime * 1000)) : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, Metrics: output.Metrics !== undefined && output.Metrics !== null ? deserializeAws_json1_1TrialComponentMetricSummaries(output.Metrics, context) @@ -17648,6 +22878,10 @@ const deserializeAws_json1_1DescribeTrialResponse = (output: any, context: __Ser output.LastModifiedTime !== undefined && output.LastModifiedTime !== null ? new Date(Math.round(output.LastModifiedTime * 1000)) : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, Source: output.Source !== undefined && output.Source !== null ? 
deserializeAws_json1_1TrialSource(output.Source, context) @@ -17721,6 +22955,13 @@ const deserializeAws_json1_1DescribeWorkteamResponse = ( } as any; }; +const deserializeAws_json1_1DisableSagemakerServicecatalogPortfolioOutput = ( + output: any, + context: __SerdeContext +): DisableSagemakerServicecatalogPortfolioOutput => { + return {} as any; +}; + const deserializeAws_json1_1DisassociateTrialComponentResponse = ( output: any, context: __SerdeContext @@ -17756,6 +22997,52 @@ const deserializeAws_json1_1DomainList = (output: any, context: __SerdeContext): return (output || []).map((entry: any) => deserializeAws_json1_1DomainDetails(entry, context)); }; +const deserializeAws_json1_1EnableSagemakerServicecatalogPortfolioOutput = ( + output: any, + context: __SerdeContext +): EnableSagemakerServicecatalogPortfolioOutput => { + return {} as any; +}; + +const deserializeAws_json1_1Endpoint = (output: any, context: __SerdeContext): Endpoint => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + DataCaptureConfig: + output.DataCaptureConfig !== undefined && output.DataCaptureConfig !== null + ? deserializeAws_json1_1DataCaptureConfigSummary(output.DataCaptureConfig, context) + : undefined, + EndpointArn: output.EndpointArn !== undefined && output.EndpointArn !== null ? output.EndpointArn : undefined, + EndpointConfigName: + output.EndpointConfigName !== undefined && output.EndpointConfigName !== null + ? output.EndpointConfigName + : undefined, + EndpointName: output.EndpointName !== undefined && output.EndpointName !== null ? output.EndpointName : undefined, + EndpointStatus: + output.EndpointStatus !== undefined && output.EndpointStatus !== null ? output.EndpointStatus : undefined, + FailureReason: + output.FailureReason !== undefined && output.FailureReason !== null ? 
output.FailureReason : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + MonitoringSchedules: + output.MonitoringSchedules !== undefined && output.MonitoringSchedules !== null + ? deserializeAws_json1_1MonitoringScheduleList(output.MonitoringSchedules, context) + : undefined, + ProductionVariants: + output.ProductionVariants !== undefined && output.ProductionVariants !== null + ? deserializeAws_json1_1ProductionVariantSummaryList(output.ProductionVariants, context) + : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? deserializeAws_json1_1TagList(output.Tags, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1EndpointConfigSummary = (output: any, context: __SerdeContext): EndpointConfigSummary => { return { CreationTime: @@ -17866,39 +23153,133 @@ const deserializeAws_json1_1ExperimentConfig = (output: any, context: __SerdeCon output.TrialComponentDisplayName !== undefined && output.TrialComponentDisplayName !== null ? output.TrialComponentDisplayName : undefined, - TrialName: output.TrialName !== undefined && output.TrialName !== null ? output.TrialName : undefined, - } as any; -}; - -const deserializeAws_json1_1ExperimentSource = (output: any, context: __SerdeContext): ExperimentSource => { - return { - SourceArn: output.SourceArn !== undefined && output.SourceArn !== null ? output.SourceArn : undefined, - SourceType: output.SourceType !== undefined && output.SourceType !== null ? output.SourceType : undefined, + TrialName: output.TrialName !== undefined && output.TrialName !== null ? output.TrialName : undefined, + } as any; +}; + +const deserializeAws_json1_1ExperimentSource = (output: any, context: __SerdeContext): ExperimentSource => { + return { + SourceArn: output.SourceArn !== undefined && output.SourceArn !== null ? 
output.SourceArn : undefined, + SourceType: output.SourceType !== undefined && output.SourceType !== null ? output.SourceType : undefined, + } as any; +}; + +const deserializeAws_json1_1ExperimentSummaries = (output: any, context: __SerdeContext): ExperimentSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ExperimentSummary(entry, context)); +}; + +const deserializeAws_json1_1ExperimentSummary = (output: any, context: __SerdeContext): ExperimentSummary => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + DisplayName: output.DisplayName !== undefined && output.DisplayName !== null ? output.DisplayName : undefined, + ExperimentArn: + output.ExperimentArn !== undefined && output.ExperimentArn !== null ? output.ExperimentArn : undefined, + ExperimentName: + output.ExperimentName !== undefined && output.ExperimentName !== null ? output.ExperimentName : undefined, + ExperimentSource: + output.ExperimentSource !== undefined && output.ExperimentSource !== null + ? deserializeAws_json1_1ExperimentSource(output.ExperimentSource, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + } as any; +}; + +const deserializeAws_json1_1Explainability = (output: any, context: __SerdeContext): Explainability => { + return { + Report: + output.Report !== undefined && output.Report !== null + ? deserializeAws_json1_1MetricsSource(output.Report, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1FeatureDefinition = (output: any, context: __SerdeContext): FeatureDefinition => { + return { + FeatureName: output.FeatureName !== undefined && output.FeatureName !== null ? output.FeatureName : undefined, + FeatureType: output.FeatureType !== undefined && output.FeatureType !== null ? 
output.FeatureType : undefined, + } as any; +}; + +const deserializeAws_json1_1FeatureDefinitions = (output: any, context: __SerdeContext): FeatureDefinition[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1FeatureDefinition(entry, context)); +}; + +const deserializeAws_json1_1FeatureGroup = (output: any, context: __SerdeContext): FeatureGroup => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + Description: output.Description !== undefined && output.Description !== null ? output.Description : undefined, + EventTimeFeatureName: + output.EventTimeFeatureName !== undefined && output.EventTimeFeatureName !== null + ? output.EventTimeFeatureName + : undefined, + FailureReason: + output.FailureReason !== undefined && output.FailureReason !== null ? output.FailureReason : undefined, + FeatureDefinitions: + output.FeatureDefinitions !== undefined && output.FeatureDefinitions !== null + ? deserializeAws_json1_1FeatureDefinitions(output.FeatureDefinitions, context) + : undefined, + FeatureGroupArn: + output.FeatureGroupArn !== undefined && output.FeatureGroupArn !== null ? output.FeatureGroupArn : undefined, + FeatureGroupName: + output.FeatureGroupName !== undefined && output.FeatureGroupName !== null ? output.FeatureGroupName : undefined, + FeatureGroupStatus: + output.FeatureGroupStatus !== undefined && output.FeatureGroupStatus !== null + ? output.FeatureGroupStatus + : undefined, + OfflineStoreConfig: + output.OfflineStoreConfig !== undefined && output.OfflineStoreConfig !== null + ? deserializeAws_json1_1OfflineStoreConfig(output.OfflineStoreConfig, context) + : undefined, + OfflineStoreStatus: + output.OfflineStoreStatus !== undefined && output.OfflineStoreStatus !== null + ? 
deserializeAws_json1_1OfflineStoreStatus(output.OfflineStoreStatus, context) + : undefined, + OnlineStoreConfig: + output.OnlineStoreConfig !== undefined && output.OnlineStoreConfig !== null + ? deserializeAws_json1_1OnlineStoreConfig(output.OnlineStoreConfig, context) + : undefined, + RecordIdentifierFeatureName: + output.RecordIdentifierFeatureName !== undefined && output.RecordIdentifierFeatureName !== null + ? output.RecordIdentifierFeatureName + : undefined, + RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? output.RoleArn : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? deserializeAws_json1_1TagList(output.Tags, context) + : undefined, } as any; }; -const deserializeAws_json1_1ExperimentSummaries = (output: any, context: __SerdeContext): ExperimentSummary[] => { - return (output || []).map((entry: any) => deserializeAws_json1_1ExperimentSummary(entry, context)); +const deserializeAws_json1_1FeatureGroupSummaries = (output: any, context: __SerdeContext): FeatureGroupSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1FeatureGroupSummary(entry, context)); }; -const deserializeAws_json1_1ExperimentSummary = (output: any, context: __SerdeContext): ExperimentSummary => { +const deserializeAws_json1_1FeatureGroupSummary = (output: any, context: __SerdeContext): FeatureGroupSummary => { return { CreationTime: output.CreationTime !== undefined && output.CreationTime !== null ? new Date(Math.round(output.CreationTime * 1000)) : undefined, - DisplayName: output.DisplayName !== undefined && output.DisplayName !== null ? output.DisplayName : undefined, - ExperimentArn: - output.ExperimentArn !== undefined && output.ExperimentArn !== null ? output.ExperimentArn : undefined, - ExperimentName: - output.ExperimentName !== undefined && output.ExperimentName !== null ? output.ExperimentName : undefined, - ExperimentSource: - output.ExperimentSource !== undefined && output.ExperimentSource !== null - ? 
deserializeAws_json1_1ExperimentSource(output.ExperimentSource, context) + FeatureGroupArn: + output.FeatureGroupArn !== undefined && output.FeatureGroupArn !== null ? output.FeatureGroupArn : undefined, + FeatureGroupName: + output.FeatureGroupName !== undefined && output.FeatureGroupName !== null ? output.FeatureGroupName : undefined, + FeatureGroupStatus: + output.FeatureGroupStatus !== undefined && output.FeatureGroupStatus !== null + ? output.FeatureGroupStatus : undefined, - LastModifiedTime: - output.LastModifiedTime !== undefined && output.LastModifiedTime !== null - ? new Date(Math.round(output.LastModifiedTime * 1000)) + OfflineStoreStatus: + output.OfflineStoreStatus !== undefined && output.OfflineStoreStatus !== null + ? deserializeAws_json1_1OfflineStoreStatus(output.OfflineStoreStatus, context) : undefined, } as any; }; @@ -17995,6 +23376,25 @@ const deserializeAws_json1_1FlowDefinitionTaskKeywords = (output: any, context: return (output || []).map((entry: any) => entry); }; +const deserializeAws_json1_1GetModelPackageGroupPolicyOutput = ( + output: any, + context: __SerdeContext +): GetModelPackageGroupPolicyOutput => { + return { + ResourcePolicy: + output.ResourcePolicy !== undefined && output.ResourcePolicy !== null ? output.ResourcePolicy : undefined, + } as any; +}; + +const deserializeAws_json1_1GetSagemakerServicecatalogPortfolioStatusOutput = ( + output: any, + context: __SerdeContext +): GetSagemakerServicecatalogPortfolioStatusOutput => { + return { + Status: output.Status !== undefined && output.Status !== null ? 
output.Status : undefined, + } as any; +}; + const deserializeAws_json1_1GetSearchSuggestionsResponse = ( output: any, context: __SerdeContext @@ -18871,6 +24271,29 @@ const deserializeAws_json1_1LabelingJobSummaryList = (output: any, context: __Se return (output || []).map((entry: any) => deserializeAws_json1_1LabelingJobSummary(entry, context)); }; +const deserializeAws_json1_1LineageEntityParameters = ( + output: any, + context: __SerdeContext +): { [key: string]: string } => { + return Object.entries(output).reduce( + (acc: { [key: string]: string }, [key, value]: [string, any]) => ({ + ...acc, + [key]: value, + }), + {} + ); +}; + +const deserializeAws_json1_1ListActionsResponse = (output: any, context: __SerdeContext): ListActionsResponse => { + return { + ActionSummaries: + output.ActionSummaries !== undefined && output.ActionSummaries !== null + ? deserializeAws_json1_1ActionSummaries(output.ActionSummaries, context) + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + } as any; +}; + const deserializeAws_json1_1ListAlgorithmsOutput = (output: any, context: __SerdeContext): ListAlgorithmsOutput => { return { AlgorithmSummaryList: @@ -18904,6 +24327,29 @@ const deserializeAws_json1_1ListAppsResponse = (output: any, context: __SerdeCon } as any; }; +const deserializeAws_json1_1ListArtifactsResponse = (output: any, context: __SerdeContext): ListArtifactsResponse => { + return { + ArtifactSummaries: + output.ArtifactSummaries !== undefined && output.ArtifactSummaries !== null + ? deserializeAws_json1_1ArtifactSummaries(output.ArtifactSummaries, context) + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? 
output.NextToken : undefined, + } as any; +}; + +const deserializeAws_json1_1ListAssociationsResponse = ( + output: any, + context: __SerdeContext +): ListAssociationsResponse => { + return { + AssociationSummaries: + output.AssociationSummaries !== undefined && output.AssociationSummaries !== null + ? deserializeAws_json1_1AssociationSummaries(output.AssociationSummaries, context) + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + } as any; +}; + const deserializeAws_json1_1ListAutoMLJobsResponse = (output: any, context: __SerdeContext): ListAutoMLJobsResponse => { return { AutoMLJobSummaries: @@ -18953,6 +24399,16 @@ const deserializeAws_json1_1ListCompilationJobsResponse = ( } as any; }; +const deserializeAws_json1_1ListContextsResponse = (output: any, context: __SerdeContext): ListContextsResponse => { + return { + ContextSummaries: + output.ContextSummaries !== undefined && output.ContextSummaries !== null + ? deserializeAws_json1_1ContextSummaries(output.ContextSummaries, context) + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + } as any; +}; + const deserializeAws_json1_1ListDomainsResponse = (output: any, context: __SerdeContext): ListDomainsResponse => { return { Domains: @@ -18999,6 +24455,19 @@ const deserializeAws_json1_1ListExperimentsResponse = ( } as any; }; +const deserializeAws_json1_1ListFeatureGroupsResponse = ( + output: any, + context: __SerdeContext +): ListFeatureGroupsResponse => { + return { + FeatureGroupSummaries: + output.FeatureGroupSummaries !== undefined && output.FeatureGroupSummaries !== null + ? deserializeAws_json1_1FeatureGroupSummaries(output.FeatureGroupSummaries, context) + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? 
output.NextToken : undefined, + } as any; +}; + const deserializeAws_json1_1ListFlowDefinitionsResponse = ( output: any, context: __SerdeContext @@ -19087,6 +24556,19 @@ const deserializeAws_json1_1ListLabelingJobsResponse = ( } as any; }; +const deserializeAws_json1_1ListModelPackageGroupsOutput = ( + output: any, + context: __SerdeContext +): ListModelPackageGroupsOutput => { + return { + ModelPackageGroupSummaryList: + output.ModelPackageGroupSummaryList !== undefined && output.ModelPackageGroupSummaryList !== null + ? deserializeAws_json1_1ModelPackageGroupSummaryList(output.ModelPackageGroupSummaryList, context) + : undefined, + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + } as any; +}; + const deserializeAws_json1_1ListModelPackagesOutput = ( output: any, context: __SerdeContext @@ -19165,6 +24647,55 @@ const deserializeAws_json1_1ListNotebookInstancesOutput = ( } as any; }; +const deserializeAws_json1_1ListPipelineExecutionsResponse = ( + output: any, + context: __SerdeContext +): ListPipelineExecutionsResponse => { + return { + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + PipelineExecutionSummaries: + output.PipelineExecutionSummaries !== undefined && output.PipelineExecutionSummaries !== null + ? deserializeAws_json1_1PipelineExecutionSummaryList(output.PipelineExecutionSummaries, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ListPipelineExecutionStepsResponse = ( + output: any, + context: __SerdeContext +): ListPipelineExecutionStepsResponse => { + return { + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + PipelineExecutionSteps: + output.PipelineExecutionSteps !== undefined && output.PipelineExecutionSteps !== null + ? 
deserializeAws_json1_1PipelineExecutionStepList(output.PipelineExecutionSteps, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ListPipelineParametersForExecutionResponse = ( + output: any, + context: __SerdeContext +): ListPipelineParametersForExecutionResponse => { + return { + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + PipelineParameters: + output.PipelineParameters !== undefined && output.PipelineParameters !== null + ? deserializeAws_json1_1ParameterList(output.PipelineParameters, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ListPipelinesResponse = (output: any, context: __SerdeContext): ListPipelinesResponse => { + return { + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + PipelineSummaries: + output.PipelineSummaries !== undefined && output.PipelineSummaries !== null + ? deserializeAws_json1_1PipelineSummaryList(output.PipelineSummaries, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1ListProcessingJobsResponse = ( output: any, context: __SerdeContext @@ -19178,6 +24709,16 @@ const deserializeAws_json1_1ListProcessingJobsResponse = ( } as any; }; +const deserializeAws_json1_1ListProjectsOutput = (output: any, context: __SerdeContext): ListProjectsOutput => { + return { + NextToken: output.NextToken !== undefined && output.NextToken !== null ? output.NextToken : undefined, + ProjectSummaryList: + output.ProjectSummaryList !== undefined && output.ProjectSummaryList !== null + ? 
deserializeAws_json1_1ProjectSummaryList(output.ProjectSummaryList, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1ListSubscribedWorkteamsResponse = ( output: any, context: __SerdeContext @@ -19313,6 +24854,15 @@ const deserializeAws_json1_1MemberDefinitions = (output: any, context: __SerdeCo return (output || []).map((entry: any) => deserializeAws_json1_1MemberDefinition(entry, context)); }; +const deserializeAws_json1_1MetadataProperties = (output: any, context: __SerdeContext): MetadataProperties => { + return { + CommitId: output.CommitId !== undefined && output.CommitId !== null ? output.CommitId : undefined, + GeneratedBy: output.GeneratedBy !== undefined && output.GeneratedBy !== null ? output.GeneratedBy : undefined, + ProjectId: output.ProjectId !== undefined && output.ProjectId !== null ? output.ProjectId : undefined, + Repository: output.Repository !== undefined && output.Repository !== null ? output.Repository : undefined, + } as any; +}; + const deserializeAws_json1_1MetricData = (output: any, context: __SerdeContext): MetricData => { return { MetricName: output.MetricName !== undefined && output.MetricName !== null ? output.MetricName : undefined, @@ -19335,6 +24885,15 @@ const deserializeAws_json1_1MetricDefinitionList = (output: any, context: __Serd return (output || []).map((entry: any) => deserializeAws_json1_1MetricDefinition(entry, context)); }; +const deserializeAws_json1_1MetricsSource = (output: any, context: __SerdeContext): MetricsSource => { + return { + ContentDigest: + output.ContentDigest !== undefined && output.ContentDigest !== null ? output.ContentDigest : undefined, + ContentType: output.ContentType !== undefined && output.ContentType !== null ? output.ContentType : undefined, + S3Uri: output.S3Uri !== undefined && output.S3Uri !== null ? 
output.S3Uri : undefined, + } as any; +}; + const deserializeAws_json1_1ModelArtifacts = (output: any, context: __SerdeContext): ModelArtifacts => { return { S3ModelArtifacts: @@ -19348,34 +24907,222 @@ const deserializeAws_json1_1ModelClientConfig = (output: any, context: __SerdeCo output.InvocationsMaxRetries !== undefined && output.InvocationsMaxRetries !== null ? output.InvocationsMaxRetries : undefined, - InvocationsTimeoutInSeconds: - output.InvocationsTimeoutInSeconds !== undefined && output.InvocationsTimeoutInSeconds !== null - ? output.InvocationsTimeoutInSeconds + InvocationsTimeoutInSeconds: + output.InvocationsTimeoutInSeconds !== undefined && output.InvocationsTimeoutInSeconds !== null + ? output.InvocationsTimeoutInSeconds + : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelDataQuality = (output: any, context: __SerdeContext): ModelDataQuality => { + return { + Constraints: + output.Constraints !== undefined && output.Constraints !== null + ? deserializeAws_json1_1MetricsSource(output.Constraints, context) + : undefined, + Statistics: + output.Statistics !== undefined && output.Statistics !== null + ? deserializeAws_json1_1MetricsSource(output.Statistics, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelDigests = (output: any, context: __SerdeContext): ModelDigests => { + return { + ArtifactDigest: + output.ArtifactDigest !== undefined && output.ArtifactDigest !== null ? output.ArtifactDigest : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelMetrics = (output: any, context: __SerdeContext): ModelMetrics => { + return { + Bias: + output.Bias !== undefined && output.Bias !== null ? deserializeAws_json1_1Bias(output.Bias, context) : undefined, + Explainability: + output.Explainability !== undefined && output.Explainability !== null + ? 
deserializeAws_json1_1Explainability(output.Explainability, context) + : undefined, + ModelDataQuality: + output.ModelDataQuality !== undefined && output.ModelDataQuality !== null + ? deserializeAws_json1_1ModelDataQuality(output.ModelDataQuality, context) + : undefined, + ModelQuality: + output.ModelQuality !== undefined && output.ModelQuality !== null + ? deserializeAws_json1_1ModelQuality(output.ModelQuality, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelPackage = (output: any, context: __SerdeContext): ModelPackage => { + return { + ApprovalDescription: + output.ApprovalDescription !== undefined && output.ApprovalDescription !== null + ? output.ApprovalDescription + : undefined, + CertifyForMarketplace: + output.CertifyForMarketplace !== undefined && output.CertifyForMarketplace !== null + ? output.CertifyForMarketplace + : undefined, + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + InferenceSpecification: + output.InferenceSpecification !== undefined && output.InferenceSpecification !== null + ? deserializeAws_json1_1InferenceSpecification(output.InferenceSpecification, context) + : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? 
deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, + ModelApprovalStatus: + output.ModelApprovalStatus !== undefined && output.ModelApprovalStatus !== null + ? output.ModelApprovalStatus + : undefined, + ModelMetrics: + output.ModelMetrics !== undefined && output.ModelMetrics !== null + ? deserializeAws_json1_1ModelMetrics(output.ModelMetrics, context) + : undefined, + ModelPackageArn: + output.ModelPackageArn !== undefined && output.ModelPackageArn !== null ? output.ModelPackageArn : undefined, + ModelPackageDescription: + output.ModelPackageDescription !== undefined && output.ModelPackageDescription !== null + ? output.ModelPackageDescription + : undefined, + ModelPackageGroupName: + output.ModelPackageGroupName !== undefined && output.ModelPackageGroupName !== null + ? output.ModelPackageGroupName + : undefined, + ModelPackageName: + output.ModelPackageName !== undefined && output.ModelPackageName !== null ? output.ModelPackageName : undefined, + ModelPackageStatus: + output.ModelPackageStatus !== undefined && output.ModelPackageStatus !== null + ? output.ModelPackageStatus + : undefined, + ModelPackageStatusDetails: + output.ModelPackageStatusDetails !== undefined && output.ModelPackageStatusDetails !== null + ? deserializeAws_json1_1ModelPackageStatusDetails(output.ModelPackageStatusDetails, context) + : undefined, + ModelPackageVersion: + output.ModelPackageVersion !== undefined && output.ModelPackageVersion !== null + ? output.ModelPackageVersion + : undefined, + SourceAlgorithmSpecification: + output.SourceAlgorithmSpecification !== undefined && output.SourceAlgorithmSpecification !== null + ? deserializeAws_json1_1SourceAlgorithmSpecification(output.SourceAlgorithmSpecification, context) + : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? 
deserializeAws_json1_1TagList(output.Tags, context) + : undefined, + ValidationSpecification: + output.ValidationSpecification !== undefined && output.ValidationSpecification !== null + ? deserializeAws_json1_1ModelPackageValidationSpecification(output.ValidationSpecification, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelPackageContainerDefinition = ( + output: any, + context: __SerdeContext +): ModelPackageContainerDefinition => { + return { + ContainerHostname: + output.ContainerHostname !== undefined && output.ContainerHostname !== null + ? output.ContainerHostname + : undefined, + Image: output.Image !== undefined && output.Image !== null ? output.Image : undefined, + ImageDigest: output.ImageDigest !== undefined && output.ImageDigest !== null ? output.ImageDigest : undefined, + ModelDataUrl: output.ModelDataUrl !== undefined && output.ModelDataUrl !== null ? output.ModelDataUrl : undefined, + ProductId: output.ProductId !== undefined && output.ProductId !== null ? output.ProductId : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelPackageContainerDefinitionList = ( + output: any, + context: __SerdeContext +): ModelPackageContainerDefinition[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ModelPackageContainerDefinition(entry, context)); +}; + +const deserializeAws_json1_1ModelPackageGroup = (output: any, context: __SerdeContext): ModelPackageGroup => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + ModelPackageGroupArn: + output.ModelPackageGroupArn !== undefined && output.ModelPackageGroupArn !== null + ? 
output.ModelPackageGroupArn + : undefined, + ModelPackageGroupDescription: + output.ModelPackageGroupDescription !== undefined && output.ModelPackageGroupDescription !== null + ? output.ModelPackageGroupDescription + : undefined, + ModelPackageGroupName: + output.ModelPackageGroupName !== undefined && output.ModelPackageGroupName !== null + ? output.ModelPackageGroupName + : undefined, + ModelPackageGroupStatus: + output.ModelPackageGroupStatus !== undefined && output.ModelPackageGroupStatus !== null + ? output.ModelPackageGroupStatus + : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? deserializeAws_json1_1TagList(output.Tags, context) : undefined, } as any; }; -const deserializeAws_json1_1ModelPackageContainerDefinition = ( +const deserializeAws_json1_1ModelPackageGroupSummary = ( output: any, context: __SerdeContext -): ModelPackageContainerDefinition => { +): ModelPackageGroupSummary => { return { - ContainerHostname: - output.ContainerHostname !== undefined && output.ContainerHostname !== null - ? output.ContainerHostname + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + ModelPackageGroupArn: + output.ModelPackageGroupArn !== undefined && output.ModelPackageGroupArn !== null + ? output.ModelPackageGroupArn + : undefined, + ModelPackageGroupDescription: + output.ModelPackageGroupDescription !== undefined && output.ModelPackageGroupDescription !== null + ? output.ModelPackageGroupDescription + : undefined, + ModelPackageGroupName: + output.ModelPackageGroupName !== undefined && output.ModelPackageGroupName !== null + ? output.ModelPackageGroupName + : undefined, + ModelPackageGroupStatus: + output.ModelPackageGroupStatus !== undefined && output.ModelPackageGroupStatus !== null + ? output.ModelPackageGroupStatus : undefined, - Image: output.Image !== undefined && output.Image !== null ? 
output.Image : undefined, - ImageDigest: output.ImageDigest !== undefined && output.ImageDigest !== null ? output.ImageDigest : undefined, - ModelDataUrl: output.ModelDataUrl !== undefined && output.ModelDataUrl !== null ? output.ModelDataUrl : undefined, - ProductId: output.ProductId !== undefined && output.ProductId !== null ? output.ProductId : undefined, } as any; }; -const deserializeAws_json1_1ModelPackageContainerDefinitionList = ( +const deserializeAws_json1_1ModelPackageGroupSummaryList = ( output: any, context: __SerdeContext -): ModelPackageContainerDefinition[] => { - return (output || []).map((entry: any) => deserializeAws_json1_1ModelPackageContainerDefinition(entry, context)); +): ModelPackageGroupSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ModelPackageGroupSummary(entry, context)); }; const deserializeAws_json1_1ModelPackageStatusDetails = ( @@ -19416,18 +25163,30 @@ const deserializeAws_json1_1ModelPackageSummary = (output: any, context: __Serde output.CreationTime !== undefined && output.CreationTime !== null ? new Date(Math.round(output.CreationTime * 1000)) : undefined, + ModelApprovalStatus: + output.ModelApprovalStatus !== undefined && output.ModelApprovalStatus !== null + ? output.ModelApprovalStatus + : undefined, ModelPackageArn: output.ModelPackageArn !== undefined && output.ModelPackageArn !== null ? output.ModelPackageArn : undefined, ModelPackageDescription: output.ModelPackageDescription !== undefined && output.ModelPackageDescription !== null ? output.ModelPackageDescription : undefined, + ModelPackageGroupName: + output.ModelPackageGroupName !== undefined && output.ModelPackageGroupName !== null + ? output.ModelPackageGroupName + : undefined, ModelPackageName: output.ModelPackageName !== undefined && output.ModelPackageName !== null ? output.ModelPackageName : undefined, ModelPackageStatus: output.ModelPackageStatus !== undefined && output.ModelPackageStatus !== null ? 
output.ModelPackageStatus : undefined, + ModelPackageVersion: + output.ModelPackageVersion !== undefined && output.ModelPackageVersion !== null + ? output.ModelPackageVersion + : undefined, } as any; }; @@ -19469,6 +25228,25 @@ const deserializeAws_json1_1ModelPackageValidationSpecification = ( } as any; }; +const deserializeAws_json1_1ModelQuality = (output: any, context: __SerdeContext): ModelQuality => { + return { + Constraints: + output.Constraints !== undefined && output.Constraints !== null + ? deserializeAws_json1_1MetricsSource(output.Constraints, context) + : undefined, + Statistics: + output.Statistics !== undefined && output.Statistics !== null + ? deserializeAws_json1_1MetricsSource(output.Statistics, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ModelStepMetadata = (output: any, context: __SerdeContext): ModelStepMetadata => { + return { + Arn: output.Arn !== undefined && output.Arn !== null ? output.Arn : undefined, + } as any; +}; + const deserializeAws_json1_1ModelSummary = (output: any, context: __SerdeContext): ModelSummary => { return { CreationTime: @@ -19700,6 +25478,46 @@ const deserializeAws_json1_1MonitoringS3Output = (output: any, context: __SerdeC } as any; }; +const deserializeAws_json1_1MonitoringSchedule = (output: any, context: __SerdeContext): MonitoringSchedule => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + EndpointName: output.EndpointName !== undefined && output.EndpointName !== null ? output.EndpointName : undefined, + FailureReason: + output.FailureReason !== undefined && output.FailureReason !== null ? output.FailureReason : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? 
new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + LastMonitoringExecutionSummary: + output.LastMonitoringExecutionSummary !== undefined && output.LastMonitoringExecutionSummary !== null + ? deserializeAws_json1_1MonitoringExecutionSummary(output.LastMonitoringExecutionSummary, context) + : undefined, + MonitoringScheduleArn: + output.MonitoringScheduleArn !== undefined && output.MonitoringScheduleArn !== null + ? output.MonitoringScheduleArn + : undefined, + MonitoringScheduleConfig: + output.MonitoringScheduleConfig !== undefined && output.MonitoringScheduleConfig !== null + ? deserializeAws_json1_1MonitoringScheduleConfig(output.MonitoringScheduleConfig, context) + : undefined, + MonitoringScheduleName: + output.MonitoringScheduleName !== undefined && output.MonitoringScheduleName !== null + ? output.MonitoringScheduleName + : undefined, + MonitoringScheduleStatus: + output.MonitoringScheduleStatus !== undefined && output.MonitoringScheduleStatus !== null + ? output.MonitoringScheduleStatus + : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? 
deserializeAws_json1_1TagList(output.Tags, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1MonitoringScheduleConfig = ( output: any, context: __SerdeContext @@ -19716,6 +25534,10 @@ const deserializeAws_json1_1MonitoringScheduleConfig = ( } as any; }; +const deserializeAws_json1_1MonitoringScheduleList = (output: any, context: __SerdeContext): MonitoringSchedule[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1MonitoringSchedule(entry, context)); +}; + const deserializeAws_json1_1MonitoringScheduleSummary = ( output: any, context: __SerdeContext @@ -19919,6 +25741,31 @@ const deserializeAws_json1_1ObjectiveStatusCounters = ( } as any; }; +const deserializeAws_json1_1OfflineStoreConfig = (output: any, context: __SerdeContext): OfflineStoreConfig => { + return { + DataCatalogConfig: + output.DataCatalogConfig !== undefined && output.DataCatalogConfig !== null + ? deserializeAws_json1_1DataCatalogConfig(output.DataCatalogConfig, context) + : undefined, + DisableGlueTableCreation: + output.DisableGlueTableCreation !== undefined && output.DisableGlueTableCreation !== null + ? output.DisableGlueTableCreation + : undefined, + S3StorageConfig: + output.S3StorageConfig !== undefined && output.S3StorageConfig !== null + ? deserializeAws_json1_1S3StorageConfig(output.S3StorageConfig, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1OfflineStoreStatus = (output: any, context: __SerdeContext): OfflineStoreStatus => { + return { + BlockedReason: + output.BlockedReason !== undefined && output.BlockedReason !== null ? output.BlockedReason : undefined, + Status: output.Status !== undefined && output.Status !== null ? 
output.Status : undefined, + } as any; +}; + const deserializeAws_json1_1OidcConfigForResponse = (output: any, context: __SerdeContext): OidcConfigForResponse => { return { AuthorizationEndpoint: @@ -19946,10 +25793,33 @@ const deserializeAws_json1_1OidcMemberDefinition = (output: any, context: __Serd } as any; }; +const deserializeAws_json1_1OnlineStoreConfig = (output: any, context: __SerdeContext): OnlineStoreConfig => { + return { + EnableOnlineStore: + output.EnableOnlineStore !== undefined && output.EnableOnlineStore !== null + ? output.EnableOnlineStore + : undefined, + SecurityConfig: + output.SecurityConfig !== undefined && output.SecurityConfig !== null + ? deserializeAws_json1_1OnlineStoreSecurityConfig(output.SecurityConfig, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1OnlineStoreSecurityConfig = ( + output: any, + context: __SerdeContext +): OnlineStoreSecurityConfig => { + return { + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, + } as any; +}; + const deserializeAws_json1_1OutputConfig = (output: any, context: __SerdeContext): OutputConfig => { return { CompilerOptions: output.CompilerOptions !== undefined && output.CompilerOptions !== null ? output.CompilerOptions : undefined, + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, S3OutputLocation: output.S3OutputLocation !== undefined && output.S3OutputLocation !== null ? output.S3OutputLocation : undefined, TargetDevice: output.TargetDevice !== undefined && output.TargetDevice !== null ? output.TargetDevice : undefined, @@ -19957,91 +25827,315 @@ const deserializeAws_json1_1OutputConfig = (output: any, context: __SerdeContext output.TargetPlatform !== undefined && output.TargetPlatform !== null ? 
deserializeAws_json1_1TargetPlatform(output.TargetPlatform, context) : undefined, - } as any; -}; - -const deserializeAws_json1_1OutputDataConfig = (output: any, context: __SerdeContext): OutputDataConfig => { - return { - KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, - S3OutputPath: output.S3OutputPath !== undefined && output.S3OutputPath !== null ? output.S3OutputPath : undefined, - } as any; -}; - -const deserializeAws_json1_1ParameterRange = (output: any, context: __SerdeContext): ParameterRange => { - return { - CategoricalParameterRangeSpecification: - output.CategoricalParameterRangeSpecification !== undefined && - output.CategoricalParameterRangeSpecification !== null - ? deserializeAws_json1_1CategoricalParameterRangeSpecification( - output.CategoricalParameterRangeSpecification, - context - ) + } as any; +}; + +const deserializeAws_json1_1OutputDataConfig = (output: any, context: __SerdeContext): OutputDataConfig => { + return { + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, + S3OutputPath: output.S3OutputPath !== undefined && output.S3OutputPath !== null ? output.S3OutputPath : undefined, + } as any; +}; + +const deserializeAws_json1_1Parameter = (output: any, context: __SerdeContext): Parameter => { + return { + Name: output.Name !== undefined && output.Name !== null ? output.Name : undefined, + Value: output.Value !== undefined && output.Value !== null ? 
output.Value : undefined, + } as any; +}; + +const deserializeAws_json1_1ParameterList = (output: any, context: __SerdeContext): Parameter[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1Parameter(entry, context)); +}; + +const deserializeAws_json1_1ParameterRange = (output: any, context: __SerdeContext): ParameterRange => { + return { + CategoricalParameterRangeSpecification: + output.CategoricalParameterRangeSpecification !== undefined && + output.CategoricalParameterRangeSpecification !== null + ? deserializeAws_json1_1CategoricalParameterRangeSpecification( + output.CategoricalParameterRangeSpecification, + context + ) + : undefined, + ContinuousParameterRangeSpecification: + output.ContinuousParameterRangeSpecification !== undefined && + output.ContinuousParameterRangeSpecification !== null + ? deserializeAws_json1_1ContinuousParameterRangeSpecification( + output.ContinuousParameterRangeSpecification, + context + ) + : undefined, + IntegerParameterRangeSpecification: + output.IntegerParameterRangeSpecification !== undefined && output.IntegerParameterRangeSpecification !== null + ? deserializeAws_json1_1IntegerParameterRangeSpecification(output.IntegerParameterRangeSpecification, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ParameterRanges = (output: any, context: __SerdeContext): ParameterRanges => { + return { + CategoricalParameterRanges: + output.CategoricalParameterRanges !== undefined && output.CategoricalParameterRanges !== null + ? deserializeAws_json1_1CategoricalParameterRanges(output.CategoricalParameterRanges, context) + : undefined, + ContinuousParameterRanges: + output.ContinuousParameterRanges !== undefined && output.ContinuousParameterRanges !== null + ? deserializeAws_json1_1ContinuousParameterRanges(output.ContinuousParameterRanges, context) + : undefined, + IntegerParameterRanges: + output.IntegerParameterRanges !== undefined && output.IntegerParameterRanges !== null + ? 
deserializeAws_json1_1IntegerParameterRanges(output.IntegerParameterRanges, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1ParameterValues = (output: any, context: __SerdeContext): string[] => { + return (output || []).map((entry: any) => entry); +}; + +const deserializeAws_json1_1Parent = (output: any, context: __SerdeContext): Parent => { + return { + ExperimentName: + output.ExperimentName !== undefined && output.ExperimentName !== null ? output.ExperimentName : undefined, + TrialName: output.TrialName !== undefined && output.TrialName !== null ? output.TrialName : undefined, + } as any; +}; + +const deserializeAws_json1_1ParentHyperParameterTuningJob = ( + output: any, + context: __SerdeContext +): ParentHyperParameterTuningJob => { + return { + HyperParameterTuningJobName: + output.HyperParameterTuningJobName !== undefined && output.HyperParameterTuningJobName !== null + ? output.HyperParameterTuningJobName + : undefined, + } as any; +}; + +const deserializeAws_json1_1ParentHyperParameterTuningJobs = ( + output: any, + context: __SerdeContext +): ParentHyperParameterTuningJob[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ParentHyperParameterTuningJob(entry, context)); +}; + +const deserializeAws_json1_1Parents = (output: any, context: __SerdeContext): Parent[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1Parent(entry, context)); +}; + +const deserializeAws_json1_1Pipeline = (output: any, context: __SerdeContext): Pipeline => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? 
deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + LastRunTime: + output.LastRunTime !== undefined && output.LastRunTime !== null + ? new Date(Math.round(output.LastRunTime * 1000)) + : undefined, + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + PipelineDescription: + output.PipelineDescription !== undefined && output.PipelineDescription !== null + ? output.PipelineDescription + : undefined, + PipelineDisplayName: + output.PipelineDisplayName !== undefined && output.PipelineDisplayName !== null + ? output.PipelineDisplayName + : undefined, + PipelineName: output.PipelineName !== undefined && output.PipelineName !== null ? output.PipelineName : undefined, + PipelineStatus: + output.PipelineStatus !== undefined && output.PipelineStatus !== null ? output.PipelineStatus : undefined, + RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? output.RoleArn : undefined, + Tags: + output.Tags !== undefined && output.Tags !== null + ? deserializeAws_json1_1TagList(output.Tags, context) + : undefined, + } as any; +}; + +const deserializeAws_json1_1PipelineExecution = (output: any, context: __SerdeContext): PipelineExecution => { + return { + CreatedBy: + output.CreatedBy !== undefined && output.CreatedBy !== null + ? deserializeAws_json1_1UserContext(output.CreatedBy, context) + : undefined, + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastModifiedBy: + output.LastModifiedBy !== undefined && output.LastModifiedBy !== null + ? 
deserializeAws_json1_1UserContext(output.LastModifiedBy, context) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + PipelineExecutionArn: + output.PipelineExecutionArn !== undefined && output.PipelineExecutionArn !== null + ? output.PipelineExecutionArn + : undefined, + PipelineExecutionDescription: + output.PipelineExecutionDescription !== undefined && output.PipelineExecutionDescription !== null + ? output.PipelineExecutionDescription + : undefined, + PipelineExecutionDisplayName: + output.PipelineExecutionDisplayName !== undefined && output.PipelineExecutionDisplayName !== null + ? output.PipelineExecutionDisplayName : undefined, - ContinuousParameterRangeSpecification: - output.ContinuousParameterRangeSpecification !== undefined && - output.ContinuousParameterRangeSpecification !== null - ? deserializeAws_json1_1ContinuousParameterRangeSpecification( - output.ContinuousParameterRangeSpecification, - context - ) + PipelineExecutionStatus: + output.PipelineExecutionStatus !== undefined && output.PipelineExecutionStatus !== null + ? output.PipelineExecutionStatus : undefined, - IntegerParameterRangeSpecification: - output.IntegerParameterRangeSpecification !== undefined && output.IntegerParameterRangeSpecification !== null - ? deserializeAws_json1_1IntegerParameterRangeSpecification(output.IntegerParameterRangeSpecification, context) + PipelineParameters: + output.PipelineParameters !== undefined && output.PipelineParameters !== null + ? 
deserializeAws_json1_1ParameterList(output.PipelineParameters, context) : undefined, } as any; }; -const deserializeAws_json1_1ParameterRanges = (output: any, context: __SerdeContext): ParameterRanges => { +const deserializeAws_json1_1PipelineExecutionStep = (output: any, context: __SerdeContext): PipelineExecutionStep => { return { - CategoricalParameterRanges: - output.CategoricalParameterRanges !== undefined && output.CategoricalParameterRanges !== null - ? deserializeAws_json1_1CategoricalParameterRanges(output.CategoricalParameterRanges, context) + CacheHitResult: + output.CacheHitResult !== undefined && output.CacheHitResult !== null + ? deserializeAws_json1_1CacheHitResult(output.CacheHitResult, context) : undefined, - ContinuousParameterRanges: - output.ContinuousParameterRanges !== undefined && output.ContinuousParameterRanges !== null - ? deserializeAws_json1_1ContinuousParameterRanges(output.ContinuousParameterRanges, context) + EndTime: + output.EndTime !== undefined && output.EndTime !== null ? new Date(Math.round(output.EndTime * 1000)) : undefined, + FailureReason: + output.FailureReason !== undefined && output.FailureReason !== null ? output.FailureReason : undefined, + Metadata: + output.Metadata !== undefined && output.Metadata !== null + ? deserializeAws_json1_1PipelineExecutionStepMetadata(output.Metadata, context) : undefined, - IntegerParameterRanges: - output.IntegerParameterRanges !== undefined && output.IntegerParameterRanges !== null - ? deserializeAws_json1_1IntegerParameterRanges(output.IntegerParameterRanges, context) + StartTime: + output.StartTime !== undefined && output.StartTime !== null + ? new Date(Math.round(output.StartTime * 1000)) : undefined, + StepName: output.StepName !== undefined && output.StepName !== null ? output.StepName : undefined, + StepStatus: output.StepStatus !== undefined && output.StepStatus !== null ? 
output.StepStatus : undefined, } as any; }; -const deserializeAws_json1_1ParameterValues = (output: any, context: __SerdeContext): string[] => { - return (output || []).map((entry: any) => entry); +const deserializeAws_json1_1PipelineExecutionStepList = ( + output: any, + context: __SerdeContext +): PipelineExecutionStep[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1PipelineExecutionStep(entry, context)); }; -const deserializeAws_json1_1Parent = (output: any, context: __SerdeContext): Parent => { +const deserializeAws_json1_1PipelineExecutionStepMetadata = ( + output: any, + context: __SerdeContext +): PipelineExecutionStepMetadata => { return { - ExperimentName: - output.ExperimentName !== undefined && output.ExperimentName !== null ? output.ExperimentName : undefined, - TrialName: output.TrialName !== undefined && output.TrialName !== null ? output.TrialName : undefined, + Condition: + output.Condition !== undefined && output.Condition !== null + ? deserializeAws_json1_1ConditionStepMetadata(output.Condition, context) + : undefined, + Model: + output.Model !== undefined && output.Model !== null + ? deserializeAws_json1_1ModelStepMetadata(output.Model, context) + : undefined, + ProcessingJob: + output.ProcessingJob !== undefined && output.ProcessingJob !== null + ? deserializeAws_json1_1ProcessingJobStepMetadata(output.ProcessingJob, context) + : undefined, + RegisterModel: + output.RegisterModel !== undefined && output.RegisterModel !== null + ? deserializeAws_json1_1RegisterModelStepMetadata(output.RegisterModel, context) + : undefined, + TrainingJob: + output.TrainingJob !== undefined && output.TrainingJob !== null + ? deserializeAws_json1_1TrainingJobStepMetadata(output.TrainingJob, context) + : undefined, + TransformJob: + output.TransformJob !== undefined && output.TransformJob !== null + ? 
deserializeAws_json1_1TransformJobStepMetadata(output.TransformJob, context) + : undefined, } as any; }; -const deserializeAws_json1_1ParentHyperParameterTuningJob = ( +const deserializeAws_json1_1PipelineExecutionSummary = ( output: any, context: __SerdeContext -): ParentHyperParameterTuningJob => { +): PipelineExecutionSummary => { return { - HyperParameterTuningJobName: - output.HyperParameterTuningJobName !== undefined && output.HyperParameterTuningJobName !== null - ? output.HyperParameterTuningJobName + PipelineExecutionArn: + output.PipelineExecutionArn !== undefined && output.PipelineExecutionArn !== null + ? output.PipelineExecutionArn + : undefined, + PipelineExecutionDescription: + output.PipelineExecutionDescription !== undefined && output.PipelineExecutionDescription !== null + ? output.PipelineExecutionDescription + : undefined, + PipelineExecutionDisplayName: + output.PipelineExecutionDisplayName !== undefined && output.PipelineExecutionDisplayName !== null + ? output.PipelineExecutionDisplayName + : undefined, + PipelineExecutionStatus: + output.PipelineExecutionStatus !== undefined && output.PipelineExecutionStatus !== null + ? output.PipelineExecutionStatus + : undefined, + StartTime: + output.StartTime !== undefined && output.StartTime !== null + ? 
new Date(Math.round(output.StartTime * 1000)) : undefined, } as any; }; -const deserializeAws_json1_1ParentHyperParameterTuningJobs = ( +const deserializeAws_json1_1PipelineExecutionSummaryList = ( output: any, context: __SerdeContext -): ParentHyperParameterTuningJob[] => { - return (output || []).map((entry: any) => deserializeAws_json1_1ParentHyperParameterTuningJob(entry, context)); +): PipelineExecutionSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1PipelineExecutionSummary(entry, context)); }; -const deserializeAws_json1_1Parents = (output: any, context: __SerdeContext): Parent[] => { - return (output || []).map((entry: any) => deserializeAws_json1_1Parent(entry, context)); +const deserializeAws_json1_1PipelineSummary = (output: any, context: __SerdeContext): PipelineSummary => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + LastExecutionTime: + output.LastExecutionTime !== undefined && output.LastExecutionTime !== null + ? new Date(Math.round(output.LastExecutionTime * 1000)) + : undefined, + LastModifiedTime: + output.LastModifiedTime !== undefined && output.LastModifiedTime !== null + ? new Date(Math.round(output.LastModifiedTime * 1000)) + : undefined, + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + PipelineDescription: + output.PipelineDescription !== undefined && output.PipelineDescription !== null + ? output.PipelineDescription + : undefined, + PipelineDisplayName: + output.PipelineDisplayName !== undefined && output.PipelineDisplayName !== null + ? output.PipelineDisplayName + : undefined, + PipelineName: output.PipelineName !== undefined && output.PipelineName !== null ? output.PipelineName : undefined, + RoleArn: output.RoleArn !== undefined && output.RoleArn !== null ? 
output.RoleArn : undefined, + } as any; +}; + +const deserializeAws_json1_1PipelineSummaryList = (output: any, context: __SerdeContext): PipelineSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1PipelineSummary(entry, context)); }; const deserializeAws_json1_1ProcessingClusterConfig = ( @@ -20072,8 +26166,23 @@ const deserializeAws_json1_1ProcessingEnvironmentMap = ( ); }; +const deserializeAws_json1_1ProcessingFeatureStoreOutput = ( + output: any, + context: __SerdeContext +): ProcessingFeatureStoreOutput => { + return { + FeatureGroupName: + output.FeatureGroupName !== undefined && output.FeatureGroupName !== null ? output.FeatureGroupName : undefined, + } as any; +}; + const deserializeAws_json1_1ProcessingInput = (output: any, context: __SerdeContext): ProcessingInput => { return { + AppManaged: output.AppManaged !== undefined && output.AppManaged !== null ? output.AppManaged : undefined, + DatasetDefinition: + output.DatasetDefinition !== undefined && output.DatasetDefinition !== null + ? deserializeAws_json1_1DatasetDefinition(output.DatasetDefinition, context) + : undefined, InputName: output.InputName !== undefined && output.InputName !== null ? output.InputName : undefined, S3Input: output.S3Input !== undefined && output.S3Input !== null @@ -20164,6 +26273,15 @@ const deserializeAws_json1_1ProcessingJob = (output: any, context: __SerdeContex } as any; }; +const deserializeAws_json1_1ProcessingJobStepMetadata = ( + output: any, + context: __SerdeContext +): ProcessingJobStepMetadata => { + return { + Arn: output.Arn !== undefined && output.Arn !== null ? 
output.Arn : undefined, + } as any; +}; + const deserializeAws_json1_1ProcessingJobSummaries = (output: any, context: __SerdeContext): ProcessingJobSummary[] => { return (output || []).map((entry: any) => deserializeAws_json1_1ProcessingJobSummary(entry, context)); }; @@ -20200,6 +26318,11 @@ const deserializeAws_json1_1ProcessingJobSummary = (output: any, context: __Serd const deserializeAws_json1_1ProcessingOutput = (output: any, context: __SerdeContext): ProcessingOutput => { return { + AppManaged: output.AppManaged !== undefined && output.AppManaged !== null ? output.AppManaged : undefined, + FeatureStoreOutput: + output.FeatureStoreOutput !== undefined && output.FeatureStoreOutput !== null + ? deserializeAws_json1_1ProcessingFeatureStoreOutput(output.FeatureStoreOutput, context) + : undefined, OutputName: output.OutputName !== undefined && output.OutputName !== null ? output.OutputName : undefined, S3Output: output.S3Output !== undefined && output.S3Output !== null @@ -20326,6 +26449,28 @@ const deserializeAws_json1_1ProductListings = (output: any, context: __SerdeCont return (output || []).map((entry: any) => entry); }; +const deserializeAws_json1_1ProjectSummary = (output: any, context: __SerdeContext): ProjectSummary => { + return { + CreationTime: + output.CreationTime !== undefined && output.CreationTime !== null + ? new Date(Math.round(output.CreationTime * 1000)) + : undefined, + ProjectArn: output.ProjectArn !== undefined && output.ProjectArn !== null ? output.ProjectArn : undefined, + ProjectDescription: + output.ProjectDescription !== undefined && output.ProjectDescription !== null + ? output.ProjectDescription + : undefined, + ProjectId: output.ProjectId !== undefined && output.ProjectId !== null ? output.ProjectId : undefined, + ProjectName: output.ProjectName !== undefined && output.ProjectName !== null ? output.ProjectName : undefined, + ProjectStatus: + output.ProjectStatus !== undefined && output.ProjectStatus !== null ? 
output.ProjectStatus : undefined, + } as any; +}; + +const deserializeAws_json1_1ProjectSummaryList = (output: any, context: __SerdeContext): ProjectSummary[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ProjectSummary(entry, context)); +}; + const deserializeAws_json1_1PropertyNameSuggestion = (output: any, context: __SerdeContext): PropertyNameSuggestion => { return { PropertyName: output.PropertyName !== undefined && output.PropertyName !== null ? output.PropertyName : undefined, @@ -20339,6 +26484,20 @@ const deserializeAws_json1_1PropertyNameSuggestionList = ( return (output || []).map((entry: any) => deserializeAws_json1_1PropertyNameSuggestion(entry, context)); }; +const deserializeAws_json1_1ProvisioningParameter = (output: any, context: __SerdeContext): ProvisioningParameter => { + return { + Key: output.Key !== undefined && output.Key !== null ? output.Key : undefined, + Value: output.Value !== undefined && output.Value !== null ? output.Value : undefined, + } as any; +}; + +const deserializeAws_json1_1ProvisioningParameters = ( + output: any, + context: __SerdeContext +): ProvisioningParameter[] => { + return (output || []).map((entry: any) => deserializeAws_json1_1ProvisioningParameter(entry, context)); +}; + const deserializeAws_json1_1PublicWorkforceTaskPrice = ( output: any, context: __SerdeContext @@ -20351,6 +26510,18 @@ const deserializeAws_json1_1PublicWorkforceTaskPrice = ( } as any; }; +const deserializeAws_json1_1PutModelPackageGroupPolicyOutput = ( + output: any, + context: __SerdeContext +): PutModelPackageGroupPolicyOutput => { + return { + ModelPackageGroupArn: + output.ModelPackageGroupArn !== undefined && output.ModelPackageGroupArn !== null + ? 
output.ModelPackageGroupArn + : undefined, + } as any; +}; + const deserializeAws_json1_1RealtimeInferenceInstanceTypes = ( output: any, context: __SerdeContext @@ -20358,6 +26529,36 @@ const deserializeAws_json1_1RealtimeInferenceInstanceTypes = ( return (output || []).map((entry: any) => entry); }; +const deserializeAws_json1_1RedshiftDatasetDefinition = ( + output: any, + context: __SerdeContext +): RedshiftDatasetDefinition => { + return { + ClusterId: output.ClusterId !== undefined && output.ClusterId !== null ? output.ClusterId : undefined, + ClusterRoleArn: + output.ClusterRoleArn !== undefined && output.ClusterRoleArn !== null ? output.ClusterRoleArn : undefined, + Database: output.Database !== undefined && output.Database !== null ? output.Database : undefined, + DbUser: output.DbUser !== undefined && output.DbUser !== null ? output.DbUser : undefined, + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, + OutputCompression: + output.OutputCompression !== undefined && output.OutputCompression !== null + ? output.OutputCompression + : undefined, + OutputFormat: output.OutputFormat !== undefined && output.OutputFormat !== null ? output.OutputFormat : undefined, + OutputS3Uri: output.OutputS3Uri !== undefined && output.OutputS3Uri !== null ? output.OutputS3Uri : undefined, + QueryString: output.QueryString !== undefined && output.QueryString !== null ? output.QueryString : undefined, + } as any; +}; + +const deserializeAws_json1_1RegisterModelStepMetadata = ( + output: any, + context: __SerdeContext +): RegisterModelStepMetadata => { + return { + Arn: output.Arn !== undefined && output.Arn !== null ? output.Arn : undefined, + } as any; +}; + const deserializeAws_json1_1RenderingError = (output: any, context: __SerdeContext): RenderingError => { return { Code: output.Code !== undefined && output.Code !== null ? 
output.Code : undefined, @@ -20483,6 +26684,13 @@ const deserializeAws_json1_1S3DataSource = (output: any, context: __SerdeContext } as any; }; +const deserializeAws_json1_1S3StorageConfig = (output: any, context: __SerdeContext): S3StorageConfig => { + return { + KmsKeyId: output.KmsKeyId !== undefined && output.KmsKeyId !== null ? output.KmsKeyId : undefined, + S3Uri: output.S3Uri !== undefined && output.S3Uri !== null ? output.S3Uri : undefined, + } as any; +}; + const deserializeAws_json1_1ScheduleConfig = (output: any, context: __SerdeContext): ScheduleConfig => { return { ScheduleExpression: @@ -20494,10 +26702,34 @@ const deserializeAws_json1_1ScheduleConfig = (output: any, context: __SerdeConte const deserializeAws_json1_1SearchRecord = (output: any, context: __SerdeContext): SearchRecord => { return { + Endpoint: + output.Endpoint !== undefined && output.Endpoint !== null + ? deserializeAws_json1_1Endpoint(output.Endpoint, context) + : undefined, Experiment: output.Experiment !== undefined && output.Experiment !== null ? deserializeAws_json1_1Experiment(output.Experiment, context) : undefined, + FeatureGroup: + output.FeatureGroup !== undefined && output.FeatureGroup !== null + ? deserializeAws_json1_1FeatureGroup(output.FeatureGroup, context) + : undefined, + ModelPackage: + output.ModelPackage !== undefined && output.ModelPackage !== null + ? deserializeAws_json1_1ModelPackage(output.ModelPackage, context) + : undefined, + ModelPackageGroup: + output.ModelPackageGroup !== undefined && output.ModelPackageGroup !== null + ? deserializeAws_json1_1ModelPackageGroup(output.ModelPackageGroup, context) + : undefined, + Pipeline: + output.Pipeline !== undefined && output.Pipeline !== null + ? deserializeAws_json1_1Pipeline(output.Pipeline, context) + : undefined, + PipelineExecution: + output.PipelineExecution !== undefined && output.PipelineExecution !== null + ? 
deserializeAws_json1_1PipelineExecution(output.PipelineExecution, context) + : undefined, TrainingJob: output.TrainingJob !== undefined && output.TrainingJob !== null ? deserializeAws_json1_1TrainingJob(output.TrainingJob, context) @@ -20555,6 +26787,40 @@ const deserializeAws_json1_1SecurityGroupIds = (output: any, context: __SerdeCon return (output || []).map((entry: any) => entry); }; +const deserializeAws_json1_1ServiceCatalogProvisionedProductDetails = ( + output: any, + context: __SerdeContext +): ServiceCatalogProvisionedProductDetails => { + return { + ProvisionedProductId: + output.ProvisionedProductId !== undefined && output.ProvisionedProductId !== null + ? output.ProvisionedProductId + : undefined, + ProvisionedProductStatusMessage: + output.ProvisionedProductStatusMessage !== undefined && output.ProvisionedProductStatusMessage !== null + ? output.ProvisionedProductStatusMessage + : undefined, + } as any; +}; + +const deserializeAws_json1_1ServiceCatalogProvisioningDetails = ( + output: any, + context: __SerdeContext +): ServiceCatalogProvisioningDetails => { + return { + PathId: output.PathId !== undefined && output.PathId !== null ? output.PathId : undefined, + ProductId: output.ProductId !== undefined && output.ProductId !== null ? output.ProductId : undefined, + ProvisioningArtifactId: + output.ProvisioningArtifactId !== undefined && output.ProvisioningArtifactId !== null + ? output.ProvisioningArtifactId + : undefined, + ProvisioningParameters: + output.ProvisioningParameters !== undefined && output.ProvisioningParameters !== null + ? 
deserializeAws_json1_1ProvisioningParameters(output.ProvisioningParameters, context) + : undefined, + } as any; +}; + const deserializeAws_json1_1SharingSettings = (output: any, context: __SerdeContext): SharingSettings => { return { NotebookOutputOption: @@ -20605,6 +26871,18 @@ const deserializeAws_json1_1SourceIpConfig = (output: any, context: __SerdeConte } as any; }; +const deserializeAws_json1_1StartPipelineExecutionResponse = ( + output: any, + context: __SerdeContext +): StartPipelineExecutionResponse => { + return { + PipelineExecutionArn: + output.PipelineExecutionArn !== undefined && output.PipelineExecutionArn !== null + ? output.PipelineExecutionArn + : undefined, + } as any; +}; + const deserializeAws_json1_1StoppingCondition = (output: any, context: __SerdeContext): StoppingCondition => { return { MaxRuntimeInSeconds: @@ -20618,6 +26896,18 @@ const deserializeAws_json1_1StoppingCondition = (output: any, context: __SerdeCo } as any; }; +const deserializeAws_json1_1StopPipelineExecutionResponse = ( + output: any, + context: __SerdeContext +): StopPipelineExecutionResponse => { + return { + PipelineExecutionArn: + output.PipelineExecutionArn !== undefined && output.PipelineExecutionArn !== null + ? output.PipelineExecutionArn + : undefined, + } as any; +}; + const deserializeAws_json1_1Subnets = (output: any, context: __SerdeContext): string[] => { return (output || []).map((entry: any) => entry); }; @@ -20682,6 +26972,20 @@ const deserializeAws_json1_1TensorBoardOutputConfig = ( } as any; }; +const deserializeAws_json1_1TrafficRoutingConfig = (output: any, context: __SerdeContext): TrafficRoutingConfig => { + return { + CanarySize: + output.CanarySize !== undefined && output.CanarySize !== null + ? deserializeAws_json1_1CapacitySize(output.CanarySize, context) + : undefined, + Type: output.Type !== undefined && output.Type !== null ? 
output.Type : undefined, + WaitIntervalInSeconds: + output.WaitIntervalInSeconds !== undefined && output.WaitIntervalInSeconds !== null + ? output.WaitIntervalInSeconds + : undefined, + } as any; +}; + const deserializeAws_json1_1TrainingInstanceTypes = ( output: any, context: __SerdeContext @@ -20862,6 +27166,15 @@ const deserializeAws_json1_1TrainingJobStatusCounters = ( } as any; }; +const deserializeAws_json1_1TrainingJobStepMetadata = ( + output: any, + context: __SerdeContext +): TrainingJobStepMetadata => { + return { + Arn: output.Arn !== undefined && output.Arn !== null ? output.Arn : undefined, + } as any; +}; + const deserializeAws_json1_1TrainingJobSummaries = (output: any, context: __SerdeContext): TrainingJobSummary[] => { return (output || []).map((entry: any) => deserializeAws_json1_1TrainingJobSummary(entry, context)); }; @@ -21068,6 +27381,15 @@ const deserializeAws_json1_1TransformJobDefinition = (output: any, context: __Se } as any; }; +const deserializeAws_json1_1TransformJobStepMetadata = ( + output: any, + context: __SerdeContext +): TransformJobStepMetadata => { + return { + Arn: output.Arn !== undefined && output.Arn !== null ? output.Arn : undefined, + } as any; +}; + const deserializeAws_json1_1TransformJobSummaries = (output: any, context: __SerdeContext): TransformJobSummary[] => { return (output || []).map((entry: any) => deserializeAws_json1_1TransformJobSummary(entry, context)); }; @@ -21146,6 +27468,10 @@ const deserializeAws_json1_1Trial = (output: any, context: __SerdeContext): Tria output.LastModifiedTime !== undefined && output.LastModifiedTime !== null ? new Date(Math.round(output.LastModifiedTime * 1000)) : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, Source: output.Source !== undefined && output.Source !== null ? 
deserializeAws_json1_1TrialSource(output.Source, context) @@ -21188,6 +27514,10 @@ const deserializeAws_json1_1TrialComponent = (output: any, context: __SerdeConte output.LastModifiedTime !== undefined && output.LastModifiedTime !== null ? new Date(Math.round(output.LastModifiedTime * 1000)) : undefined, + MetadataProperties: + output.MetadataProperties !== undefined && output.MetadataProperties !== null + ? deserializeAws_json1_1MetadataProperties(output.MetadataProperties, context) + : undefined, Metrics: output.Metrics !== undefined && output.Metrics !== null ? deserializeAws_json1_1TrialComponentMetricSummaries(output.Metrics, context) @@ -21299,10 +27629,17 @@ const deserializeAws_json1_1TrialComponentParameterValue = ( output: any, context: __SerdeContext ): TrialComponentParameterValue => { - return { - NumberValue: output.NumberValue !== undefined && output.NumberValue !== null ? output.NumberValue : undefined, - StringValue: output.StringValue !== undefined && output.StringValue !== null ? output.StringValue : undefined, - } as any; + if (output.NumberValue !== undefined && output.NumberValue !== null) { + return { + NumberValue: output.NumberValue, + }; + } + if (output.StringValue !== undefined && output.StringValue !== null) { + return { + StringValue: output.StringValue, + }; + } + return { $unknown: Object.entries(output)[0] }; }; const deserializeAws_json1_1TrialComponentSimpleSummaries = ( @@ -21487,6 +27824,12 @@ const deserializeAws_json1_1UiTemplateInfo = (output: any, context: __SerdeConte } as any; }; +const deserializeAws_json1_1UpdateActionResponse = (output: any, context: __SerdeContext): UpdateActionResponse => { + return { + ActionArn: output.ActionArn !== undefined && output.ActionArn !== null ? 
output.ActionArn : undefined, + } as any; +}; + const deserializeAws_json1_1UpdateAppImageConfigResponse = ( output: any, context: __SerdeContext @@ -21499,6 +27842,12 @@ const deserializeAws_json1_1UpdateAppImageConfigResponse = ( } as any; }; +const deserializeAws_json1_1UpdateArtifactResponse = (output: any, context: __SerdeContext): UpdateArtifactResponse => { + return { + ArtifactArn: output.ArtifactArn !== undefined && output.ArtifactArn !== null ? output.ArtifactArn : undefined, + } as any; +}; + const deserializeAws_json1_1UpdateCodeRepositoryOutput = ( output: any, context: __SerdeContext @@ -21511,6 +27860,12 @@ const deserializeAws_json1_1UpdateCodeRepositoryOutput = ( } as any; }; +const deserializeAws_json1_1UpdateContextResponse = (output: any, context: __SerdeContext): UpdateContextResponse => { + return { + ContextArn: output.ContextArn !== undefined && output.ContextArn !== null ? output.ContextArn : undefined, + } as any; +}; + const deserializeAws_json1_1UpdateDomainResponse = (output: any, context: __SerdeContext): UpdateDomainResponse => { return { DomainArn: output.DomainArn !== undefined && output.DomainArn !== null ? output.DomainArn : undefined, @@ -21548,6 +27903,16 @@ const deserializeAws_json1_1UpdateImageResponse = (output: any, context: __Serde } as any; }; +const deserializeAws_json1_1UpdateModelPackageOutput = ( + output: any, + context: __SerdeContext +): UpdateModelPackageOutput => { + return { + ModelPackageArn: + output.ModelPackageArn !== undefined && output.ModelPackageArn !== null ? 
output.ModelPackageArn : undefined, + } as any; +}; + const deserializeAws_json1_1UpdateMonitoringScheduleResponse = ( output: any, context: __SerdeContext @@ -21574,6 +27939,24 @@ const deserializeAws_json1_1UpdateNotebookInstanceOutput = ( return {} as any; }; +const deserializeAws_json1_1UpdatePipelineExecutionResponse = ( + output: any, + context: __SerdeContext +): UpdatePipelineExecutionResponse => { + return { + PipelineExecutionArn: + output.PipelineExecutionArn !== undefined && output.PipelineExecutionArn !== null + ? output.PipelineExecutionArn + : undefined, + } as any; +}; + +const deserializeAws_json1_1UpdatePipelineResponse = (output: any, context: __SerdeContext): UpdatePipelineResponse => { + return { + PipelineArn: output.PipelineArn !== undefined && output.PipelineArn !== null ? output.PipelineArn : undefined, + } as any; +}; + const deserializeAws_json1_1UpdateTrialComponentResponse = ( output: any, context: __SerdeContext diff --git a/codegen/sdk-codegen/aws-models/amplifybackend.2020-08-11.json b/codegen/sdk-codegen/aws-models/amplifybackend.2020-08-11.json new file mode 100644 index 000000000000..e0482830cd75 --- /dev/null +++ b/codegen/sdk-codegen/aws-models/amplifybackend.2020-08-11.json @@ -0,0 +1,3843 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.amplifybackend#AdditionalConstraintsElement": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "REQUIRE_DIGIT", + "name": "REQUIRE_DIGIT" + }, + { + "value": "REQUIRE_LOWERCASE", + "name": "REQUIRE_LOWERCASE" + }, + { + "value": "REQUIRE_SYMBOL", + "name": "REQUIRE_SYMBOL" + }, + { + 
"value": "REQUIRE_UPPERCASE", + "name": "REQUIRE_UPPERCASE" + } + ] + } + }, + "com.amazonaws.amplifybackend#AmplifyBackend": { + "type": "service", + "version": "2020-08-11", + "operations": [ + { + "target": "com.amazonaws.amplifybackend#CloneBackend" + }, + { + "target": "com.amazonaws.amplifybackend#CreateBackend" + }, + { + "target": "com.amazonaws.amplifybackend#CreateBackendAPI" + }, + { + "target": "com.amazonaws.amplifybackend#CreateBackendAuth" + }, + { + "target": "com.amazonaws.amplifybackend#CreateBackendConfig" + }, + { + "target": "com.amazonaws.amplifybackend#CreateToken" + }, + { + "target": "com.amazonaws.amplifybackend#DeleteBackend" + }, + { + "target": "com.amazonaws.amplifybackend#DeleteBackendAPI" + }, + { + "target": "com.amazonaws.amplifybackend#DeleteBackendAuth" + }, + { + "target": "com.amazonaws.amplifybackend#DeleteToken" + }, + { + "target": "com.amazonaws.amplifybackend#GenerateBackendAPIModels" + }, + { + "target": "com.amazonaws.amplifybackend#GetBackend" + }, + { + "target": "com.amazonaws.amplifybackend#GetBackendAPI" + }, + { + "target": "com.amazonaws.amplifybackend#GetBackendAPIModels" + }, + { + "target": "com.amazonaws.amplifybackend#GetBackendAuth" + }, + { + "target": "com.amazonaws.amplifybackend#GetBackendJob" + }, + { + "target": "com.amazonaws.amplifybackend#GetToken" + }, + { + "target": "com.amazonaws.amplifybackend#ListBackendJobs" + }, + { + "target": "com.amazonaws.amplifybackend#RemoveAllBackends" + }, + { + "target": "com.amazonaws.amplifybackend#RemoveBackendConfig" + }, + { + "target": "com.amazonaws.amplifybackend#UpdateBackendAPI" + }, + { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuth" + }, + { + "target": "com.amazonaws.amplifybackend#UpdateBackendConfig" + }, + { + "target": "com.amazonaws.amplifybackend#UpdateBackendJob" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "AmplifyBackend", + "arnNamespace": "amplifybackend", + "cloudFormationName": "AmplifyBackend", + 
"cloudTrailEventSource": "amplifybackend.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "amplifybackend" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          AWS Amplify Admin API

          ", + "smithy.api#title": "AmplifyBackend" + } + }, + "com.amazonaws.amplifybackend#AuthResources": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "USER_POOL_ONLY", + "name": "USER_POOL_ONLY" + }, + { + "value": "IDENTITY_POOL_AND_USER_POOL", + "name": "IDENTITY_POOL_AND_USER_POOL" + } + ] + } + }, + "com.amazonaws.amplifybackend#BackendAPIAppSyncAuthSettings": { + "type": "structure", + "members": { + "CognitoUserPoolId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The Amazon Cognito user pool id, if Amazon Cognito was used as an authentication setting to access your data models.

          ", + "smithy.api#jsonName": "cognitoUserPoolId" + } + }, + "Description": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The api key description for API_KEY, if it was used as an authentication mechanism to access your data models.

          ", + "smithy.api#jsonName": "description" + } + }, + "ExpirationTime": { + "target": "com.amazonaws.amplifybackend#__double", + "traits": { + "smithy.api#documentation": "

          The api key expiration time for API_KEY, if it was used as an authentication mechanism to access your data models.

          ", + "smithy.api#jsonName": "expirationTime" + } + }, + "OpenIDAuthTTL": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The expiry time for the OpenID authentication mechanism.

          ", + "smithy.api#jsonName": "openIDAuthTTL" + } + }, + "OpenIDClientId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The clientID for openID, if openID was used as an authentication setting to access your data models.

          ", + "smithy.api#jsonName": "openIDClientId" + } + }, + "OpenIDIatTTL": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The expiry time for the OpenID authentication mechanism.

          ", + "smithy.api#jsonName": "openIDIatTTL" + } + }, + "OpenIDIssueURL": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The openID issuer URL, if openID was used as an authentication setting to access your data models.

          ", + "smithy.api#jsonName": "openIDIssueURL" + } + }, + "OpenIDProviderName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The open id provider name, if openID was used as an authentication mechanism to access your data models.

          ", + "smithy.api#jsonName": "openIDProviderName" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The authentication settings for accessing provisioned data models in your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#BackendAPIAuthType": { + "type": "structure", + "members": { + "Mode": { + "target": "com.amazonaws.amplifybackend#Mode", + "traits": { + "smithy.api#documentation": "

          Describes the authentication mode.

          ", + "smithy.api#jsonName": "mode" + } + }, + "Settings": { + "target": "com.amazonaws.amplifybackend#BackendAPIAppSyncAuthSettings", + "traits": { + "smithy.api#documentation": "

          Describes settings for the authentication mode.

          ", + "smithy.api#jsonName": "settings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the auth types for your configured data models.

          " + } + }, + "com.amazonaws.amplifybackend#BackendAPIConflictResolution": { + "type": "structure", + "members": { + "ResolutionStrategy": { + "target": "com.amazonaws.amplifybackend#ResolutionStrategy", + "traits": { + "smithy.api#documentation": "

          The strategy for conflict resolution.

          ", + "smithy.api#jsonName": "resolutionStrategy" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the conflict resolution configuration for your data model configured in your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#BackendAPIResourceConfig": { + "type": "structure", + "members": { + "AdditionalAuthTypes": { + "target": "com.amazonaws.amplifybackend#ListOfBackendAPIAuthType", + "traits": { + "smithy.api#documentation": "

          Additional authentication methods used to interact with your data models.

          ", + "smithy.api#jsonName": "additionalAuthTypes" + } + }, + "ApiName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The API name used to interact with the data model, configured as a part of the amplify project.

          ", + "smithy.api#jsonName": "apiName" + } + }, + "ConflictResolution": { + "target": "com.amazonaws.amplifybackend#BackendAPIConflictResolution", + "traits": { + "smithy.api#documentation": "

          The conflict resolution strategy for your data stored in the data models.

          ", + "smithy.api#jsonName": "conflictResolution" + } + }, + "DefaultAuthType": { + "target": "com.amazonaws.amplifybackend#BackendAPIAuthType", + "traits": { + "smithy.api#documentation": "

          The default authentication type for interacting with the configured data models in your amplify project.

          ", + "smithy.api#jsonName": "defaultAuthType" + } + }, + "Service": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The service used to provision and interact with the data model.

          ", + "smithy.api#jsonName": "service" + } + }, + "TransformSchema": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The definition of the data model in the annotated transform of the graphql schema.

          ", + "smithy.api#jsonName": "transformSchema" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The resource config for the data model, configured as a part of the Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#BackendAuthSocialProviderConfig": { + "type": "structure", + "members": { + "ClientId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Describes the client_id which can be obtained from the third party social federation provider.

          ", + "smithy.api#jsonName": "client_id" + } + }, + "ClientSecret": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Describes the client_secret which can be obtained from third party social federation providers.

          ", + "smithy.api#jsonName": "client_secret" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes third party social federation configurations for allowing your app users to sign in using OAuth.

          " + } + }, + "com.amazonaws.amplifybackend#BackendJobRespObj": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId", + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName", + "smithy.api#required": {} + } + }, + "CreateTime": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The time when the job was created.

          ", + "smithy.api#jsonName": "createTime" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + }, + "UpdateTime": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The time when the job was last updated.

          ", + "smithy.api#jsonName": "updateTime" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The response object for this operation.

          " + } + }, + "com.amazonaws.amplifybackend#BadRequestException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          An error message to inform that the request has failed.

          ", + "smithy.api#jsonName": "message" + } + } + }, + "traits": { + "smithy.api#documentation": "

          An error returned if a request is not formed properly.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.amplifybackend#CloneBackend": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#CloneBackendRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#CloneBackendResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          This operation clones an existing backend.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/environments/{BackendEnvironmentName}/clone", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#CloneBackendRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "TargetEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the destination backend environment to be created.

          ", + "smithy.api#jsonName": "targetEnvironmentName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for CloneBackend.

          " + } + }, + "com.amazonaws.amplifybackend#CloneBackendResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#CreateBackend": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#CreateBackendRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#CreateBackendResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          This operation creates a backend for an Amplify app. Backends are automatically created at the time of app creation.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendAPI": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#CreateBackendAPIRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#CreateBackendAPIResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a new backend API resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/api", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendAPIRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName", + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#BackendAPIResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resource configuration for this request.

          ", + "smithy.api#jsonName": "resourceConfig", + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for CreateBackendAPI.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAPIResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuth": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a new backend authentication resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/auth", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthForgotPasswordConfig": { + "type": "structure", + "members": { + "DeliveryMethod": { + "target": "com.amazonaws.amplifybackend#DeliveryMethod", + "traits": { + "smithy.api#documentation": "

          Describes which mode to use (either SMS or email) to deliver messages to app users that want to recover their password.

          ", + "smithy.api#jsonName": "deliveryMethod", + "smithy.api#required": {} + } + }, + "EmailSettings": { + "target": "com.amazonaws.amplifybackend#EmailSettings", + "traits": { + "smithy.api#documentation": "

          The configuration for the email sent when an app user forgets their password.

          ", + "smithy.api#jsonName": "emailSettings" + } + }, + "SmsSettings": { + "target": "com.amazonaws.amplifybackend#SmsSettings", + "traits": { + "smithy.api#documentation": "

          The configuration for the SMS message sent when an app user forgets their password.

          ", + "smithy.api#jsonName": "smsSettings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the forgot password policy for authenticating into the Amplify app.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthIdentityPoolConfig": { + "type": "structure", + "members": { + "IdentityPoolName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Name of the identity pool used for authorization.

          ", + "smithy.api#jsonName": "identityPoolName", + "smithy.api#required": {} + } + }, + "UnauthenticatedLogin": { + "target": "com.amazonaws.amplifybackend#__boolean", + "traits": { + "smithy.api#documentation": "

          Set to true or false based on whether you want to enable guest authorization to your Amplify app.

          ", + "smithy.api#jsonName": "unauthenticatedLogin", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes authorization configurations for the auth resources, configured as a part of your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthMFAConfig": { + "type": "structure", + "members": { + "MFAMode": { + "target": "com.amazonaws.amplifybackend#MFAMode", + "traits": { + "smithy.api#documentation": "

          Describes whether MFA should be [ON, OFF, OPTIONAL] for authentication in your Amplify project.

          ", + "smithy.api#required": {} + } + }, + "Settings": { + "target": "com.amazonaws.amplifybackend#Settings", + "traits": { + "smithy.api#documentation": "

          Describes the configuration settings and methods for your Amplify app users to use MFA.

          ", + "smithy.api#jsonName": "settings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes whether multi-factor authentication policies should be applied for your Amazon Cognito user pool configured as a part of your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthOAuthConfig": { + "type": "structure", + "members": { + "DomainPrefix": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The domain prefix for your Amplify app.

          ", + "smithy.api#jsonName": "domainPrefix" + } + }, + "OAuthGrantType": { + "target": "com.amazonaws.amplifybackend#OAuthGrantType", + "traits": { + "smithy.api#documentation": "

          The OAuth grant type which you use to allow app users to authenticate from your Amplify app.

          ", + "smithy.api#jsonName": "oAuthGrantType", + "smithy.api#required": {} + } + }, + "OAuthScopes": { + "target": "com.amazonaws.amplifybackend#ListOfOAuthScopesElement", + "traits": { + "smithy.api#documentation": "

          List of OAuth-related flows which you use to allow your app users to authenticate from your Amplify app.

          ", + "smithy.api#jsonName": "oAuthScopes", + "smithy.api#required": {} + } + }, + "RedirectSignInURIs": { + "target": "com.amazonaws.amplifybackend#ListOf__string", + "traits": { + "smithy.api#documentation": "

          The redirected URI for signing into your Amplify app.

          ", + "smithy.api#jsonName": "redirectSignInURIs", + "smithy.api#required": {} + } + }, + "RedirectSignOutURIs": { + "target": "com.amazonaws.amplifybackend#ListOf__string", + "traits": { + "smithy.api#documentation": "

          Redirect URLs used by OAuth when a user signs out of an Amplify app.

          ", + "smithy.api#jsonName": "redirectSignOutURIs", + "smithy.api#required": {} + } + }, + "SocialProviderSettings": { + "target": "com.amazonaws.amplifybackend#SocialProviderSettings", + "traits": { + "smithy.api#documentation": "

          The settings for using the social providers for access to your Amplify app.

          ", + "smithy.api#jsonName": "socialProviderSettings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Creates the OAuth configuration for your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthPasswordPolicyConfig": { + "type": "structure", + "members": { + "AdditionalConstraints": { + "target": "com.amazonaws.amplifybackend#ListOfAdditionalConstraintsElement", + "traits": { + "smithy.api#documentation": "

          Additional constraints for the password used to access the backend of your Amplify project.

          ", + "smithy.api#jsonName": "additionalConstraints" + } + }, + "MinimumLength": { + "target": "com.amazonaws.amplifybackend#__double", + "traits": { + "smithy.api#documentation": "

          The minimum length of password used to access the backend of your Amplify project.

          ", + "smithy.api#jsonName": "minimumLength", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The password policy configuration for the backend to your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName", + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resource configuration for this request object.

          ", + "smithy.api#jsonName": "resourceConfig", + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for CreateBackendAuth.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthResourceConfig": { + "type": "structure", + "members": { + "AuthResources": { + "target": "com.amazonaws.amplifybackend#AuthResources", + "traits": { + "smithy.api#documentation": "

          Defines whether you want to configure only authentication or both authentication and authorization settings.

          ", + "smithy.api#jsonName": "authResources", + "smithy.api#required": {} + } + }, + "IdentityPoolConfigs": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthIdentityPoolConfig", + "traits": { + "smithy.api#documentation": "

          Describes the authorization configuration for the Amazon Cognito identity pool, provisioned as a part of your auth resource in the Amplify project.

          ", + "smithy.api#jsonName": "identityPoolConfigs" + } + }, + "Service": { + "target": "com.amazonaws.amplifybackend#Service", + "traits": { + "smithy.api#documentation": "

          Defines the service name to use when configuring an authentication resource in your Amplify project.

          ", + "smithy.api#jsonName": "service", + "smithy.api#required": {} + } + }, + "UserPoolConfigs": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthUserPoolConfig", + "traits": { + "smithy.api#documentation": "

          Describes authentication configuration for the Amazon Cognito user pool, provisioned as a part of your auth resource in the Amplify project.

          ", + "smithy.api#jsonName": "userPoolConfigs", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines the resource configuration when creating an auth resource in your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendAuthUserPoolConfig": { + "type": "structure", + "members": { + "ForgotPassword": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthForgotPasswordConfig", + "traits": { + "smithy.api#documentation": "

          Describes the forgotten password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "forgotPassword" + } + }, + "Mfa": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthMFAConfig", + "traits": { + "smithy.api#documentation": "

          Describes whether multi-factor authentication policies should be applied for your Amazon Cognito user pool configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "mfa" + } + }, + "OAuth": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthOAuthConfig", + "traits": { + "smithy.api#documentation": "

          Describes the OAuth policy and rules for your Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "oAuth" + } + }, + "PasswordPolicy": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthPasswordPolicyConfig", + "traits": { + "smithy.api#documentation": "

          Describes the password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "passwordPolicy" + } + }, + "RequiredSignUpAttributes": { + "target": "com.amazonaws.amplifybackend#ListOfRequiredSignUpAttributesElement", + "traits": { + "smithy.api#documentation": "

          The required attributes to sign up new users in the user pool.

          ", + "smithy.api#jsonName": "requiredSignUpAttributes", + "smithy.api#required": {} + } + }, + "SignInMethod": { + "target": "com.amazonaws.amplifybackend#SignInMethod", + "traits": { + "smithy.api#documentation": "

          Describes the sign-in methods that your Amplify app users use to log in using the Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "signInMethod", + "smithy.api#required": {} + } + }, + "UserPoolName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The Amazon Cognito user pool name.

          ", + "smithy.api#jsonName": "userPoolName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the Amazon Cognito user pool configuration for the auth resource to be configured for your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#CreateBackendConfigRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#CreateBackendConfigResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a config object for a backend.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/config", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendConfigRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendManagerAppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID for the backend manager.

          ", + "smithy.api#jsonName": "backendManagerAppId" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for CreateBackendConfig.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendConfigResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#CreateBackendRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId", + "smithy.api#required": {} + } + }, + "AppName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the app.

          ", + "smithy.api#jsonName": "appName", + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName", + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#ResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resource configuration for the create backend request.

          ", + "smithy.api#jsonName": "resourceConfig" + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the resource.

          ", + "smithy.api#jsonName": "resourceName" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for CreateBackend.

          " + } + }, + "com.amazonaws.amplifybackend#CreateBackendResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#CreateToken": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#CreateTokenRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#CreateTokenResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Generates a one time challenge code to authenticate a user into your Amplify Admin UI.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/challenge", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#CreateTokenRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.amplifybackend#CreateTokenResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "ChallengeCode": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          One time challenge code for authenticating into Amplify Admin UI.

          ", + "smithy.api#jsonName": "challengeCode" + } + }, + "SessionId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          A unique identifier provided when creating a new challenge token.

          ", + "smithy.api#jsonName": "sessionId" + } + }, + "Ttl": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The expiry time for the one time generated token code.

          ", + "smithy.api#jsonName": "ttl" + } + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackend": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#DeleteBackendRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#DeleteBackendResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Removes an existing environment from your Amplify project.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/environments/{BackendEnvironmentName}/remove", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackendAPI": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#DeleteBackendAPIRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#DeleteBackendAPIResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an existing backend API resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/api/{BackendEnvironmentName}/remove", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackendAPIRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#BackendAPIResourceConfig", + "traits": { + "smithy.api#documentation": "

          Defines the resource configuration for the data model in your Amplify project.

          ", + "smithy.api#jsonName": "resourceConfig" + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for DeleteBackendAPI.

          " + } + }, + "com.amazonaws.amplifybackend#DeleteBackendAPIResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackendAuth": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#DeleteBackendAuthRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#DeleteBackendAuthResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an existing backend authentication resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/auth/{BackendEnvironmentName}/remove", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackendAuthRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for DeleteBackendAuth.

          " + } + }, + "com.amazonaws.amplifybackend#DeleteBackendAuthResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackendRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.amplifybackend#DeleteBackendResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#DeleteToken": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#DeleteTokenRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#DeleteTokenResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes the challenge token based on the given appId and sessionId.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/challenge/{SessionId}/remove", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#DeleteTokenRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "SessionId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The session ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.amplifybackend#DeleteTokenResponse": { + "type": "structure", + "members": { + "IsSuccess": { + "target": "com.amazonaws.amplifybackend#__boolean", + "traits": { + "smithy.api#documentation": "

          Indicates whether the request succeeded or failed.

          ", + "smithy.api#jsonName": "isSuccess" + } + } + } + }, + "com.amazonaws.amplifybackend#DeliveryMethod": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "EMAIL", + "name": "EMAIL" + }, + { + "value": "SMS", + "name": "SMS" + } + ] + } + }, + "com.amazonaws.amplifybackend#EmailSettings": { + "type": "structure", + "members": { + "EmailMessage": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The body of the email.

          ", + "smithy.api#jsonName": "emailMessage" + } + }, + "EmailSubject": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The subject of the email.

          ", + "smithy.api#jsonName": "emailSubject" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The configuration for the email sent when an app user forgets their password.

          " + } + }, + "com.amazonaws.amplifybackend#GatewayTimeoutException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          An error message to inform that the request has failed.

          ", + "smithy.api#jsonName": "message" + } + } + }, + "traits": { + "smithy.api#documentation": "

          An error returned if there's a temporary issue with the service.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 504 + } + }, + "com.amazonaws.amplifybackend#GenerateBackendAPIModels": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GenerateBackendAPIModelsRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GenerateBackendAPIModelsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Generates a model schema for an existing backend API resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/api/{BackendEnvironmentName}/generateModels", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GenerateBackendAPIModelsRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for GenerateBackendAPIModels.

          " + } + }, + "com.amazonaws.amplifybackend#GenerateBackendAPIModelsResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#GetBackend": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GetBackendRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GetBackendResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Provides project level details for your Amplify UI project.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/details", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GetBackendAPI": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GetBackendAPIRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GetBackendAPIResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Gets the details for a backend API.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/api/{BackendEnvironmentName}/details", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GetBackendAPIModels": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GetBackendAPIModelsRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GetBackendAPIModelsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Generates a model schema for an existing backend API resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/api/{BackendEnvironmentName}/getModels", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GetBackendAPIModelsRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for GetBackendAPIModels.

          " + } + }, + "com.amazonaws.amplifybackend#GetBackendAPIModelsResponse": { + "type": "structure", + "members": { + "Models": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Stringified JSON of the datastore model.

          ", + "smithy.api#jsonName": "models" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#Status", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#GetBackendAPIRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#BackendAPIResourceConfig", + "traits": { + "smithy.api#documentation": "

          Defines the resource configuration for the data model in your Amplify project.

          ", + "smithy.api#jsonName": "resourceConfig" + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for GetBackendAPI.

          " + } + }, + "com.amazonaws.amplifybackend#GetBackendAPIResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#BackendAPIResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resource configuration for this response object.

          ", + "smithy.api#jsonName": "resourceConfig" + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName" + } + } + } + }, + "com.amazonaws.amplifybackend#GetBackendAuth": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GetBackendAuthRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GetBackendAuthResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Gets backend auth details.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/auth/{BackendEnvironmentName}/details", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GetBackendAuthRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for GetBackendAuth.

          " + } + }, + "com.amazonaws.amplifybackend#GetBackendAuthResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#CreateBackendAuthResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resource configuration for authorization requests to the backend of your Amplify project.

          ", + "smithy.api#jsonName": "resourceConfig" + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName" + } + } + } + }, + "com.amazonaws.amplifybackend#GetBackendJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GetBackendJobRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GetBackendJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Returns information about a specific job.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/backend/{AppId}/job/{BackendEnvironmentName}/{JobId}", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GetBackendJobRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.amplifybackend#GetBackendJobResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "CreateTime": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The time when the job was created.

          ", + "smithy.api#jsonName": "createTime" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + }, + "UpdateTime": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The time when the job was last updated.

          ", + "smithy.api#jsonName": "updateTime" + } + } + } + }, + "com.amazonaws.amplifybackend#GetBackendRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for GetBackend.

          " + } + }, + "com.amazonaws.amplifybackend#GetBackendResponse": { + "type": "structure", + "members": { + "AmplifyMetaConfig": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          A stringified version of the current configs for your Amplify project.

          ", + "smithy.api#jsonName": "amplifyMetaConfig" + } + }, + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "AppName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the app.

          ", + "smithy.api#jsonName": "appName" + } + }, + "BackendEnvironmentList": { + "target": "com.amazonaws.amplifybackend#ListOf__string", + "traits": { + "smithy.api#documentation": "

          A list of backend environments in an array.

          ", + "smithy.api#jsonName": "backendEnvironmentList" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + } + } + }, + "com.amazonaws.amplifybackend#GetToken": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#GetTokenRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#GetTokenResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Gets the challenge token based on the given appId and sessionId.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/backend/{AppId}/challenge/{SessionId}", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#GetTokenRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "SessionId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The session ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.amplifybackend#GetTokenResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "ChallengeCode": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The one time challenge code for authenticating into Amplify Admin UI.

          ", + "smithy.api#jsonName": "challengeCode" + } + }, + "SessionId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          A unique identifier provided when creating a new challenge token.

          ", + "smithy.api#jsonName": "sessionId" + } + }, + "Ttl": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The expiry time for the one time generated token code.

          ", + "smithy.api#jsonName": "ttl" + } + } + } + }, + "com.amazonaws.amplifybackend#ListBackendJobs": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#ListBackendJobsRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#ListBackendJobsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the jobs for the backend of an Amplify app.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/job/{BackendEnvironmentName}", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#ListBackendJobsRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "MaxResults": { + "target": "com.amazonaws.amplifybackend#__integerMin1Max25", + "traits": { + "smithy.api#documentation": "

          The maximum number of results you want in the response.

          ", + "smithy.api#jsonName": "maxResults" + } + }, + "NextToken": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results.

          ", + "smithy.api#jsonName": "nextToken" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Filters the list of response objects to only include those with the specified operation name.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Filters the list of response objects to only include those with the specified status.

          ", + "smithy.api#jsonName": "status" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for ListBackendJobs.

          " + } + }, + "com.amazonaws.amplifybackend#ListBackendJobsResponse": { + "type": "structure", + "members": { + "Jobs": { + "target": "com.amazonaws.amplifybackend#ListOfBackendJobRespObj", + "traits": { + "smithy.api#documentation": "

          An array of jobs and their properties.

          ", + "smithy.api#jsonName": "jobs" + } + }, + "NextToken": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results.

          ", + "smithy.api#jsonName": "nextToken" + } + } + } + }, + "com.amazonaws.amplifybackend#ListOfAdditionalConstraintsElement": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#AdditionalConstraintsElement" + } + }, + "com.amazonaws.amplifybackend#ListOfBackendAPIAuthType": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#BackendAPIAuthType" + } + }, + "com.amazonaws.amplifybackend#ListOfBackendJobRespObj": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#BackendJobRespObj" + } + }, + "com.amazonaws.amplifybackend#ListOfMfaTypesElement": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#MfaTypesElement" + } + }, + "com.amazonaws.amplifybackend#ListOfOAuthScopesElement": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#OAuthScopesElement" + } + }, + "com.amazonaws.amplifybackend#ListOfRequiredSignUpAttributesElement": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#RequiredSignUpAttributesElement" + } + }, + "com.amazonaws.amplifybackend#ListOf__string": { + "type": "list", + "member": { + "target": "com.amazonaws.amplifybackend#__string" + } + }, + "com.amazonaws.amplifybackend#LoginAuthConfigReqObj": { + "type": "structure", + "members": { + "AwsCognitoIdentityPoolId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The Amazon Cognito identity pool ID used for the Amplify Admin UI login authorization.

          ", + "smithy.api#jsonName": "aws_cognito_identity_pool_id" + } + }, + "AwsCognitoRegion": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The AWS Region for the Amplify Admin login.

          ", + "smithy.api#jsonName": "aws_cognito_region" + } + }, + "AwsUserPoolsId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The Amazon Cognito user pool ID used for Amplify Admin UI login authentication.

          ", + "smithy.api#jsonName": "aws_user_pools_id" + } + }, + "AwsUserPoolsWebClientId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The web client ID for the Amazon Cognito user pools.

          ", + "smithy.api#jsonName": "aws_user_pools_web_client_id" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request object for this operation.

          " + } + }, + "com.amazonaws.amplifybackend#MFAMode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ON", + "name": "ON" + }, + { + "value": "OFF", + "name": "OFF" + }, + { + "value": "OPTIONAL", + "name": "OPTIONAL" + } + ] + } + }, + "com.amazonaws.amplifybackend#MfaTypesElement": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "SMS", + "name": "SMS" + }, + { + "value": "TOTP", + "name": "TOTP" + } + ] + } + }, + "com.amazonaws.amplifybackend#Mode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "API_KEY", + "name": "API_KEY" + }, + { + "value": "AWS_IAM", + "name": "AWS_IAM" + }, + { + "value": "AMAZON_COGNITO_USER_POOLS", + "name": "AMAZON_COGNITO_USER_POOLS" + }, + { + "value": "OPENID_CONNECT", + "name": "OPENID_CONNECT" + } + ] + } + }, + "com.amazonaws.amplifybackend#NotFoundException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          An error message to inform that the request has failed.

          ", + "smithy.api#jsonName": "message" + } + }, + "ResourceType": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The type of resource that wasn't found.

          ", + "smithy.api#jsonName": "resourceType" + } + } + }, + "traits": { + "smithy.api#documentation": "

          An error returned when a specific resource type is not found.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.amplifybackend#OAuthGrantType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CODE", + "name": "CODE" + }, + { + "value": "IMPLICIT", + "name": "IMPLICIT" + } + ] + } + }, + "com.amazonaws.amplifybackend#OAuthScopesElement": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "PHONE", + "name": "PHONE" + }, + { + "value": "EMAIL", + "name": "EMAIL" + }, + { + "value": "OPENID", + "name": "OPENID" + }, + { + "value": "PROFILE", + "name": "PROFILE" + }, + { + "value": "AWS_COGNITO_SIGNIN_USER_ADMIN", + "name": "AWS_COGNITO_SIGNIN_USER_ADMIN" + } + ] + } + }, + "com.amazonaws.amplifybackend#RemoveAllBackends": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#RemoveAllBackendsRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#RemoveAllBackendsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Removes all backend environments from your Amplify project.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/remove", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#RemoveAllBackendsRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "CleanAmplifyApp": { + "target": "com.amazonaws.amplifybackend#__boolean", + "traits": { + "smithy.api#documentation": "

          Cleans up the Amplify Console app if this value is set to true.

          ", + "smithy.api#jsonName": "cleanAmplifyApp" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for RemoveAllBackends.

          " + } + }, + "com.amazonaws.amplifybackend#RemoveAllBackendsResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#RemoveBackendConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#RemoveBackendConfigRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#RemoveBackendConfigResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Removes the AWS resources required to access the Amplify Admin UI.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/config/remove", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#RemoveBackendConfigRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.amplifybackend#RemoveBackendConfigResponse": { + "type": "structure", + "members": { + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + } + } + }, + "com.amazonaws.amplifybackend#RequiredSignUpAttributesElement": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ADDRESS", + "name": "ADDRESS" + }, + { + "value": "BIRTHDATE", + "name": "BIRTHDATE" + }, + { + "value": "EMAIL", + "name": "EMAIL" + }, + { + "value": "FAMILY_NAME", + "name": "FAMILY_NAME" + }, + { + "value": "GENDER", + "name": "GENDER" + }, + { + "value": "GIVEN_NAME", + "name": "GIVEN_NAME" + }, + { + "value": "LOCALE", + "name": "LOCALE" + }, + { + "value": "MIDDLE_NAME", + "name": "MIDDLE_NAME" + }, + { + "value": "NAME", + "name": "NAME" + }, + { + "value": "NICKNAME", + "name": "NICKNAME" + }, + { + "value": "PHONE_NUMBER", + "name": "PHONE_NUMBER" + }, + { + "value": "PICTURE", + "name": "PICTURE" + }, + { + "value": "PREFERRED_USERNAME", + "name": "PREFERRED_USERNAME" + }, + { + "value": "PROFILE", + "name": "PROFILE" + }, + { + "value": "UPDATED_AT", + "name": "UPDATED_AT" + }, + { + "value": "WEBSITE", + "name": "WEBSITE" + }, + { + "value": "ZONE_INFO", + "name": "ZONE_INFO" + } + ] + } + }, + "com.amazonaws.amplifybackend#ResolutionStrategy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "OPTIMISTIC_CONCURRENCY", + "name": "OPTIMISTIC_CONCURRENCY" + }, + { + "value": "LAMBDA", + "name": "LAMBDA" + }, + { + "value": "AUTOMERGE", + "name": "AUTOMERGE" + }, + { + "value": "NONE", + "name": "NONE" + } + ] + } + }, + "com.amazonaws.amplifybackend#ResourceConfig": { + "type": "structure", + "members": {}, + "traits": { + "smithy.api#documentation": "

          Defines the resource configuration for the data model in your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#Service": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "COGNITO", + "name": "COGNITO" + } + ] + } + }, + "com.amazonaws.amplifybackend#Settings": { + "type": "structure", + "members": { + "MfaTypes": { + "target": "com.amazonaws.amplifybackend#ListOfMfaTypesElement", + "traits": { + "smithy.api#documentation": "

          The supported MFA types.

          ", + "smithy.api#jsonName": "mfaTypes" + } + }, + "SmsMessage": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The body of the SMS message.

          ", + "smithy.api#jsonName": "smsMessage" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The settings of your MFA configuration for the backend of your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#SignInMethod": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "EMAIL", + "name": "EMAIL" + }, + { + "value": "EMAIL_AND_PHONE_NUMBER", + "name": "EMAIL_AND_PHONE_NUMBER" + }, + { + "value": "PHONE_NUMBER", + "name": "PHONE_NUMBER" + }, + { + "value": "USERNAME", + "name": "USERNAME" + } + ] + } + }, + "com.amazonaws.amplifybackend#SmsSettings": { + "type": "structure", + "members": { + "SmsMessage": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The body of the SMS message.

          ", + "smithy.api#jsonName": "smsMessage" + } + } + }, + "traits": { + "smithy.api#documentation": "

          SMS settings for authentication.

          " + } + }, + "com.amazonaws.amplifybackend#SocialProviderSettings": { + "type": "structure", + "members": { + "Facebook": { + "target": "com.amazonaws.amplifybackend#BackendAuthSocialProviderConfig" + }, + "Google": { + "target": "com.amazonaws.amplifybackend#BackendAuthSocialProviderConfig" + }, + "LoginWithAmazon": { + "target": "com.amazonaws.amplifybackend#BackendAuthSocialProviderConfig" + } + }, + "traits": { + "smithy.api#documentation": "

          The settings for using the social providers for access to your Amplify app.

          " + } + }, + "com.amazonaws.amplifybackend#Status": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "LATEST", + "name": "LATEST" + }, + { + "value": "STALE", + "name": "STALE" + } + ] + } + }, + "com.amazonaws.amplifybackend#TooManyRequestsException": { + "type": "structure", + "members": { + "LimitType": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The type of limit that was exceeded.

          ", + "smithy.api#jsonName": "limitType" + } + }, + "Message": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          An error message to inform that the request has failed.

          ", + "smithy.api#jsonName": "message" + } + } + }, + "traits": { + "smithy.api#documentation": "

          An error that is returned when a limit of a specific type has been exceeded.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAPI": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAPIRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAPIResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates an existing backend API resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/api/{BackendEnvironmentName}", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAPIRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#BackendAPIResourceConfig", + "traits": { + "smithy.api#documentation": "

          Defines the resource configuration for the data model in your Amplify project.

          ", + "smithy.api#jsonName": "resourceConfig" + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for UpdateBackendAPI.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAPIResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuth": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates an existing backend authentication resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/auth/{BackendEnvironmentName}", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthForgotPasswordConfig": { + "type": "structure", + "members": { + "DeliveryMethod": { + "target": "com.amazonaws.amplifybackend#DeliveryMethod", + "traits": { + "smithy.api#documentation": "

          Describes which mode to use (either SMS or email) to deliver messages to app users that want to recover their password.

          ", + "smithy.api#jsonName": "deliveryMethod" + } + }, + "EmailSettings": { + "target": "com.amazonaws.amplifybackend#EmailSettings", + "traits": { + "smithy.api#documentation": "

          The configuration for the email sent when an app user forgets their password.

          ", + "smithy.api#jsonName": "emailSettings" + } + }, + "SmsSettings": { + "target": "com.amazonaws.amplifybackend#SmsSettings", + "traits": { + "smithy.api#documentation": "

          The configuration for the SMS message sent when an Amplify app user forgets their password.

          ", + "smithy.api#jsonName": "smsSettings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the forgot password policy for authenticating into the Amplify app.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthIdentityPoolConfig": { + "type": "structure", + "members": { + "UnauthenticatedLogin": { + "target": "com.amazonaws.amplifybackend#__boolean", + "traits": { + "smithy.api#documentation": "

          A boolean value which can be set to allow or disallow guest level authorization into your Amplify app.

          ", + "smithy.api#jsonName": "unauthenticatedLogin" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the authorization configuration for the Amazon Cognito identity pool, provisioned as a part of your auth resource in the Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthMFAConfig": { + "type": "structure", + "members": { + "MFAMode": { + "target": "com.amazonaws.amplifybackend#MFAMode", + "traits": { + "smithy.api#documentation": "

          The MFA mode for the backend of your Amplify project.

          " + } + }, + "Settings": { + "target": "com.amazonaws.amplifybackend#Settings", + "traits": { + "smithy.api#documentation": "

          The settings of your MFA configuration for the backend of your Amplify project.

          ", + "smithy.api#jsonName": "settings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Updates the multi-factor authentication (MFA) configuration for the backend of your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthOAuthConfig": { + "type": "structure", + "members": { + "DomainPrefix": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The Amazon Cognito domain prefix used to create a hosted UI for authentication.

          ", + "smithy.api#jsonName": "domainPrefix" + } + }, + "OAuthGrantType": { + "target": "com.amazonaws.amplifybackend#OAuthGrantType", + "traits": { + "smithy.api#documentation": "

          The OAuth grant type to allow app users to authenticate from your Amplify app.

          ", + "smithy.api#jsonName": "oAuthGrantType" + } + }, + "OAuthScopes": { + "target": "com.amazonaws.amplifybackend#ListOfOAuthScopesElement", + "traits": { + "smithy.api#documentation": "

          The list of OAuth related flows which can allow users to authenticate from your Amplify app.

          ", + "smithy.api#jsonName": "oAuthScopes" + } + }, + "RedirectSignInURIs": { + "target": "com.amazonaws.amplifybackend#ListOf__string", + "traits": { + "smithy.api#documentation": "

          Redirect URLs used by OAuth when a user signs in to an Amplify app.

          ", + "smithy.api#jsonName": "redirectSignInURIs" + } + }, + "RedirectSignOutURIs": { + "target": "com.amazonaws.amplifybackend#ListOf__string", + "traits": { + "smithy.api#documentation": "

          Redirect URLs used by OAuth when a user signs out of an Amplify app.

          ", + "smithy.api#jsonName": "redirectSignOutURIs" + } + }, + "SocialProviderSettings": { + "target": "com.amazonaws.amplifybackend#SocialProviderSettings", + "traits": { + "smithy.api#documentation": "

          Describes third party social federation configurations for allowing your users to sign in with OAuth.

          ", + "smithy.api#jsonName": "socialProviderSettings" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The OAuth configurations for authenticating users into your Amplify app.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthPasswordPolicyConfig": { + "type": "structure", + "members": { + "AdditionalConstraints": { + "target": "com.amazonaws.amplifybackend#ListOfAdditionalConstraintsElement", + "traits": { + "smithy.api#documentation": "

          Describes additional constraints on password requirements to sign in to the auth resource, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "additionalConstraints" + } + }, + "MinimumLength": { + "target": "com.amazonaws.amplifybackend#__double", + "traits": { + "smithy.api#documentation": "

          Describes the minimum length of password required to sign in to the auth resource, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "minimumLength" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the password policy for your Amazon Cognito user pool configured as a part of your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resource configuration for this request object.

          ", + "smithy.api#jsonName": "resourceConfig", + "smithy.api#required": {} + } + }, + "ResourceName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of this resource.

          ", + "smithy.api#jsonName": "resourceName", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for UpdateBackendAuth.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthResourceConfig": { + "type": "structure", + "members": { + "AuthResources": { + "target": "com.amazonaws.amplifybackend#AuthResources", + "traits": { + "smithy.api#documentation": "

          Defines the service name to use when configuring an authentication resource in your Amplify project.

          ", + "smithy.api#jsonName": "authResources", + "smithy.api#required": {} + } + }, + "IdentityPoolConfigs": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthIdentityPoolConfig", + "traits": { + "smithy.api#documentation": "

          Describes the authorization configuration for the Amazon Cognito identity pool, provisioned as a part of your auth resource in the Amplify project.

          ", + "smithy.api#jsonName": "identityPoolConfigs" + } + }, + "Service": { + "target": "com.amazonaws.amplifybackend#Service", + "traits": { + "smithy.api#documentation": "

          Defines the service name to use when configuring an authentication resource in your Amplify project.

          ", + "smithy.api#jsonName": "service", + "smithy.api#required": {} + } + }, + "UserPoolConfigs": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthUserPoolConfig", + "traits": { + "smithy.api#documentation": "

          Describes the authentication configuration for the Amazon Cognito userpool, provisioned as a part of your auth resource in the Amplify project.

          ", + "smithy.api#jsonName": "userPoolConfigs", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines the resource configuration when updating an authentication resource in your Amplify project.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendAuthUserPoolConfig": { + "type": "structure", + "members": { + "ForgotPassword": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthForgotPasswordConfig", + "traits": { + "smithy.api#documentation": "

          Describes the forgot password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "forgotPassword" + } + }, + "Mfa": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthMFAConfig", + "traits": { + "smithy.api#documentation": "

          Describes whether multi-factor authentication policies should be applied for your Amazon Cognito user pool configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "mfa" + } + }, + "OAuth": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthOAuthConfig", + "traits": { + "smithy.api#documentation": "

          Describes the OAuth policy and rules for your Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "oAuth" + } + }, + "PasswordPolicy": { + "target": "com.amazonaws.amplifybackend#UpdateBackendAuthPasswordPolicyConfig", + "traits": { + "smithy.api#documentation": "

          Describes the password policy for your Amazon Cognito user pool, configured as a part of your Amplify project.

          ", + "smithy.api#jsonName": "passwordPolicy" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the Amazon Cognito user pool configuration for the authorization resource to be configured for your Amplify project on an update.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#UpdateBackendConfigRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#UpdateBackendConfigResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates the AWS resources required to access the Amplify Admin UI.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/config/update", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendConfigRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "LoginAuthConfig": { + "target": "com.amazonaws.amplifybackend#LoginAuthConfigReqObj", + "traits": { + "smithy.api#documentation": "

          Describes the Amazon Cognito configuration for Admin UI access.

          ", + "smithy.api#jsonName": "loginAuthConfig" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for UpdateBackendConfig.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendConfigResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendManagerAppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID for the backend manager.

          ", + "smithy.api#jsonName": "backendManagerAppId" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "LoginAuthConfig": { + "target": "com.amazonaws.amplifybackend#LoginAuthConfigReqObj", + "traits": { + "smithy.api#documentation": "

          Describes the Amazon Cognito configurations for the Admin UI auth resource to login with.

          ", + "smithy.api#jsonName": "loginAuthConfig" + } + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.amplifybackend#UpdateBackendJobRequest" + }, + "output": { + "target": "com.amazonaws.amplifybackend#UpdateBackendJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.amplifybackend#BadRequestException" + }, + { + "target": "com.amazonaws.amplifybackend#GatewayTimeoutException" + }, + { + "target": "com.amazonaws.amplifybackend#NotFoundException" + }, + { + "target": "com.amazonaws.amplifybackend#TooManyRequestsException" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates a specific job.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/backend/{AppId}/job/{BackendEnvironmentName}/{JobId}", + "code": 200 + } + } + }, + "com.amazonaws.amplifybackend#UpdateBackendJobRequest": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Filters the list of response objects to only include those with the specified operation name.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          Filters the list of response objects to only include those with the specified status.

          ", + "smithy.api#jsonName": "status" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request body for UpdateBackendJob.

          " + } + }, + "com.amazonaws.amplifybackend#UpdateBackendJobResponse": { + "type": "structure", + "members": { + "AppId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The app ID.

          ", + "smithy.api#jsonName": "appId" + } + }, + "BackendEnvironmentName": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the backend environment.

          ", + "smithy.api#jsonName": "backendEnvironmentName" + } + }, + "CreateTime": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The time when the job was created.

          ", + "smithy.api#jsonName": "createTime" + } + }, + "Error": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          If the request failed, this is the returned error.

          ", + "smithy.api#jsonName": "error" + } + }, + "JobId": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The ID for the job.

          ", + "smithy.api#jsonName": "jobId" + } + }, + "Operation": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The name of the operation.

          ", + "smithy.api#jsonName": "operation" + } + }, + "Status": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The current status of the request.

          ", + "smithy.api#jsonName": "status" + } + }, + "UpdateTime": { + "target": "com.amazonaws.amplifybackend#__string", + "traits": { + "smithy.api#documentation": "

          The time when the job was last updated.

          ", + "smithy.api#jsonName": "updateTime" + } + } + } + }, + "com.amazonaws.amplifybackend#__boolean": { + "type": "boolean" + }, + "com.amazonaws.amplifybackend#__double": { + "type": "double" + }, + "com.amazonaws.amplifybackend#__integerMin1Max25": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 1, + "max": 25 + } + } + }, + "com.amazonaws.amplifybackend#__string": { + "type": "string" + } + } +} diff --git a/codegen/sdk-codegen/aws-models/appintegrations.2020-07-29.json b/codegen/sdk-codegen/aws-models/appintegrations.2020-07-29.json new file mode 100644 index 000000000000..0d83c1618540 --- /dev/null +++ b/codegen/sdk-codegen/aws-models/appintegrations.2020-07-29.json @@ -0,0 +1,1072 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.appintegrations#AccessDeniedException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          You do not have sufficient access to perform this action.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 403 + } + }, + "com.amazonaws.appintegrations#AmazonAppIntegrationService": { + "type": "service", + "version": "2020-07-29", + "operations": [ + { + "target": "com.amazonaws.appintegrations#CreateEventIntegration" + }, + { + "target": "com.amazonaws.appintegrations#DeleteEventIntegration" + }, + { + "target": "com.amazonaws.appintegrations#GetEventIntegration" + }, + { + "target": "com.amazonaws.appintegrations#ListEventIntegrationAssociations" + }, + { + "target": "com.amazonaws.appintegrations#ListEventIntegrations" + }, + { + "target": "com.amazonaws.appintegrations#ListTagsForResource" + }, + { + "target": "com.amazonaws.appintegrations#TagResource" + }, + { + "target": "com.amazonaws.appintegrations#UntagResource" + }, + { + "target": "com.amazonaws.appintegrations#UpdateEventIntegration" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "AppIntegrations", + "arnNamespace": "app-integrations", + "cloudFormationName": "AppIntegrations", + "cloudTrailEventSource": "appintegrations.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "app-integrations" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n \n

          The Amazon AppIntegrations service enables you to configure and reuse connections to external applications.

          \n

          For information about how you can use external applications with Amazon Connect, see Set up pre-built integrations in the Amazon Connect Administrator Guide.

          ", + "smithy.api#title": "Amazon AppIntegrations Service" + } + }, + "com.amazonaws.appintegrations#Arn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": "^arn:aws:[A-Za-z0-9][A-Za-z0-9_/.-]{0,62}:[A-Za-z0-9_/.-]{0,63}:[A-Za-z0-9_/.-]{0,63}:[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,1023}$" + } + }, + "com.amazonaws.appintegrations#ClientAssociationMetadata": { + "type": "map", + "key": { + "target": "com.amazonaws.appintegrations#NonBlankString" + }, + "value": { + "target": "com.amazonaws.appintegrations#NonBlankString" + } + }, + "com.amazonaws.appintegrations#ClientId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.appintegrations#CreateEventIntegration": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#CreateEventIntegrationRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#CreateEventIntegrationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#AccessDeniedException" + }, + { + "target": "com.amazonaws.appintegrations#DuplicateResourceException" + }, + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceQuotaExceededException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Creates an EventIntegration, given a specified name, description, and a reference to an\n Amazon EventBridge bus in your account and a partner event source that will push events to that bus. No\n objects are created in your account, only metadata that is persisted on the EventIntegration\n control plane.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/eventIntegrations", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#CreateEventIntegrationRequest": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          ", + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.appintegrations#Description", + "traits": { + "smithy.api#documentation": "

          The description of the event integration.

          " + } + }, + "EventFilter": { + "target": "com.amazonaws.appintegrations#EventFilter", + "traits": { + "smithy.api#documentation": "

          The event filter.

          ", + "smithy.api#required": {} + } + }, + "EventBridgeBus": { + "target": "com.amazonaws.appintegrations#EventBridgeBus", + "traits": { + "smithy.api#documentation": "

          The Eventbridge bus.

          ", + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.appintegrations#IdempotencyToken", + "traits": { + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} + } + }, + "Tags": { + "target": "com.amazonaws.appintegrations#TagMap", + "traits": { + "smithy.api#documentation": "

          One or more tags.

          " + } + } + } + }, + "com.amazonaws.appintegrations#CreateEventIntegrationResponse": { + "type": "structure", + "members": { + "EventIntegrationArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the event integration.

          " + } + } + } + }, + "com.amazonaws.appintegrations#DeleteEventIntegration": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#DeleteEventIntegrationRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#DeleteEventIntegrationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#AccessDeniedException" + }, + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Deletes the specified existing event integration. If the event integration is associated\n with clients, the request is rejected.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/eventIntegrations/{Name}", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#DeleteEventIntegrationRequest": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.appintegrations#DeleteEventIntegrationResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.appintegrations#Description": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1000 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.appintegrations#DuplicateResourceException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          A resource with the specified name already exists.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 409 + } + }, + "com.amazonaws.appintegrations#EventBridgeBus": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": "^[a-zA-Z0-9\\/\\._\\-]+$" + } + }, + "com.amazonaws.appintegrations#EventBridgeRuleName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": "^[a-zA-Z0-9\\/\\._\\-]+$" + } + }, + "com.amazonaws.appintegrations#EventFilter": { + "type": "structure", + "members": { + "Source": { + "target": "com.amazonaws.appintegrations#Source", + "traits": { + "smithy.api#documentation": "

          The source of the events.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          The event filter.

          " + } + }, + "com.amazonaws.appintegrations#EventIntegration": { + "type": "structure", + "members": { + "EventIntegrationArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the event integration.

          " + } + }, + "Name": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          " + } + }, + "Description": { + "target": "com.amazonaws.appintegrations#Description", + "traits": { + "smithy.api#documentation": "

          The event integration description.

          " + } + }, + "EventFilter": { + "target": "com.amazonaws.appintegrations#EventFilter", + "traits": { + "smithy.api#documentation": "

          The event integration filter.

          " + } + }, + "EventBridgeBus": { + "target": "com.amazonaws.appintegrations#EventBridgeBus", + "traits": { + "smithy.api#documentation": "

          The Amazon Eventbridge bus for the event integration.

          " + } + }, + "Tags": { + "target": "com.amazonaws.appintegrations#TagMap", + "traits": { + "smithy.api#documentation": "

          The tags.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          The event integration.

          " + } + }, + "com.amazonaws.appintegrations#EventIntegrationAssociation": { + "type": "structure", + "members": { + "EventIntegrationAssociationArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the event integration association.

          " + } + }, + "EventIntegrationAssociationId": { + "target": "com.amazonaws.appintegrations#UUID", + "traits": { + "smithy.api#documentation": "

          The identifier for the event integration association.

          " + } + }, + "EventIntegrationName": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          " + } + }, + "ClientId": { + "target": "com.amazonaws.appintegrations#ClientId", + "traits": { + "smithy.api#documentation": "

          The identifier for the client that is associated with the event integration.

          " + } + }, + "EventBridgeRuleName": { + "target": "com.amazonaws.appintegrations#EventBridgeRuleName", + "traits": { + "smithy.api#documentation": "

          The name of the EventBridge rule.

          " + } + }, + "ClientAssociationMetadata": { + "target": "com.amazonaws.appintegrations#ClientAssociationMetadata", + "traits": { + "smithy.api#documentation": "

          The metadata associated with the client.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          The event integration association.

          " + } + }, + "com.amazonaws.appintegrations#EventIntegrationAssociationsList": { + "type": "list", + "member": { + "target": "com.amazonaws.appintegrations#EventIntegrationAssociation" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 50 + } + } + }, + "com.amazonaws.appintegrations#EventIntegrationsList": { + "type": "list", + "member": { + "target": "com.amazonaws.appintegrations#EventIntegration" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 50 + } + } + }, + "com.amazonaws.appintegrations#GetEventIntegration": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#GetEventIntegrationRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#GetEventIntegrationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#AccessDeniedException" + }, + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Return information about the event integration.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/eventIntegrations/{Name}", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#GetEventIntegrationRequest": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.appintegrations#GetEventIntegrationResponse": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          " + } + }, + "Description": { + "target": "com.amazonaws.appintegrations#Description", + "traits": { + "smithy.api#documentation": "

          The description of the event integration.

          " + } + }, + "EventIntegrationArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the event integration.

          " + } + }, + "EventBridgeBus": { + "target": "com.amazonaws.appintegrations#EventBridgeBus", + "traits": { + "smithy.api#documentation": "

          The EventBridge bus.

          " + } + }, + "EventFilter": { + "target": "com.amazonaws.appintegrations#EventFilter", + "traits": { + "smithy.api#documentation": "

          The event filter.

          " + } + }, + "Tags": { + "target": "com.amazonaws.appintegrations#TagMap", + "traits": { + "smithy.api#documentation": "

          One or more tags.

          " + } + } + } + }, + "com.amazonaws.appintegrations#IdempotencyToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.appintegrations#InternalServiceError": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          Request processing failed due to an error or failure with the service.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.appintegrations#InvalidRequestException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The request is not valid.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.appintegrations#ListEventIntegrationAssociations": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#ListEventIntegrationAssociationsRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#ListEventIntegrationAssociationsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#AccessDeniedException" + }, + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Returns a paginated list of event integration associations in the account.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/eventIntegrations/{EventIntegrationName}/associations", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#ListEventIntegrationAssociationsRequest": { + "type": "structure", + "members": { + "EventIntegrationName": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.appintegrations#NextToken", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.appintegrations#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per page.

          ", + "smithy.api#httpQuery": "maxResults" + } + } + } + }, + "com.amazonaws.appintegrations#ListEventIntegrationAssociationsResponse": { + "type": "structure", + "members": { + "EventIntegrationAssociations": { + "target": "com.amazonaws.appintegrations#EventIntegrationAssociationsList", + "traits": { + "smithy.api#documentation": "

          The event integration associations.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.appintegrations#NextToken", + "traits": { + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results.

          " + } + } + } + }, + "com.amazonaws.appintegrations#ListEventIntegrations": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#ListEventIntegrationsRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#ListEventIntegrationsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#AccessDeniedException" + }, + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Returns a paginated list of event integrations in the account.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/eventIntegrations", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#ListEventIntegrationsRequest": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.appintegrations#NextToken", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.appintegrations#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per page.

          ", + "smithy.api#httpQuery": "maxResults" + } + } + } + }, + "com.amazonaws.appintegrations#ListEventIntegrationsResponse": { + "type": "structure", + "members": { + "EventIntegrations": { + "target": "com.amazonaws.appintegrations#EventIntegrationsList", + "traits": { + "smithy.api#documentation": "

          The event integrations.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.appintegrations#NextToken", + "traits": { + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results.

          " + } + } + } + }, + "com.amazonaws.appintegrations#ListTagsForResource": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#ListTagsForResourceRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#ListTagsForResourceResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Lists the tags for the specified resource.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/tags/{resourceArn}", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#ListTagsForResourceRequest": { + "type": "structure", + "members": { + "resourceArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.appintegrations#ListTagsForResourceResponse": { + "type": "structure", + "members": { + "tags": { + "target": "com.amazonaws.appintegrations#TagMap", + "traits": { + "smithy.api#documentation": "

          Information about the tags.

          " + } + } + } + }, + "com.amazonaws.appintegrations#MaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 50 + } + } + }, + "com.amazonaws.appintegrations#Message": { + "type": "string" + }, + "com.amazonaws.appintegrations#Name": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": "^[a-zA-Z0-9\\/\\._\\-]+$" + } + }, + "com.amazonaws.appintegrations#NextToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1000 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.appintegrations#NonBlankString": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.appintegrations#ResourceNotFoundException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified resource was not found.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.appintegrations#ResourceQuotaExceededException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The allowed quota for the resource has been exceeded.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.appintegrations#Source": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": "^aws\\.partner\\/.*$" + } + }, + "com.amazonaws.appintegrations#TagKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 128 + }, + "smithy.api#pattern": "^(?!aws:)[a-zA-Z+-=._:/]+$" + } + }, + "com.amazonaws.appintegrations#TagKeyList": { + "type": "list", + "member": { + "target": "com.amazonaws.appintegrations#TagKey" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 200 + } + } + }, + "com.amazonaws.appintegrations#TagMap": { + "type": "map", + "key": { + "target": "com.amazonaws.appintegrations#TagKey" + }, + "value": { + "target": "com.amazonaws.appintegrations#TagValue" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 200 + } + } + }, + "com.amazonaws.appintegrations#TagResource": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#TagResourceRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#TagResourceResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Adds the specified tags to the specified resource.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/tags/{resourceArn}", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#TagResourceRequest": { + "type": "structure", + "members": { + "resourceArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tags": { + "target": "com.amazonaws.appintegrations#TagMap", + "traits": { + "smithy.api#documentation": "

          One or more tags.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.appintegrations#TagResourceResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.appintegrations#TagValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + } + } + }, + "com.amazonaws.appintegrations#ThrottlingException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.appintegrations#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The throttling limit has been exceeded.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.appintegrations#UUID": { + "type": "string", + "traits": { + "smithy.api#pattern": "[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}" + } + }, + "com.amazonaws.appintegrations#UntagResource": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#UntagResourceRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#UntagResourceResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Removes the specified tags from the specified resource.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/tags/{resourceArn}", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#UntagResourceRequest": { + "type": "structure", + "members": { + "resourceArn": { + "target": "com.amazonaws.appintegrations#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tagKeys": { + "target": "com.amazonaws.appintegrations#TagKeyList", + "traits": { + "smithy.api#documentation": "

          The tag keys.

          ", + "smithy.api#httpQuery": "tagKeys", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.appintegrations#UntagResourceResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.appintegrations#UpdateEventIntegration": { + "type": "operation", + "input": { + "target": "com.amazonaws.appintegrations#UpdateEventIntegrationRequest" + }, + "output": { + "target": "com.amazonaws.appintegrations#UpdateEventIntegrationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.appintegrations#AccessDeniedException" + }, + { + "target": "com.amazonaws.appintegrations#InternalServiceError" + }, + { + "target": "com.amazonaws.appintegrations#InvalidRequestException" + }, + { + "target": "com.amazonaws.appintegrations#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.appintegrations#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          The Amazon AppIntegrations APIs are in preview release and are subject to change.

          \n

          Updates the description of an event integration.

          ", + "smithy.api#http": { + "method": "PATCH", + "uri": "/eventIntegrations/{Name}", + "code": 200 + } + } + }, + "com.amazonaws.appintegrations#UpdateEventIntegrationRequest": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.appintegrations#Name", + "traits": { + "smithy.api#documentation": "

          The name of the event integration.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.appintegrations#Description", + "traits": { + "smithy.api#documentation": "

          The description of the event integration.

          " + } + } + } + }, + "com.amazonaws.appintegrations#UpdateEventIntegrationResponse": { + "type": "structure", + "members": {} + } + } +} diff --git a/codegen/sdk-codegen/aws-models/connect-contact-lens.2020-08-21.json b/codegen/sdk-codegen/aws-models/connect-contact-lens.2020-08-21.json new file mode 100644 index 000000000000..bc8023a057a7 --- /dev/null +++ b/codegen/sdk-codegen/aws-models/connect-contact-lens.2020-08-21.json @@ -0,0 +1,583 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.connectcontactlens#AccessDeniedException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.connectcontactlens#Message", + "traits": { + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          You do not have sufficient access to perform this action.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 403 + } + }, + "com.amazonaws.connectcontactlens#AmazonConnectContactLens": { + "type": "service", + "version": "2020-08-21", + "operations": [ + { + "target": "com.amazonaws.connectcontactlens#ListRealtimeContactAnalysisSegments" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "Connect Contact Lens", + "arnNamespace": "connect", + "cloudFormationName": "ConnectContactLens", + "cloudTrailEventSource": "connectcontactlens.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "connect" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          Contact Lens for Amazon Connect enables you to analyze conversations between customer and agents,\n by using speech transcription, natural language processing, and intelligent search\n capabilities. It performs sentiment analysis, detects issues, and enables you to automatically\n categorize contacts.

          \n

          Contact Lens for Amazon Connect provides both real-time and post-call analytics of customer-agent\n conversations. For more information, see Analyze conversations using\n Contact Lens in the Amazon Connect Administrator Guide.

          ", + "smithy.api#title": "Amazon Connect Contact Lens" + } + }, + "com.amazonaws.connectcontactlens#Categories": { + "type": "structure", + "members": { + "MatchedCategories": { + "target": "com.amazonaws.connectcontactlens#MatchedCategories", + "traits": { + "smithy.api#documentation": "

          The category rules that have been matched in the analyzed segment.

          ", + "smithy.api#required": {} + } + }, + "MatchedDetails": { + "target": "com.amazonaws.connectcontactlens#MatchedDetails", + "traits": { + "smithy.api#documentation": "

          The category rule that was matched and when it occurred in the transcript.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides the category rules that are used to automatically categorize contacts based on\n uttered keywords and phrases.

          " + } + }, + "com.amazonaws.connectcontactlens#CategoryDetails": { + "type": "structure", + "members": { + "PointsOfInterest": { + "target": "com.amazonaws.connectcontactlens#PointsOfInterest", + "traits": { + "smithy.api#documentation": "

          The section of audio where the category rule was detected.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides information about the category rule that was matched.

          " + } + }, + "com.amazonaws.connectcontactlens#CategoryName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#CharacterOffset": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.connectcontactlens#CharacterOffsets": { + "type": "structure", + "members": { + "BeginOffsetChar": { + "target": "com.amazonaws.connectcontactlens#CharacterOffset", + "traits": { + "smithy.api#documentation": "

          The beginning of the issue.

          ", + "smithy.api#required": {} + } + }, + "EndOffsetChar": { + "target": "com.amazonaws.connectcontactlens#CharacterOffset", + "traits": { + "smithy.api#documentation": "

          The end of the issue.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          For characters that were detected as issues, where they occur in the transcript.

          " + } + }, + "com.amazonaws.connectcontactlens#ContactId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#InstanceId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#InternalServiceException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.connectcontactlens#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          Request processing failed due to an error or failure with the service.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.connectcontactlens#InvalidRequestException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.connectcontactlens#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The request is not valid.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.connectcontactlens#IssueDetected": { + "type": "structure", + "members": { + "CharacterOffsets": { + "target": "com.amazonaws.connectcontactlens#CharacterOffsets", + "traits": { + "smithy.api#documentation": "

          The offset for when the issue was detected in the segment.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Potential issues that are detected based on an artificial intelligence analysis of each\n turn in the conversation.

          " + } + }, + "com.amazonaws.connectcontactlens#IssuesDetected": { + "type": "list", + "member": { + "target": "com.amazonaws.connectcontactlens#IssueDetected" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 + } + } + }, + "com.amazonaws.connectcontactlens#ListRealtimeContactAnalysisSegments": { + "type": "operation", + "input": { + "target": "com.amazonaws.connectcontactlens#ListRealtimeContactAnalysisSegmentsRequest" + }, + "output": { + "target": "com.amazonaws.connectcontactlens#ListRealtimeContactAnalysisSegmentsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.connectcontactlens#AccessDeniedException" + }, + { + "target": "com.amazonaws.connectcontactlens#InternalServiceException" + }, + { + "target": "com.amazonaws.connectcontactlens#InvalidRequestException" + }, + { + "target": "com.amazonaws.connectcontactlens#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connectcontactlens#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          Provides a list of analysis segments for a real-time analysis session.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/realtime-contact-analysis/analysis-segments", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.connectcontactlens#ListRealtimeContactAnalysisSegmentsRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connectcontactlens#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#required": {} + } + }, + "ContactId": { + "target": "com.amazonaws.connectcontactlens#ContactId", + "traits": { + "smithy.api#documentation": "

          The identifier of the contact.

          ", + "smithy.api#required": {} + } + }, + "MaxResults": { + "target": "com.amazonaws.connectcontactlens#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per page.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.connectcontactlens#NextToken", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          " + } + } + } + }, + "com.amazonaws.connectcontactlens#ListRealtimeContactAnalysisSegmentsResponse": { + "type": "structure", + "members": { + "Segments": { + "target": "com.amazonaws.connectcontactlens#RealtimeContactAnalysisSegments", + "traits": { + "smithy.api#documentation": "

          An analyzed transcript or category.

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.connectcontactlens#NextToken", + "traits": { + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results. If response includes nextToken there are two possible scenarios:

          \n
            \n
          • \n

            There are more segments so another call is required to get them.

            \n
          • \n
          • \n

            There are no more segments at this time, but more may be available later (real-time\n analysis is in progress) so the client should call the operation again to get new\n segments.

            \n
          • \n
          \n

          If response does not include nextToken, the analysis is completed (successfully or failed) and there are no more segments to retrieve.

          " + } + } + } + }, + "com.amazonaws.connectcontactlens#MatchedCategories": { + "type": "list", + "member": { + "target": "com.amazonaws.connectcontactlens#CategoryName" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 150 + } + } + }, + "com.amazonaws.connectcontactlens#MatchedDetails": { + "type": "map", + "key": { + "target": "com.amazonaws.connectcontactlens#CategoryName" + }, + "value": { + "target": "com.amazonaws.connectcontactlens#CategoryDetails" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 150 + } + } + }, + "com.amazonaws.connectcontactlens#MaxResults": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.connectcontactlens#Message": { + "type": "string" + }, + "com.amazonaws.connectcontactlens#NextToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 131070 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#OffsetMillis": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.connectcontactlens#ParticipantId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#ParticipantRole": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#PointOfInterest": { + "type": "structure", + "members": { + "BeginOffsetMillis": { + "target": "com.amazonaws.connectcontactlens#OffsetMillis", + "traits": { + "smithy.api#documentation": "

          The beginning offset in milliseconds where the category rule was detected.

          ", + "smithy.api#required": {} + } + }, + "EndOffsetMillis": { + "target": "com.amazonaws.connectcontactlens#OffsetMillis", + "traits": { + "smithy.api#documentation": "

          The ending offset in milliseconds where the category rule was detected.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The section of the contact audio where that category rule was detected.

          " + } + }, + "com.amazonaws.connectcontactlens#PointsOfInterest": { + "type": "list", + "member": { + "target": "com.amazonaws.connectcontactlens#PointOfInterest" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 + } + } + }, + "com.amazonaws.connectcontactlens#RealtimeContactAnalysisSegment": { + "type": "structure", + "members": { + "Transcript": { + "target": "com.amazonaws.connectcontactlens#Transcript", + "traits": { + "smithy.api#documentation": "

          The analyzed transcript.

          " + } + }, + "Categories": { + "target": "com.amazonaws.connectcontactlens#Categories", + "traits": { + "smithy.api#documentation": "

          The matched category rules.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An analyzed segment for a real-time analysis session.

          " + } + }, + "com.amazonaws.connectcontactlens#RealtimeContactAnalysisSegments": { + "type": "list", + "member": { + "target": "com.amazonaws.connectcontactlens#RealtimeContactAnalysisSegment" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.connectcontactlens#ResourceNotFoundException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.connectcontactlens#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified resource was not found.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.connectcontactlens#SentimentValue": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "POSITIVE", + "name": "POSITIVE" + }, + { + "value": "NEUTRAL", + "name": "NEUTRAL" + }, + { + "value": "NEGATIVE", + "name": "NEGATIVE" + } + ] + } + }, + "com.amazonaws.connectcontactlens#ThrottlingException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.connectcontactlens#Message", + "traits": { + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The throttling limit has been exceeded.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.connectcontactlens#Transcript": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.connectcontactlens#TranscriptId", + "traits": { + "smithy.api#documentation": "

          The identifier of the transcript.

          ", + "smithy.api#required": {} + } + }, + "ParticipantId": { + "target": "com.amazonaws.connectcontactlens#ParticipantId", + "traits": { + "smithy.api#documentation": "

          The identifier of the participant.

          ", + "smithy.api#required": {} + } + }, + "ParticipantRole": { + "target": "com.amazonaws.connectcontactlens#ParticipantRole", + "traits": { + "smithy.api#documentation": "

          The role of the participant. For example, is it a customer, agent, or system.

          ", + "smithy.api#required": {} + } + }, + "Content": { + "target": "com.amazonaws.connectcontactlens#TranscriptContent", + "traits": { + "smithy.api#documentation": "

          The content of the transcript.

          ", + "smithy.api#required": {} + } + }, + "BeginOffsetMillis": { + "target": "com.amazonaws.connectcontactlens#OffsetMillis", + "traits": { + "smithy.api#documentation": "

          The beginning offset in the contact for this transcript.

          ", + "smithy.api#required": {} + } + }, + "EndOffsetMillis": { + "target": "com.amazonaws.connectcontactlens#OffsetMillis", + "traits": { + "smithy.api#documentation": "

          The end offset in the contact for this transcript.

          ", + "smithy.api#required": {} + } + }, + "Sentiment": { + "target": "com.amazonaws.connectcontactlens#SentimentValue", + "traits": { + "smithy.api#documentation": "

          The sentiment detected for this piece of transcript.

          ", + "smithy.api#required": {} + } + }, + "IssuesDetected": { + "target": "com.amazonaws.connectcontactlens#IssuesDetected", + "traits": { + "smithy.api#documentation": "

          List of positions where issues were detected on the transcript.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A list of messages in the session.

          " + } + }, + "com.amazonaws.connectcontactlens#TranscriptContent": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.connectcontactlens#TranscriptId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*\\S.*" + } + } + } +} diff --git a/codegen/sdk-codegen/aws-models/connect.2017-08-08.json b/codegen/sdk-codegen/aws-models/connect.2017-08-08.json index 67903dcf4179..cb8fee2b4861 100644 --- a/codegen/sdk-codegen/aws-models/connect.2017-08-08.json +++ b/codegen/sdk-codegen/aws-models/connect.2017-08-08.json @@ -95,9 +95,15 @@ { "target": "com.amazonaws.connect#CreateInstance" }, + { + "target": "com.amazonaws.connect#CreateIntegrationAssociation" + }, { "target": "com.amazonaws.connect#CreateRoutingProfile" }, + { + "target": "com.amazonaws.connect#CreateUseCase" + }, { "target": "com.amazonaws.connect#CreateUser" }, @@ -107,6 +113,12 @@ { "target": "com.amazonaws.connect#DeleteInstance" }, + { + "target": "com.amazonaws.connect#DeleteIntegrationAssociation" + }, + { + "target": "com.amazonaws.connect#DeleteUseCase" + }, { "target": "com.amazonaws.connect#DeleteUser" }, @@ -185,6 +197,9 @@ { "target": "com.amazonaws.connect#ListInstanceStorageConfigs" }, + { + "target": "com.amazonaws.connect#ListIntegrationAssociations" + }, { "target": "com.amazonaws.connect#ListLambdaFunctions" }, @@ -215,6 +230,9 @@ { "target": "com.amazonaws.connect#ListTagsForResource" }, + { + "target": "com.amazonaws.connect#ListUseCases" + }, { "target": "com.amazonaws.connect#ListUserHierarchyGroups" }, @@ -233,6 +251,9 @@ { "target": "com.amazonaws.connect#StartOutboundVoiceContact" }, + { + "target": "com.amazonaws.connect#StartTaskContact" + }, { "target": "com.amazonaws.connect#StopContact" }, @@ -308,7 +329,7 @@ "name": "connect" }, "aws.protocols#restJson1": {}, - "smithy.api#documentation": "

          Amazon Connect is a cloud-based contact center solution that makes it easy to set up and manage a\n customer contact center and provide reliable customer engagement at any scale.

          \n

          Amazon Connect provides rich metrics and real-time reporting that allow you to optimize contact\n routing. You can also resolve customer issues more efficiently by putting customers in touch with\n the right agents.

          \n

          There are limits to the number of Amazon Connect resources that you can create and limits to the\n number of requests that you can make per second. For more information, see Amazon Connect\n Service Quotas in the Amazon Connect Administrator Guide.

          \n

          To connect programmatically to an AWS service, you use an endpoint. For a list of Amazon Connect\n endpoints, see Amazon Connect\n Endpoints.

          \n \n

          Working with contact flows? Check out the Amazon Connect\n Flow language.

          \n
          ", + "smithy.api#documentation": "

          Amazon Connect is a cloud-based contact center solution that makes it easy to set up and manage a\n customer contact center and provide reliable customer engagement at any scale.

          \n

          Amazon Connect provides rich metrics and real-time reporting that allow you to optimize contact\n routing. You can also resolve customer issues more efficiently by putting customers in touch with\n the right agents.

          \n

          There are limits to the number of Amazon Connect resources that you can create and limits to the\n number of requests that you can make per second. For more information, see Amazon Connect\n Service Quotas in the Amazon Connect Administrator Guide.

          \n

          To connect programmatically to an AWS service, you use an endpoint. For a list of Amazon Connect\n endpoints, see Amazon Connect\n Endpoints.

          \n \n

          Working with contact flows? Check out the Amazon Connect Flow language.

          \n
          ", "smithy.api#title": "Amazon Connect Service" } }, @@ -398,7 +419,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Associates a storage resource type for the first time. You can only associate one type of\n storage configuration in a single call. This means, for example, that you can't define an\n instance with multiple S3 buckets for storing chat transcripts.

          \n \n

          This API does not create a resource that doesn't exist. It only associates it to the\n instance. Ensure that the resource being specified in the storage configuration, like an Amazon\n S3 bucket, exists when being used for association.

          ", + "smithy.api#documentation": "

          Associates a storage resource type for the first time. You can only associate one type of\n storage configuration in a single call. This means, for example, that you can't define an\n instance with multiple S3 buckets for storing chat transcripts.

          \n

          This API does not create a resource that doesn't exist. It only associates it to the\n instance. Ensure that the resource being specified in the storage configuration, like an Amazon\n S3 bucket, exists when being used for association.

          ", "smithy.api#http": { "method": "PUT", "uri": "/instance/{InstanceId}/storage-config", @@ -782,6 +803,10 @@ { "value": "CHAT", "name": "CHAT" + }, + { + "value": "TASK", + "name": "TASK" } ] } @@ -794,7 +819,7 @@ "traits": { "smithy.api#length": { "min": 0, - "max": 1 + "max": 3 } } }, @@ -891,7 +916,7 @@ "Type": { "target": "com.amazonaws.connect#ContactFlowType", "traits": { - "smithy.api#documentation": "

          The type of the contact flow. For descriptions of the available types, see Choose a Contact Flow Type in the Amazon Connect Administrator\n Guide.

          " + "smithy.api#documentation": "

          The type of the contact flow. For descriptions of the available types, see Choose a Contact Flow Type in the Amazon Connect Administrator\n Guide.

          " } }, "Description": { @@ -1071,6 +1096,15 @@ "smithy.api#httpError": 410 } }, + "com.amazonaws.connect#ContactReferences": { + "type": "map", + "key": { + "target": "com.amazonaws.connect#ReferenceKey" + }, + "value": { + "target": "com.amazonaws.connect#Reference" + } + }, "com.amazonaws.connect#CreateContactFlow": { "type": "operation", "input": { @@ -1135,7 +1169,7 @@ "Type": { "target": "com.amazonaws.connect#ContactFlowType", "traits": { - "smithy.api#documentation": "

          The type of the contact flow. For descriptions of the available types, see Choose a Contact Flow Type in the Amazon Connect Administrator\n Guide.

          ", + "smithy.api#documentation": "

          The type of the contact flow. For descriptions of the available types, see Choose a Contact Flow Type in the Amazon Connect Administrator\n Guide.

          ", "smithy.api#required": {} } }, @@ -1148,7 +1182,7 @@ "Content": { "target": "com.amazonaws.connect#ContactFlowContent", "traits": { - "smithy.api#documentation": "

          The content of the contact flow.\n \n

          ", + "smithy.api#documentation": "

          The content of the contact flow.

          ", "smithy.api#required": {} } }, @@ -1203,7 +1237,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Initiates an Amazon Connect instance with all the supported channels enabled. It does not attach any\n storage (such as Amazon S3, or Kinesis) or allow for any configurations on features such as\n Contact Lens for Amazon Connect.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Initiates an Amazon Connect instance with all the supported channels enabled. It does not attach any\n storage (such as Amazon S3, or Kinesis) or allow for any configurations on features such as\n Contact Lens for Amazon Connect.

          ", "smithy.api#http": { "method": "PUT", "uri": "/instance", @@ -1272,6 +1306,105 @@ } } }, + "com.amazonaws.connect#CreateIntegrationAssociation": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#CreateIntegrationAssociationRequest" + }, + "output": { + "target": "com.amazonaws.connect#CreateIntegrationAssociationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.connect#DuplicateResourceException" + }, + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Create an AppIntegration association with an Amazon Connect instance.

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/instance/{InstanceId}/integration-associations", + "code": 200 + } + } + }, + "com.amazonaws.connect#CreateIntegrationAssociationRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "IntegrationType": { + "target": "com.amazonaws.connect#IntegrationType", + "traits": { + "smithy.api#documentation": "

          The type of information to be ingested.

          ", + "smithy.api#required": {} + } + }, + "IntegrationArn": { + "target": "com.amazonaws.connect#ARN", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the integration.

          ", + "smithy.api#required": {} + } + }, + "SourceApplicationUrl": { + "target": "com.amazonaws.connect#URI", + "traits": { + "smithy.api#documentation": "

          The URL for the external application.

          ", + "smithy.api#required": {} + } + }, + "SourceApplicationName": { + "target": "com.amazonaws.connect#SourceApplicationName", + "traits": { + "smithy.api#documentation": "

          The name of the external application.

          ", + "smithy.api#required": {} + } + }, + "SourceType": { + "target": "com.amazonaws.connect#SourceType", + "traits": { + "smithy.api#documentation": "

          The type of the data source.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.connect#CreateIntegrationAssociationResponse": { + "type": "structure", + "members": { + "IntegrationAssociationId": { + "target": "com.amazonaws.connect#IntegrationAssociationId", + "traits": { + "smithy.api#documentation": "

          The identifier for the association.

          " + } + }, + "IntegrationAssociationArn": { + "target": "com.amazonaws.connect#ARN", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the association.

          " + } + } + } + }, "com.amazonaws.connect#CreateRoutingProfile": { "type": "operation", "input": { @@ -1382,6 +1515,85 @@ } } }, + "com.amazonaws.connect#CreateUseCase": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#CreateUseCaseRequest" + }, + "output": { + "target": "com.amazonaws.connect#CreateUseCaseResponse" + }, + "errors": [ + { + "target": "com.amazonaws.connect#DuplicateResourceException" + }, + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Creates a use case for an AppIntegration association.

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}/use-cases", + "code": 200 + } + } + }, + "com.amazonaws.connect#CreateUseCaseRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "IntegrationAssociationId": { + "target": "com.amazonaws.connect#IntegrationAssociationId", + "traits": { + "smithy.api#documentation": "

          The identifier for the AppIntegration association.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "UseCaseType": { + "target": "com.amazonaws.connect#UseCaseType", + "traits": { + "smithy.api#documentation": "

          The type of use case to associate to the AppIntegration association. Each AppIntegration\n association can have only one of each use case type.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.connect#CreateUseCaseResponse": { + "type": "structure", + "members": { + "UseCaseId": { + "target": "com.amazonaws.connect#UseCaseId", + "traits": { + "smithy.api#documentation": "

          The identifier of the use case.

          " + } + }, + "UseCaseArn": { + "target": "com.amazonaws.connect#ARN", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the use case.

          " + } + } + } + }, "com.amazonaws.connect#CreateUser": { "type": "operation", "input": { @@ -1475,7 +1687,7 @@ "ParentGroupId": { "target": "com.amazonaws.connect#HierarchyGroupId", "traits": { - "smithy.api#documentation": "

          The identifier for the parent hierarchy group. The user hierarchy is created at level one if the parent group ID is null.

          " + "smithy.api#documentation": "

          The identifier for the parent hierarchy group. The user hierarchy is created at level one if\n the parent group ID is null.

          " } }, "InstanceId": { @@ -1790,7 +2002,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Deletes the Amazon Connect instance.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Deletes the Amazon Connect instance.

          ", "smithy.api#http": { "method": "DELETE", "uri": "/instance/{InstanceId}", @@ -1811,6 +2023,112 @@ } } }, + "com.amazonaws.connect#DeleteIntegrationAssociation": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#DeleteIntegrationAssociationRequest" + }, + "errors": [ + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Deletes an AppIntegration association from an Amazon Connect instance. The association must not have\n any use cases associated with it.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}", + "code": 200 + } + } + }, + "com.amazonaws.connect#DeleteIntegrationAssociationRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "IntegrationAssociationId": { + "target": "com.amazonaws.connect#IntegrationAssociationId", + "traits": { + "smithy.api#documentation": "

          The identifier for the AppIntegration association.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.connect#DeleteUseCase": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#DeleteUseCaseRequest" + }, + "errors": [ + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Deletes a use case from an AppIntegration association.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}/use-cases/{UseCaseId}", + "code": 200 + } + } + }, + "com.amazonaws.connect#DeleteUseCaseRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "IntegrationAssociationId": { + "target": "com.amazonaws.connect#IntegrationAssociationId", + "traits": { + "smithy.api#documentation": "

          The identifier for the AppIntegration association.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "UseCaseId": { + "target": "com.amazonaws.connect#UseCaseId", + "traits": { + "smithy.api#documentation": "

          The identifier for the use case.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, "com.amazonaws.connect#DeleteUser": { "type": "operation", "input": { @@ -1834,7 +2152,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Deletes a user account from the specified Amazon Connect instance.

          \n

          For information about what happens to a user's data when their account is deleted, see\n Delete Users from\n Your Amazon Connect Instance in the Amazon Connect Administrator\n Guide.

          ", + "smithy.api#documentation": "

          Deletes a user account from the specified Amazon Connect instance.

          \n

          For information about what happens to a user's data when their account is deleted, see\n Delete Users from\n Your Amazon Connect Instance in the Amazon Connect Administrator Guide.

          ", "smithy.api#http": { "method": "DELETE", "uri": "/users/{InstanceId}/{UserId}", @@ -1868,7 +2186,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Deletes an existing user hierarchy group. It must not be associated with any agents or have any active child groups.

          ", + "smithy.api#documentation": "

          Deletes an existing user hierarchy group. It must not be associated with any agents or have\n any active child groups.

          ", "smithy.api#http": { "method": "DELETE", "uri": "/user-hierarchy-groups/{InstanceId}/{HierarchyGroupId}", @@ -2007,7 +2325,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns the current state of the specified instance identifier. It tracks the instance while it is\n being created and returns an error status if applicable.

          \n

          If an instance is not created\n successfully, the instance status reason field returns details relevant to the reason. The instance\n in a failed state is returned only for 24 hours after\n the CreateInstance API was invoked.

          ", + "smithy.api#documentation": "

          Returns the current state of the specified instance identifier. It tracks the instance while\n it is being created and returns an error status if applicable.

          \n

          If an instance is not created successfully, the instance status reason field returns details\n relevant to the reason. The instance in a failed state is returned only for 24 hours after the\n CreateInstance API was invoked.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance/{InstanceId}", @@ -2131,7 +2449,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Retrieves the current storage configurations for the specified resource type, association ID, and instance ID.

          ", + "smithy.api#documentation": "

          Retrieves the current storage configurations for the specified resource type, association\n ID, and instance ID.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance/{InstanceId}/storage-config/{AssociationId}", @@ -2435,6 +2753,15 @@ } } }, + "com.amazonaws.connect#Description": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 4096 + } + } + }, "com.amazonaws.connect#DestinationNotAllowedException": { "type": "structure", "members": { @@ -2589,7 +2916,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Removes the storage type configurations for the specified resource type and association ID.

          ", + "smithy.api#documentation": "

          Removes the storage type configurations for the specified resource type and association\n ID.

          ", "smithy.api#http": { "method": "DELETE", "uri": "/instance/{InstanceId}/storage-config/{AssociationId}", @@ -3055,20 +3382,20 @@ "Filters": { "target": "com.amazonaws.connect#Filters", "traits": { - "smithy.api#documentation": "

          The queues, up to 100, or channels, to use to filter the metrics returned. Metric data is\n retrieved only for the resources associated with the queues or channels included in the filter.\n You can include both queue IDs and queue ARNs in the same request. Both VOICE and CHAT channels are supported.

          ", + "smithy.api#documentation": "

          The queues, up to 100, or channels, to use to filter the metrics returned. Metric data is\n retrieved only for the resources associated with the queues or channels included in the filter.\n You can include both queue IDs and queue ARNs in the same request. VOICE, CHAT, and TASK channels are supported.

          ", "smithy.api#required": {} } }, "Groupings": { "target": "com.amazonaws.connect#Groupings", "traits": { - "smithy.api#documentation": "

          The grouping applied to the metrics returned. For example, when grouped by\n QUEUE, the metrics returned apply to each queue rather than aggregated for all\n queues. If you group by CHANNEL, you should include a Channels filter. Both VOICE and CHAT channels are supported.

          \n

          If no Grouping is included in the request, a summary of metrics is\n returned.

          " + "smithy.api#documentation": "

          The grouping applied to the metrics returned. For example, when grouped by\n QUEUE, the metrics returned apply to each queue rather than aggregated for all\n queues. If you group by CHANNEL, you should include a Channels filter.\n VOICE, CHAT, and TASK channels are supported.

          \n

          If no Grouping is included in the request, a summary of metrics is\n returned.

          " } }, "CurrentMetrics": { "target": "com.amazonaws.connect#CurrentMetrics", "traits": { - "smithy.api#documentation": "

          The metrics to retrieve. Specify the name and unit for each metric. The following metrics\n are available. For a description of all the metrics, see Real-time Metrics\n Definitions in the Amazon Connect Administrator Guide.

          \n
          \n
          AGENTS_AFTER_CONTACT_WORK
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: ACW\n

          \n\n
          \n
          AGENTS_AVAILABLE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Available\n

          \n
          \n
          AGENTS_ERROR
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Error\n

          \n
          \n
          AGENTS_NON_PRODUCTIVE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: NPT (Non-Productive Time)\n

          \n
          \n
          AGENTS_ON_CALL
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: On\n contact\n

          \n
          \n
          AGENTS_ON_CONTACT
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: On\n contact\n

          \n
          \n
          AGENTS_ONLINE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Online\n

          \n
          \n
          AGENTS_STAFFED
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Staffed\n

          \n
          \n
          CONTACTS_IN_QUEUE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: In\n queue\n

          \n
          \n
          CONTACTS_SCHEDULED
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Scheduled\n

          \n
          \n
          OLDEST_CONTACT_AGE
          \n
          \n

          Unit: SECONDS

          \n

          When you use groupings, Unit says SECONDS but the Value is returned in MILLISECONDS. For example, if you get a\n response like this:

          \n

          \n { \"Metric\": { \"Name\": \"OLDEST_CONTACT_AGE\", \"Unit\": \"SECONDS\" }, \"Value\": 24113.0\n }

          \n

          The actual OLDEST_CONTACT_AGE is 24 seconds.

          \n\n

          Name in real-time metrics report: Oldest\n

          \n
          \n
          SLOTS_ACTIVE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Active\n

          \n
          \n
          SLOTS_AVAILABLE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Availability\n

          \n
          \n
          ", + "smithy.api#documentation": "

          The metrics to retrieve. Specify the name and unit for each metric. The following metrics\n are available. For a description of all the metrics, see Real-time Metrics\n Definitions in the Amazon Connect Administrator Guide.

          \n
          \n
          AGENTS_AFTER_CONTACT_WORK
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: ACW\n

          \n\n
          \n
          AGENTS_AVAILABLE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Available\n

          \n
          \n
          AGENTS_ERROR
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Error\n

          \n
          \n
          AGENTS_NON_PRODUCTIVE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: NPT (Non-Productive Time)\n

          \n
          \n
          AGENTS_ON_CALL
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: On\n contact\n

          \n
          \n
          AGENTS_ON_CONTACT
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: On\n contact\n

          \n
          \n
          AGENTS_ONLINE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Online\n

          \n
          \n
          AGENTS_STAFFED
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Staffed\n

          \n
          \n
          CONTACTS_IN_QUEUE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: In\n queue\n

          \n
          \n
          CONTACTS_SCHEDULED
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Scheduled\n

          \n
          \n
          OLDEST_CONTACT_AGE
          \n
          \n

          Unit: SECONDS

          \n

          When you use groupings, Unit says SECONDS but the Value is returned in MILLISECONDS. For\n example, if you get a response like this:

          \n

          \n { \"Metric\": { \"Name\": \"OLDEST_CONTACT_AGE\", \"Unit\": \"SECONDS\" }, \"Value\": 24113.0\n }

          \n

          The actual OLDEST_CONTACT_AGE is 24 seconds.

          \n\n

          Name in real-time metrics report: Oldest\n

          \n
          \n
          SLOTS_ACTIVE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Active\n

          \n
          \n
          SLOTS_AVAILABLE
          \n
          \n

          Unit: COUNT

          \n

          Name in real-time metrics report: Availability\n

          \n
          \n
          ", "smithy.api#required": {} } }, @@ -3238,7 +3565,7 @@ "Filters": { "target": "com.amazonaws.connect#Filters", "traits": { - "smithy.api#documentation": "

          The queues, up to 100, or channels, to use to filter the metrics returned. Metric data is\n retrieved only for the resources associated with the queues or channels included in the filter.\n You can include both queue IDs and queue ARNs in the same request. Both VOICE and CHAT channels are supported.

          ", + "smithy.api#documentation": "

          The queues, up to 100, or channels, to use to filter the metrics returned. Metric data is\n retrieved only for the resources associated with the queues or channels included in the filter.\n You can include both queue IDs and queue ARNs in the same request. VOICE, CHAT, and TASK channels are supported.

          ", "smithy.api#required": {} } }, @@ -4107,12 +4434,94 @@ "target": "com.amazonaws.connect#InstanceSummary" } }, - "com.amazonaws.connect#InternalServiceException": { - "type": "structure", - "members": { - "Message": { - "target": "com.amazonaws.connect#Message", - "traits": { + "com.amazonaws.connect#IntegrationAssociationId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 200 + } + } + }, + "com.amazonaws.connect#IntegrationAssociationSummary": { + "type": "structure", + "members": { + "IntegrationAssociationId": { + "target": "com.amazonaws.connect#IntegrationAssociationId", + "traits": { + "smithy.api#documentation": "

          The identifier for the AppIntegration association.

          " + } + }, + "IntegrationAssociationArn": { + "target": "com.amazonaws.connect#ARN", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the AppIntegration association.

          " + } + }, + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          " + } + }, + "IntegrationType": { + "target": "com.amazonaws.connect#IntegrationType", + "traits": { + "smithy.api#documentation": "

          The integration type.

          " + } + }, + "IntegrationArn": { + "target": "com.amazonaws.connect#ARN", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the AppIntegration.

          " + } + }, + "SourceApplicationUrl": { + "target": "com.amazonaws.connect#URI", + "traits": { + "smithy.api#documentation": "

          The URL for the external application.

          " + } + }, + "SourceApplicationName": { + "target": "com.amazonaws.connect#SourceApplicationName", + "traits": { + "smithy.api#documentation": "

          The user-provided, friendly name for the external application.

          " + } + }, + "SourceType": { + "target": "com.amazonaws.connect#SourceType", + "traits": { + "smithy.api#documentation": "

          The name of the source.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains summary information about the associated AppIntegrations.

          " + } + }, + "com.amazonaws.connect#IntegrationAssociationSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.connect#IntegrationAssociationSummary" + } + }, + "com.amazonaws.connect#IntegrationType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "EVENT", + "name": "EVENT" + } + ] + } + }, + "com.amazonaws.connect#InternalServiceException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.connect#Message", + "traits": { "smithy.api#documentation": "

          The message.

          " } } @@ -4223,7 +4632,7 @@ "RetentionPeriodHours": { "target": "com.amazonaws.connect#Hours", "traits": { - "smithy.api#documentation": "

          The number of hours data is retained in the stream. Kinesis Video Streams retains the data\n in a data store that is associated with the stream.

          \n \n

          The default value is 0, indicating that the stream does not persist data.

          ", + "smithy.api#documentation": "

          The number of hours data is retained in the stream. Kinesis Video Streams retains the data\n in a data store that is associated with the stream.

          \n

          The default value is 0, indicating that the stream does not persist data.

          ", "smithy.api#required": {} } }, @@ -4576,7 +4985,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns a paginated list of all attribute types for the given instance.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Returns a paginated list of all attribute types for the given instance.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance/{InstanceId}/attributes", @@ -4661,7 +5070,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns a paginated list of storage configs for the identified instance and resource\n type.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n \n

          Returns a paginated list of storage configs for the identified instance and resource\n type.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance/{InstanceId}/storage-configs", @@ -4745,7 +5154,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Return a list of instances which are in active state, creation-in-progress state, and failed\n state. Instances that aren't successfully created (they are in a failed state) are returned only\n for 24 hours after the CreateInstance API was invoked.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Return a list of instances which are in active state, creation-in-progress state, and failed\n state. Instances that aren't successfully created (they are in a failed state) are returned only\n for 24 hours after the CreateInstance API was invoked.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance", @@ -4796,6 +5205,88 @@ } } }, + "com.amazonaws.connect#ListIntegrationAssociations": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#ListIntegrationAssociationsRequest" + }, + "output": { + "target": "com.amazonaws.connect#ListIntegrationAssociationsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Provides summary information about the AppIntegration associations for the specified Amazon Connect\n instance.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/instance/{InstanceId}/integration-associations", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "items": "IntegrationAssociationSummaryList", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.connect#ListIntegrationAssociationsRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.connect#NextToken", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.connect#MaxResult100", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of results to return per page.

          ", + "smithy.api#httpQuery": "maxResults" + } + } + } + }, + "com.amazonaws.connect#ListIntegrationAssociationsResponse": { + "type": "structure", + "members": { + "IntegrationAssociationSummaryList": { + "target": "com.amazonaws.connect#IntegrationAssociationSummaryList", + "traits": { + "smithy.api#documentation": "

          The AppIntegration associations.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.connect#NextToken", + "traits": { + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results.

          " + } + } + } + }, "com.amazonaws.connect#ListLambdaFunctions": { "type": "operation", "input": { @@ -4822,7 +5313,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns a paginated list of all the Lambda functions that show up in the drop-down options in the relevant contact flow blocks.

          ", + "smithy.api#documentation": "

          Returns a paginated list of all the Lambda functions that show up in the drop-down options\n in the relevant contact flow blocks.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance/{InstanceId}/lambda-functions", @@ -4907,7 +5398,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns a paginated list of all the Amazon Lex bots currently associated with the instance.

          ", + "smithy.api#documentation": "

          Returns a paginated list of all the Amazon Lex bots currently associated with the\n instance.

          ", "smithy.api#http": { "method": "GET", "uri": "/instance/{InstanceId}/lex-bots", @@ -5090,7 +5581,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Provides information about the prompts for the specified Amazon Connect\n instance.

          ", + "smithy.api#documentation": "

          Provides information about the prompts for the specified Amazon Connect instance.

          ", "smithy.api#http": { "method": "GET", "uri": "/prompts-summary/{InstanceId}", @@ -5646,6 +6137,99 @@ } } }, + "com.amazonaws.connect#ListUseCases": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#ListUseCasesRequest" + }, + "output": { + "target": "com.amazonaws.connect#ListUseCasesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          List the use cases.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/instance/{InstanceId}/integration-associations/{IntegrationAssociationId}/use-cases", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "items": "UseCaseSummaryList", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.connect#ListUseCasesRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "IntegrationAssociationId": { + "target": "com.amazonaws.connect#IntegrationAssociationId", + "traits": { + "smithy.api#documentation": "

          The identifier for the integration association.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.connect#NextToken", + "traits": { + "smithy.api#documentation": "

          The token for the next set of results. Use the value returned in the previous \nresponse in the next request to retrieve the next set of results.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.connect#MaxResult100", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of results to return per page.

          ", + "smithy.api#httpQuery": "maxResults" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information about the use cases for the specified Amazon Connect AppIntegration\n association.

          " + } + }, + "com.amazonaws.connect#ListUseCasesResponse": { + "type": "structure", + "members": { + "UseCaseSummaryList": { + "target": "com.amazonaws.connect#UseCaseSummaryList", + "traits": { + "smithy.api#documentation": "

          The use cases.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.connect#NextToken", + "traits": { + "smithy.api#documentation": "

          If there are additional results, this is the token for the next set of results.

          " + } + } + } + }, "com.amazonaws.connect#ListUserHierarchyGroups": { "type": "operation", "input": { @@ -5901,6 +6485,15 @@ "com.amazonaws.connect#Message": { "type": "string" }, + "com.amazonaws.connect#Name": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 512 + } + } + }, "com.amazonaws.connect#NextToken": { "type": "string" }, @@ -7260,6 +7853,57 @@ } } }, + "com.amazonaws.connect#Reference": { + "type": "structure", + "members": { + "Value": { + "target": "com.amazonaws.connect#ReferenceValue", + "traits": { + "smithy.api#documentation": "

          A formatted URL that will be shown to an agent in the Contact Control Panel (CCP).

          ", + "smithy.api#required": {} + } + }, + "Type": { + "target": "com.amazonaws.connect#ReferenceType", + "traits": { + "smithy.api#documentation": "

          A valid URL.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A link that an agent selects to complete a given task. You can have up to 4,096 UTF-8 bytes\n across all references for a contact.

          " + } + }, + "com.amazonaws.connect#ReferenceKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 4096 + } + } + }, + "com.amazonaws.connect#ReferenceType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "URL", + "name": "URL" + } + ] + } + }, + "com.amazonaws.connect#ReferenceValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 4096 + } + } + }, "com.amazonaws.connect#ResourceConflictException": { "type": "structure", "members": { @@ -7369,7 +8013,7 @@ } ], "traits": { - "smithy.api#documentation": "

          When a contact is being recorded, and the recording has been suspended using\n SuspendContactRecording, this API resumes recording the call.

          \n \n

          Only voice recordings are supported at this time.

          ", + "smithy.api#documentation": "

          When a contact is being recorded, and the recording has been suspended using\n SuspendContactRecording, this API resumes recording the call.

          \n\n

          Only voice recordings are supported at this time.

          ", "smithy.api#http": { "method": "POST", "uri": "/contact/resume-recording", @@ -7443,7 +8087,7 @@ "MediaConcurrencies": { "target": "com.amazonaws.connect#MediaConcurrencies", "traits": { - "smithy.api#documentation": "

          The channels agents can handle in the Contact Control Panel (CCP) for this routing profile.

          " + "smithy.api#documentation": "

          The channels agents can handle in the Contact Control Panel (CCP) for this routing\n profile.

          " } }, "DefaultOutboundQueueId": { @@ -7498,7 +8142,7 @@ "target": "com.amazonaws.connect#Priority", "traits": { "smithy.api#box": {}, - "smithy.api#documentation": "

          The order in which contacts are to be handled for the queue. For more information, see Queues: priority and\n delay.

          ", + "smithy.api#documentation": "

          The order in which contacts are to be handled for the queue. For more information, see\n Queues: priority and\n delay.

          ", "smithy.api#required": {} } }, @@ -7512,7 +8156,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Contains information about the queue and channel for which priority and delay can be set.

          " + "smithy.api#documentation": "

          Contains information about the queue and channel for which priority and delay can be\n set.

          " } }, "com.amazonaws.connect#RoutingProfileQueueConfigList": { @@ -7554,7 +8198,7 @@ "Priority": { "target": "com.amazonaws.connect#Priority", "traits": { - "smithy.api#documentation": "

          The order in which contacts are to be handled for the queue. For more information, see Queues: priority and\n delay.

          ", + "smithy.api#documentation": "

          The order in which contacts are to be handled for the queue. For more information, see\n Queues: priority and\n delay.

          ", "smithy.api#required": {} } }, @@ -7772,6 +8416,31 @@ "smithy.api#httpError": 402 } }, + "com.amazonaws.connect#SourceApplicationName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + }, + "smithy.api#pattern": "^[a-zA-Z0-9_ -]+$" + } + }, + "com.amazonaws.connect#SourceType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "SALESFORCE", + "name": "SALESFORCE" + }, + { + "value": "ZENDESK", + "name": "ZENDESK" + } + ] + } + }, "com.amazonaws.connect#StartChatContact": { "type": "operation", "input": { @@ -7897,7 +8566,7 @@ } ], "traits": { - "smithy.api#documentation": "

          This API starts recording the contact when the agent joins the call. StartContactRecording\n is a one-time action. For example, if you use StopContactRecording to stop recording an ongoing\n call, you can't use StartContactRecording to restart it. For scenarios where the recording has\n started and you want to suspend and resume it, such as when collecting sensitive information (for\n example, a credit card number), use SuspendContactRecording and ResumeContactRecording.

          \n

          You can use this API to override the recording behavior configured in the Set recording\n behavior block.

          \n

          Only voice recordings are supported at this time.

          ", + "smithy.api#documentation": "

          This API starts recording the contact when the agent joins the call. StartContactRecording\n is a one-time action. For example, if you use StopContactRecording to stop recording an ongoing\n call, you can't use StartContactRecording to restart it. For scenarios where the recording has\n started and you want to suspend and resume it, such as when collecting sensitive information (for\n example, a credit card number), use SuspendContactRecording and ResumeContactRecording.

          \n

          You can use this API to override the recording behavior configured in the Set recording\n behavior block.

          \n

          Only voice recordings are supported at this time.

          ", "smithy.api#http": { "method": "POST", "uri": "/contact/start-recording", @@ -7974,7 +8643,7 @@ } ], "traits": { - "smithy.api#documentation": "

          This API places an outbound call to a contact, and then initiates the contact flow. It\n performs the actions in the contact flow that's specified (in ContactFlowId).

          \n \n

          Agents are not involved in initiating the outbound API (that is, dialing the contact). If\n the contact flow places an outbound call to a contact, and then puts the contact in queue, that's\n when the call is routed to the agent, like any other inbound case.

          \n \n

          There is a 60 second dialing timeout for this operation. If the call is not connected after\n 60 seconds, it fails.

          \n \n

          UK numbers with a 447 prefix are not allowed by default. Before you can dial these UK\n mobile numbers, you must submit a service quota increase request. For more information, see\n Amazon Connect Service Quotas in the Amazon Connect Administrator Guide.

          \n
          ", + "smithy.api#documentation": "

          This API places an outbound call to a contact, and then initiates the contact flow. It\n performs the actions in the contact flow that's specified (in ContactFlowId).

          \n\n

          Agents are not involved in initiating the outbound API (that is, dialing the contact). If\n the contact flow places an outbound call to a contact, and then puts the contact in queue, that's\n when the call is routed to the agent, like any other inbound case.

          \n\n

          There is a 60 second dialing timeout for this operation. If the call is not connected after\n 60 seconds, it fails.

          \n \n

          UK numbers with a 447 prefix are not allowed by default. Before you can dial these UK\n mobile numbers, you must submit a service quota increase request. For more information, see\n Amazon Connect Service Quotas in the Amazon Connect Administrator Guide.

          \n
          ", "smithy.api#http": { "method": "PUT", "uri": "/contact/outbound-voice", @@ -8044,6 +8713,111 @@ } } }, + "com.amazonaws.connect#StartTaskContact": { + "type": "operation", + "input": { + "target": "com.amazonaws.connect#StartTaskContactRequest" + }, + "output": { + "target": "com.amazonaws.connect#StartTaskContactResponse" + }, + "errors": [ + { + "target": "com.amazonaws.connect#InternalServiceException" + }, + { + "target": "com.amazonaws.connect#InvalidParameterException" + }, + { + "target": "com.amazonaws.connect#InvalidRequestException" + }, + { + "target": "com.amazonaws.connect#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.connect#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.connect#ThrottlingException" + } + ], + "traits": { + "smithy.api#documentation": "

          Initiates a contact flow to start a new task.

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/contact/task", + "code": 200 + } + } + }, + "com.amazonaws.connect#StartTaskContactRequest": { + "type": "structure", + "members": { + "InstanceId": { + "target": "com.amazonaws.connect#InstanceId", + "traits": { + "smithy.api#documentation": "

          The identifier of the Amazon Connect instance.

          ", + "smithy.api#required": {} + } + }, + "PreviousContactId": { + "target": "com.amazonaws.connect#ContactId", + "traits": { + "smithy.api#documentation": "

          The identifier of the previous chat, voice, or task contact.

          " + } + }, + "ContactFlowId": { + "target": "com.amazonaws.connect#ContactFlowId", + "traits": { + "smithy.api#documentation": "

          The identifier of the contact flow for initiating the tasks. To see the ContactFlowId in the\n Amazon Connect console user interface, on the navigation menu go to Routing, Contact Flows. Choose the contact flow. On\n the contact flow page, under the name of the contact flow, choose Show\n additional flow information. The ContactFlowId is the last part of the ARN, shown\n here in bold:

          \n

          arn:aws:connect:us-west-2:xxxxxxxxxxxx:instance/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/contact-flow/846ec553-a005-41c0-8341-xxxxxxxxxxxx\n

          ", + "smithy.api#required": {} + } + }, + "Attributes": { + "target": "com.amazonaws.connect#Attributes", + "traits": { + "smithy.api#documentation": "

          A custom key-value pair using an attribute map. The attributes are standard Amazon Connect\n attributes, and can be accessed in contact flows just like any other contact attributes.

          \n

          There can be up to 32,768 UTF-8 bytes across all key-value pairs per contact. Attribute keys\n can include only alphanumeric, dash, and underscore characters.

          " + } + }, + "Name": { + "target": "com.amazonaws.connect#Name", + "traits": { + "smithy.api#documentation": "

          The name of a task that is shown to an agent in the Contact Control Panel (CCP).

          ", + "smithy.api#required": {} + } + }, + "References": { + "target": "com.amazonaws.connect#ContactReferences", + "traits": { + "smithy.api#documentation": "

          A formatted URL that is shown to an agent in the Contact Control Panel (CCP).

          " + } + }, + "Description": { + "target": "com.amazonaws.connect#Description", + "traits": { + "smithy.api#documentation": "

          A description of the task that is shown to an agent in the Contact Control Panel\n (CCP).

          " + } + }, + "ClientToken": { + "target": "com.amazonaws.connect#ClientToken", + "traits": { + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.connect#StartTaskContactResponse": { + "type": "structure", + "members": { + "ContactId": { + "target": "com.amazonaws.connect#ContactId", + "traits": { + "smithy.api#documentation": "

          The identifier of this contact within the Amazon Connect instance.

          " + } + } + } + }, "com.amazonaws.connect#Statistic": { "type": "string", "traits": { @@ -8117,7 +8891,7 @@ } ], "traits": { - "smithy.api#documentation": "

          When a contact is being recorded, this API stops recording the call. StopContactRecording is\n a one-time action. If you use StopContactRecording to stop recording an ongoing call, you can't\n use StartContactRecording to restart it. For scenarios where the recording has started and you\n want to suspend it for sensitive information (for example, to collect a credit card number), and\n then restart it, use SuspendContactRecording and ResumeContactRecording.

          \n \n

          Only voice recordings are supported at this time.

          ", + "smithy.api#documentation": "

          When a contact is being recorded, this API stops recording the call. StopContactRecording is\n a one-time action. If you use StopContactRecording to stop recording an ongoing call, you can't\n use StartContactRecording to restart it. For scenarios where the recording has started and you\n want to suspend it for sensitive information (for example, to collect a credit card number), and\n then restart it, use SuspendContactRecording and ResumeContactRecording.

          \n\n

          Only voice recordings are supported at this time.

          ", "smithy.api#http": { "method": "POST", "uri": "/contact/stop-recording", @@ -8402,6 +9176,15 @@ "com.amazonaws.connect#Timestamp": { "type": "timestamp" }, + "com.amazonaws.connect#URI": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2000 + } + } + }, "com.amazonaws.connect#Unit": { "type": "string", "traits": { @@ -8496,7 +9279,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates or updates the contact attributes associated with the specified contact.

          \n

          You can add or update attributes for both ongoing and completed contacts. For example, you\n can update the customer's name or the reason the customer called while the call is active, or add\n notes about steps that the agent took during the call that are displayed to the next agent that\n takes the call. You can also update attributes for a contact using data from your CRM application\n and save the data with the contact in Amazon Connect. You could also flag calls for additional analysis,\n such as legal review or identifying abusive callers.

          \n

          Contact attributes are available in Amazon Connect for 24 months, and are then deleted.

          \n \n

          \n Important: You cannot use the operation to update\n attributes for contacts that occurred prior to the release of the API, September 12, 2018. You\n can update attributes only for contacts that started after the release of the API. If you attempt\n to update attributes for a contact that occurred prior to the release of the API, a 400 error is\n returned. This applies also to queued callbacks that were initiated prior to the release of the\n API but are still active in your instance.

          ", + "smithy.api#documentation": "

          Creates or updates the contact attributes associated with the specified contact.

          \n

          You can add or update attributes for both ongoing and completed contacts. For example, you\n can update the customer's name or the reason the customer called while the call is active, or add\n notes about steps that the agent took during the call that are displayed to the next agent that\n takes the call. You can also update attributes for a contact using data from your CRM application\n and save the data with the contact in Amazon Connect. You could also flag calls for additional analysis,\n such as legal review or identifying abusive callers.

          \n

          Contact attributes are available in Amazon Connect for 24 months, and are then deleted.

          \n\n

          \n Important: You cannot use the operation to update\n attributes for contacts that occurred prior to the release of the API, September 12, 2018. You\n can update attributes only for contacts that started after the release of the API. If you attempt\n to update attributes for a contact that occurred prior to the release of the API, a 400 error is\n returned. This applies also to queued callbacks that were initiated prior to the release of the\n API but are still active in your instance.

          ", "smithy.api#http": { "method": "POST", "uri": "/contact/attributes", @@ -8590,7 +9373,7 @@ "Content": { "target": "com.amazonaws.connect#ContactFlowContent", "traits": { - "smithy.api#documentation": "

          The JSON string that represents contact flow’s content. For an example, see Example contact flow in Amazon Connect Flow language in the Amazon Connect Administrator Guide.

          ", + "smithy.api#documentation": "

          The JSON string that represents contact flow’s content. For an example, see Example contact\n flow in Amazon Connect Flow language in the Amazon Connect Administrator Guide.\n

          ", "smithy.api#required": {} } } @@ -8686,7 +9469,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Updates the value for the specified attribute type.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Updates the value for the specified attribute type.

          ", "smithy.api#http": { "method": "POST", "uri": "/instance/{InstanceId}/attribute/{AttributeType}", @@ -8745,7 +9528,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Updates an existing configuration for a resource type. This API is idempotent.

          ", + "smithy.api#documentation": "

          This API is in preview release for Amazon Connect and is subject to change.

          \n

          Updates an existing configuration for a resource type. This API is idempotent.

          ", "smithy.api#http": { "method": "POST", "uri": "/instance/{InstanceId}/storage-config/{AssociationId}", @@ -8811,7 +9594,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Updates the channels that agents can handle in the Contact Control Panel (CCP) for a routing profile.

          ", + "smithy.api#documentation": "

          Updates the channels that agents can handle in the Contact Control Panel (CCP) for a routing\n profile.

          ", "smithy.api#http": { "method": "POST", "uri": "/routing-profiles/{InstanceId}/{RoutingProfileId}/concurrency", @@ -8932,7 +9715,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Updates the name and description of a routing profile. The request accepts the following data in JSON format. At least Name or Description must be provided.

          ", + "smithy.api#documentation": "

          Updates the name and description of a routing profile. The request accepts the following\n data in JSON format. At least Name or Description must be\n provided.

          ", "smithy.api#http": { "method": "POST", "uri": "/routing-profiles/{InstanceId}/{RoutingProfileId}/name", @@ -9026,7 +9809,7 @@ "QueueConfigs": { "target": "com.amazonaws.connect#RoutingProfileQueueConfigList", "traits": { - "smithy.api#documentation": "

          The queues to be updated for this routing profile.

          ", + "smithy.api#documentation": "

          The queues to be updated for this routing profile. Queues must first be associated to the\n routing profile. You can do this using AssociateRoutingProfileQueues.

          ", "smithy.api#required": {} } } @@ -9442,6 +10225,58 @@ } } }, + "com.amazonaws.connect#UseCase": { + "type": "structure", + "members": { + "UseCaseId": { + "target": "com.amazonaws.connect#UseCaseId", + "traits": { + "smithy.api#documentation": "

          The identifier for the use case.

          " + } + }, + "UseCaseArn": { + "target": "com.amazonaws.connect#ARN", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the use case.

          " + } + }, + "UseCaseType": { + "target": "com.amazonaws.connect#UseCaseType", + "traits": { + "smithy.api#documentation": "

          The type of use case to associate to the AppIntegration association. Each AppIntegration\n association can have only one of each use case type.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains the use case.

          " + } + }, + "com.amazonaws.connect#UseCaseId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 200 + } + } + }, + "com.amazonaws.connect#UseCaseSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.connect#UseCase" + } + }, + "com.amazonaws.connect#UseCaseType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "RULES_EVALUATION", + "name": "RULES_EVALUATION" + } + ] + } + }, "com.amazonaws.connect#User": { "type": "structure", "members": { diff --git a/codegen/sdk-codegen/aws-models/devops-guru.2019-12-11.json b/codegen/sdk-codegen/aws-models/devops-guru.2019-12-11.json new file mode 100644 index 000000000000..b3c77f740630 --- /dev/null +++ b/codegen/sdk-codegen/aws-models/devops-guru.2019-12-11.json @@ -0,0 +1,3429 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.devopsguru#AccessDeniedException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          You don't have permissions to perform the requested operation. The user or role that is making the request must have at \n\t\t\tleast one IAM permissions policy attached that grants the required permissions. For more information, see \n\t\t\tAccess Management in the IAM User Guide.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 403 + } + }, + "com.amazonaws.devopsguru#AddNotificationChannel": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#AddNotificationChannelRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#AddNotificationChannelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#ConflictException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAdds a notification channel to DevOps Guru. A notification channel is used to notify you about important DevOps Guru events, such as when an insight is generated.\n\t\t

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/channels", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#AddNotificationChannelRequest": { + "type": "structure", + "members": { + "Config": { + "target": "com.amazonaws.devopsguru#NotificationChannelConfig", + "traits": { + "smithy.api#documentation": "

          A NotificationChannelConfig object that specifies what type of\n \t\tnotification channel to add. The one \n \tsupported notification channel is Amazon Simple Notification Service (Amazon SNS).

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#AddNotificationChannelResponse": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#NotificationChannelId", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe ID of the added notification channel.\n \t

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#AnomalyId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + }, + "smithy.api#pattern": "^[\\w-]*$" + } + }, + "com.amazonaws.devopsguru#AnomalyLimit": { + "type": "double", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.devopsguru#AnomalySeverity": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "LOW", + "name": "LOW" + }, + { + "value": "MEDIUM", + "name": "MEDIUM" + }, + { + "value": "HIGH", + "name": "HIGH" + } + ] + } + }, + "com.amazonaws.devopsguru#AnomalySourceDetails": { + "type": "structure", + "members": { + "CloudWatchMetrics": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsDetails", + "traits": { + "smithy.api#documentation": "

          An array of CloudWatchMetricsDetail object that contains information\n\t\t\tabout the analyzed metrics that displayed anomalous behavior.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tDetails about the source of the anomalous operational data that triggered the anomaly. The \n\t\t\tone supported source is Amazon CloudWatch metrics.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#AnomalyStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ONGOING", + "name": "ONGOING" + }, + { + "value": "CLOSED", + "name": "CLOSED" + } + ] + } + }, + "com.amazonaws.devopsguru#AnomalyTimeRange": { + "type": "structure", + "members": { + "StartTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time when the anomalous behavior started.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "EndTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          The time when the anomalous behavior ended.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA time range that specifies when the observed unusual behavior in an anomaly started and ended.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#CapstoneControlPlaneService": { + "type": "service", + "version": "2019-12-11", + "operations": [ + { + "target": "com.amazonaws.devopsguru#AddNotificationChannel" + }, + { + "target": "com.amazonaws.devopsguru#DescribeAccountHealth" + }, + { + "target": "com.amazonaws.devopsguru#DescribeAccountOverview" + }, + { + "target": "com.amazonaws.devopsguru#DescribeAnomaly" + }, + { + "target": "com.amazonaws.devopsguru#DescribeInsight" + }, + { + "target": "com.amazonaws.devopsguru#DescribeResourceCollectionHealth" + }, + { + "target": "com.amazonaws.devopsguru#DescribeServiceIntegration" + }, + { + "target": "com.amazonaws.devopsguru#GetResourceCollection" + }, + { + "target": "com.amazonaws.devopsguru#ListAnomaliesForInsight" + }, + { + "target": "com.amazonaws.devopsguru#ListEvents" + }, + { + "target": "com.amazonaws.devopsguru#ListInsights" + }, + { + "target": "com.amazonaws.devopsguru#ListNotificationChannels" + }, + { + "target": "com.amazonaws.devopsguru#ListRecommendations" + }, + { + "target": "com.amazonaws.devopsguru#PutFeedback" + }, + { + "target": "com.amazonaws.devopsguru#RemoveNotificationChannel" + }, + { + "target": "com.amazonaws.devopsguru#SearchInsights" + }, + { + "target": "com.amazonaws.devopsguru#UpdateResourceCollection" + }, + { + "target": "com.amazonaws.devopsguru#UpdateServiceIntegration" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "DevOps Guru", + "arnNamespace": "devops-guru", + "cloudFormationName": "DevOpsGuru", + "cloudTrailEventSource": "devopsguru.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "devops-guru" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          Amazon DevOps Guru is a fully managed service that helps you identify anomalous behavior in business\n\t\t\tcritical operational applications. You specify the AWS resources that you want DevOps Guru to cover,\n\t\t\tthen the Amazon CloudWatch metrics and AWS CloudTrail events related to those resources are analyzed. When\n\t\t\tanomalous behavior is detected, DevOps Guru creates an insight that includes\n\t\t\trecommendations, related events, and related metrics that can help you improve your\n\t\t\toperational applications. For more information, see What is Amazon DevOps Guru.

          \n\t\t\n\t\t

          \n\t\t\tYou can specify 1 or 2 Amazon Simple Notification Service topics so you are notified every time a new insight is created. You can also enable DevOps Guru to generate \n\t\t\tan OpsItem in AWS Systems Manager for each insight to help you manage and track your work addressing insights.\n\t\t

          \n\t\t\n\t\t

          \n\t\t\tTo learn about the DevOps Guru workflow, see How DevOps Guru works. To \n\t\t\tlearn about DevOps Guru concepts, see Concepts in DevOps Guru.\n\t\t

          ", + "smithy.api#title": "Amazon DevOps Guru" + } + }, + "com.amazonaws.devopsguru#Channels": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#NotificationChannel" + } + }, + "com.amazonaws.devopsguru#CloudFormationCollection": { + "type": "structure", + "members": { + "StackNames": { + "target": "com.amazonaws.devopsguru#StackNames", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of CloudFormation stack names.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. \n\t\t\tFor more information, see Stacks \n\t\t\tin the AWS CloudFormation User Guide.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#CloudFormationCollectionFilter": { + "type": "structure", + "members": { + "StackNames": { + "target": "com.amazonaws.devopsguru#StackNames", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of CloudFormation stack names.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. \n\t\t\tFor more information, see Stacks \n\t\t\tin the AWS CloudFormation User Guide.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#CloudFormationHealth": { + "type": "structure", + "members": { + "StackName": { + "target": "com.amazonaws.devopsguru#StackName", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe name of the CloudFormation stack.\n \t

          " + } + }, + "Insight": { + "target": "com.amazonaws.devopsguru#InsightHealth", + "traits": { + "smithy.api#documentation": "

          \n \t\tInformation about the health of the AWS resources in your account that are specified by an AWS CloudFormation stack, including \n \t\tthe number of open proactive, open reactive insights, and the Mean Time to Recover (MTTR) of closed insights.\n \t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \t\tInformation about the health of AWS resources in your account that are specified by an AWS CloudFormation stack.\n \t

          " + } + }, + "com.amazonaws.devopsguru#CloudFormationHealths": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#CloudFormationHealth" + } + }, + "com.amazonaws.devopsguru#CloudWatchMetricsDetail": { + "type": "structure", + "members": { + "MetricName": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsMetricName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the CloudWatch metric.\n\t\t

          " + } + }, + "Namespace": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsNamespace", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe namespace of the CloudWatch metric. A namespace is a container for CloudWatch metrics.\n\t\t

          " + } + }, + "Dimensions": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsDimensions", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of CloudWatch dimensions associated with the CloudWatch metric.\n\t\t

          " + } + }, + "Stat": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsStat", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe type of statistic associated with the CloudWatch metric. For more information, see \n\t\t\tStatistics in the \n\t\t\tAmazon CloudWatch User Guide.\n\t\t

          " + } + }, + "Unit": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsUnit", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe unit of measure used for the CloudWatch metric. For example, Bytes, Seconds, \n\t\t\tCount, and Percent.\n\t\t

          " + } + }, + "Period": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsPeriod", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe length of time associated with the CloudWatch metric in number of seconds.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about an Amazon CloudWatch metric.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#CloudWatchMetricsDetails": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsDetail" + } + }, + "com.amazonaws.devopsguru#CloudWatchMetricsDimension": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsDimensionName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the CloudWatch dimension.\n\t\t

          " + } + }, + "Value": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsDimensionValue", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe value of the CloudWatch dimension.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe dimension of an Amazon CloudWatch metric that is used when DevOps Guru analyzes the resources in your account for \n\t\t\toperational problems and anomalous behavior. A dimension is a name/value pair that is part of the identity \n\t\t\tof a metric. A metric can have up to 10 dimensions. For more information, see \n\t\t\tDimensions \n\t\t\tin the Amazon CloudWatch User Guide.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#CloudWatchMetricsDimensionName": { + "type": "string" + }, + "com.amazonaws.devopsguru#CloudWatchMetricsDimensionValue": { + "type": "string" + }, + "com.amazonaws.devopsguru#CloudWatchMetricsDimensions": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#CloudWatchMetricsDimension" + } + }, + "com.amazonaws.devopsguru#CloudWatchMetricsMetricName": { + "type": "string" + }, + "com.amazonaws.devopsguru#CloudWatchMetricsNamespace": { + "type": "string" + }, + "com.amazonaws.devopsguru#CloudWatchMetricsPeriod": { + "type": "integer" + }, + "com.amazonaws.devopsguru#CloudWatchMetricsStat": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Sum", + "name": "SUM" + }, + { + "value": "Average", + "name": "AVERAGE" + }, + { + "value": "SampleCount", + "name": "SAMPLE_COUNT" + }, + { + "value": "Minimum", + "name": "MINIMUM" + }, + { + "value": "Maximum", + "name": "MAXIMUM" + }, + { + "value": "p99", + "name": "P99" + }, + { + "value": "p90", + "name": "P90" + }, + { + "value": "p50", + "name": "P50" + } + ] + } + }, + "com.amazonaws.devopsguru#CloudWatchMetricsUnit": { + "type": "string" + }, + "com.amazonaws.devopsguru#ConflictException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#required": {} + } + }, + "ResourceId": { + "target": "com.amazonaws.devopsguru#ResourceIdString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the AWS resource in which a conflict occurred.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "ResourceType": { + "target": "com.amazonaws.devopsguru#ResourceIdType", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe type of the AWS resource in which a conflict occurred.\n\t\t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn exception that is thrown when a conflict occurs.\n\t\t

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 409 + } + }, + "com.amazonaws.devopsguru#DescribeAccountHealth": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#DescribeAccountHealthRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#DescribeAccountHealthResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns the number of open reactive insights, the number of open proactive insights, and the number of metrics analyzed in your AWS account. \n \t\tUse these numbers to gauge the health of operations in your AWS account.\n \t

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/accounts/health", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#DescribeAccountHealthRequest": { + "type": "structure", + "members": {} + }, + "com.amazonaws.devopsguru#DescribeAccountHealthResponse": { + "type": "structure", + "members": { + "OpenReactiveInsights": { + "target": "com.amazonaws.devopsguru#NumOpenReactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn integer that specifies the number of open reactive insights in your AWS account. \n \t

          ", + "smithy.api#required": {} + } + }, + "OpenProactiveInsights": { + "target": "com.amazonaws.devopsguru#NumOpenProactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn integer that specifies the number of open proactive insights in your AWS account.\n \t

          ", + "smithy.api#required": {} + } + }, + "MetricsAnalyzed": { + "target": "com.amazonaws.devopsguru#NumMetricsAnalyzed", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn integer that specifies the number of metrics that have been analyzed in your AWS account.\n \t

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#DescribeAccountOverview": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#DescribeAccountOverviewRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#DescribeAccountOverviewResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tFor the time range passed in, returns the number of open reactive insight that were created, the number of open proactive insights \n \t\tthat were created, and the Mean Time to Recover (MTTR) for all closed reactive insights.\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/accounts/overview", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#DescribeAccountOverviewRequest": { + "type": "structure", + "members": { + "FromTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe start of the time range passed in. The start time granularity is at the \n \t\tday level. The floor of the start time is used. Returned information occurred after this day. \n \t

          ", + "smithy.api#required": {} + } + }, + "ToTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe end of the time range passed in. The end time granularity is at the \n \t\tday level. The floor of the end time is used. Returned information occurred before this day. If this is not specified, then the current day is used.\n \t

          " + } + } + } + }, + "com.amazonaws.devopsguru#DescribeAccountOverviewResponse": { + "type": "structure", + "members": { + "ReactiveInsights": { + "target": "com.amazonaws.devopsguru#NumReactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn integer that specifies the number of open reactive insights in your AWS account that were created during the \n \t\ttime range passed in.\n \t

          ", + "smithy.api#required": {} + } + }, + "ProactiveInsights": { + "target": "com.amazonaws.devopsguru#NumProactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn integer that specifies the number of open proactive insights in your AWS account that were created during the \n \t\ttime range passed in.\n \t

          ", + "smithy.api#required": {} + } + }, + "MeanTimeToRecoverInMilliseconds": { + "target": "com.amazonaws.devopsguru#MeanTimeToRecoverInMilliseconds", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe Mean Time to Recover (MTTR) for all closed insights that were created during the \n \t\ttime range passed in.\n \t

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#DescribeAnomaly": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#DescribeAnomalyRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#DescribeAnomalyResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns details about an anomaly that you specify using its ID.\n \t

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/anomalies/{Id}", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#DescribeAnomalyRequest": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#AnomalyId", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe ID of the anomaly.\n \t

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#DescribeAnomalyResponse": { + "type": "structure", + "members": { + "ProactiveAnomaly": { + "target": "com.amazonaws.devopsguru#ProactiveAnomaly", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA ProactiveAnomaly object that represents the requested anomaly.\n\t\t

          " + } + }, + "ReactiveAnomaly": { + "target": "com.amazonaws.devopsguru#ReactiveAnomaly", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA ReactiveAnomaly object that represents the requested anomaly.\n\t\t

          " + } + } + } + }, + "com.amazonaws.devopsguru#DescribeInsight": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#DescribeInsightRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#DescribeInsightResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns details about an insight that you specify using its ID.\n \t

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/insights/{Id}", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#DescribeInsightRequest": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe ID of the insight.\n \t

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#DescribeInsightResponse": { + "type": "structure", + "members": { + "ProactiveInsight": { + "target": "com.amazonaws.devopsguru#ProactiveInsight", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA ProactiveInsight object that represents the requested insight.\n\t\t

          " + } + }, + "ReactiveInsight": { + "target": "com.amazonaws.devopsguru#ReactiveInsight", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA ReactiveInsight object that represents the requested insight.\n\t\t

          " + } + } + } + }, + "com.amazonaws.devopsguru#DescribeResourceCollectionHealth": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#DescribeResourceCollectionHealthRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#DescribeResourceCollectionHealthResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns the number of open proactive insights, open reactive insights, and the Mean Time to Recover (MTTR) for all closed insights in \n \t\tresource collections in your account. You specify the type of AWS resource collection. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze \n \tonly the AWS resources that are defined in the stacks.\n \t

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/accounts/health/resource-collection/{ResourceCollectionType}", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken" + } + } + }, + "com.amazonaws.devopsguru#DescribeResourceCollectionHealthRequest": { + "type": "structure", + "members": { + "ResourceCollectionType": { + "target": "com.amazonaws.devopsguru#ResourceCollectionType", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn AWS resource collection type. This type specifies how analyzed AWS resources are defined. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze \n \tonly the AWS resources that are defined in the stacks. \n \t

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          ", + "smithy.api#httpQuery": "NextToken" + } + } + } + }, + "com.amazonaws.devopsguru#DescribeResourceCollectionHealthResponse": { + "type": "structure", + "members": { + "CloudFormation": { + "target": "com.amazonaws.devopsguru#CloudFormationHealths", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe returned CloudFormationHealthOverview object that contains an InsightHealthOverview object with \n \t\tthe requested system health information.\n \t

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#DescribeServiceIntegration": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#DescribeServiceIntegrationRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#DescribeServiceIntegrationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n\t\t\tReturns the integration status of services that are integrated with DevOps Guru. \n\t\t\tThe one service that can be integrated with DevOps Guru \n \tis AWS Systems Manager, which can be used to create an OpsItem for each generated insight.\n\t\t

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/service-integrations", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#DescribeServiceIntegrationRequest": { + "type": "structure", + "members": {} + }, + "com.amazonaws.devopsguru#DescribeServiceIntegrationResponse": { + "type": "structure", + "members": { + "ServiceIntegration": { + "target": "com.amazonaws.devopsguru#ServiceIntegrationConfig" + } + } + }, + "com.amazonaws.devopsguru#EndTimeRange": { + "type": "structure", + "members": { + "FromTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n \tThe earliest end time in the time range.\n

          " + } + }, + "ToTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n \tThe latest end time in the time range.\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \tA range of time that specifies when anomalous behavior in an anomaly or insight ended.\n

          " + } + }, + "com.amazonaws.devopsguru#ErrorMessageString": { + "type": "string" + }, + "com.amazonaws.devopsguru#ErrorNameString": { + "type": "string" + }, + "com.amazonaws.devopsguru#ErrorQuotaCodeString": { + "type": "string" + }, + "com.amazonaws.devopsguru#ErrorServiceCodeString": { + "type": "string" + }, + "com.amazonaws.devopsguru#Event": { + "type": "structure", + "members": { + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + }, + "Id": { + "target": "com.amazonaws.devopsguru#EventId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the event.\n\t\t

          " + } + }, + "Time": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          A Timestamp that specifies the time the event occurred.

          " + } + }, + "EventSource": { + "target": "com.amazonaws.devopsguru#EventSource", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe AWS source that emitted the event.\n\t\t

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#EventName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the event.\n\t\t

          " + } + }, + "DataSource": { + "target": "com.amazonaws.devopsguru#EventDataSource", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe source, AWS_CLOUD_TRAIL or AWS_CODE_DEPLOY, where DevOps Guru analysis found the event.\n\t\t

          " + } + }, + "EventClass": { + "target": "com.amazonaws.devopsguru#EventClass", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe class of the event. The class specifies what the event is related to, such as an infrastructure change, a deployment, or a schema change.\n\t\t

          " + } + }, + "Resources": { + "target": "com.amazonaws.devopsguru#EventResources", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn EventResource object that contains information about the resource that emitted the event.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn AWS resource event. AWS resource events and metrics are analyzed by DevOps Guru to find anomalous behavior and \n\t\t\tprovide recommendations to improve your operational solutions.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#EventClass": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "INFRASTRUCTURE", + "name": "INFRASTRUCTURE" + }, + { + "value": "DEPLOYMENT", + "name": "DEPLOYMENT" + }, + { + "value": "SECURITY_CHANGE", + "name": "SECURITY_CHANGE" + }, + { + "value": "CONFIG_CHANGE", + "name": "CONFIG_CHANGE" + }, + { + "value": "SCHEMA_CHANGE", + "name": "SCHEMA_CHANGE" + } + ] + } + }, + "com.amazonaws.devopsguru#EventDataSource": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "AWS_CLOUD_TRAIL", + "name": "AWS_CLOUD_TRAIL" + }, + { + "value": "AWS_CODE_DEPLOY", + "name": "AWS_CODE_DEPLOY" + } + ] + } + }, + "com.amazonaws.devopsguru#EventId": { + "type": "string" + }, + "com.amazonaws.devopsguru#EventName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 50 + } + } + }, + "com.amazonaws.devopsguru#EventResource": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.devopsguru#EventResourceType", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe type of resource that emitted an event.\n\t\t

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#EventResourceName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the resource that emitted an event.\n\t\t

          " + } + }, + "Arn": { + "target": "com.amazonaws.devopsguru#EventResourceArn", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe Amazon Resource Name (ARN) of the resource that emitted an event.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe AWS resource that emitted an event. AWS resource events and metrics are analyzed by DevOps Guru to find anomalous behavior and \n\t\t\tprovide recommendations to improve your operational solutions.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#EventResourceArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 36, + "max": 2048 + }, + "smithy.api#pattern": "^arn:aws[-a-z]*:[a-z0-9-]*:[a-z0-9-]*:\\d{12}:.*$" + } + }, + "com.amazonaws.devopsguru#EventResourceName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": "^.*$" + } + }, + "com.amazonaws.devopsguru#EventResourceType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": "^.*$" + } + }, + "com.amazonaws.devopsguru#EventResources": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#EventResource" + } + }, + "com.amazonaws.devopsguru#EventSource": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 10, + "max": 50 + }, + "smithy.api#pattern": "^[a-z]+[a-z0-9]*\\.amazonaws\\.com|aws\\.events$" + } + }, + "com.amazonaws.devopsguru#EventTimeRange": { + "type": "structure", + "members": { + "FromTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time when the event started.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "ToTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time when the event ended.\n\t\t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time range during which an AWS event occurred. AWS resource events and metrics are analyzed by DevOps Guru to find anomalous behavior and \n\t\t\tprovide recommendations to improve your operational solutions.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#Events": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#Event" + } + }, + "com.amazonaws.devopsguru#GetResourceCollection": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#GetResourceCollectionRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#GetResourceCollectionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns lists of AWS resources that are of the specified resource collection type. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze \n \tonly the AWS resources that are defined in the stacks.\n \t

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/resource-collections/{ResourceCollectionType}", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken" + } + } + }, + "com.amazonaws.devopsguru#GetResourceCollectionRequest": { + "type": "structure", + "members": { + "ResourceCollectionType": { + "target": "com.amazonaws.devopsguru#ResourceCollectionType", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe type of AWS resource collections to return. The one valid value is CLOUD_FORMATION for \n \t\tAWS CloudFormation stacks.\n \t

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          ", + "smithy.api#httpQuery": "NextToken" + } + } + } + }, + "com.amazonaws.devopsguru#GetResourceCollectionResponse": { + "type": "structure", + "members": { + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollectionFilter", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe requested list of AWS resource collections. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze \n \tonly the AWS resources that are defined in the stacks.\n \t

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#InsightFeedback": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n \tThe insight feedback ID.\n

          " + } + }, + "Feedback": { + "target": "com.amazonaws.devopsguru#InsightFeedbackOption", + "traits": { + "smithy.api#documentation": "

          \n \tThe feedback provided by the customer.\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \tInformation about insight feedback received from a customer.\n

          " + } + }, + "com.amazonaws.devopsguru#InsightFeedbackOption": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "VALID_COLLECTION", + "name": "VALID_COLLECTION" + }, + { + "value": "RECOMMENDATION_USEFUL", + "name": "RECOMMENDATION_USEFUL" + }, + { + "value": "ALERT_TOO_SENSITIVE", + "name": "ALERT_TOO_SENSITIVE" + }, + { + "value": "DATA_NOISY_ANOMALY", + "name": "DATA_NOISY_ANOMALY" + }, + { + "value": "DATA_INCORRECT", + "name": "DATA_INCORRECT" + } + ] + } + }, + "com.amazonaws.devopsguru#InsightHealth": { + "type": "structure", + "members": { + "OpenProactiveInsights": { + "target": "com.amazonaws.devopsguru#NumOpenProactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n \tThe number of open proactive insights.\n

          " + } + }, + "OpenReactiveInsights": { + "target": "com.amazonaws.devopsguru#NumOpenReactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n \tThe number of open reactive insights.\n

          " + } + }, + "MeanTimeToRecoverInMilliseconds": { + "target": "com.amazonaws.devopsguru#MeanTimeToRecoverInMilliseconds", + "traits": { + "smithy.api#documentation": "

          \n \tThe Mean Time to Recover (MTTR) for the insight.\t\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \tInformation about the number of open reactive and proactive insights that can be \n \tused to gauge the health of your system.\n

          " + } + }, + "com.amazonaws.devopsguru#InsightId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + }, + "smithy.api#pattern": "^[\\w-]*$" + } + }, + "com.amazonaws.devopsguru#InsightName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 530 + }, + "smithy.api#pattern": "^[\\s\\S]*$" + } + }, + "com.amazonaws.devopsguru#InsightSeverities": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#InsightSeverity" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 3 + } + } + }, + "com.amazonaws.devopsguru#InsightSeverity": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "LOW", + "name": "LOW" + }, + { + "value": "MEDIUM", + "name": "MEDIUM" + }, + { + "value": "HIGH", + "name": "HIGH" + } + ] + } + }, + "com.amazonaws.devopsguru#InsightStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ONGOING", + "name": "ONGOING" + }, + { + "value": "CLOSED", + "name": "CLOSED" + } + ] + } + }, + "com.amazonaws.devopsguru#InsightStatuses": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#InsightStatus" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2 + } + } + }, + "com.amazonaws.devopsguru#InsightTimeRange": { + "type": "structure", + "members": { + "StartTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time when the behavior described in an insight started.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "EndTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time when the behavior described in an insight ended.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A time range that specifies when the observed behavior in an insight started and\n\t\t\tended.

          " + } + }, + "com.amazonaws.devopsguru#InsightType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "REACTIVE", + "name": "REACTIVE" + }, + { + "value": "PROACTIVE", + "name": "PROACTIVE" + } + ] + } + }, + "com.amazonaws.devopsguru#InternalServerException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#required": {} + } + }, + "RetryAfterSeconds": { + "target": "com.amazonaws.devopsguru#RetryAfterSeconds", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe number of seconds after which the action that caused the internal server \n\t\t\texception can be retried.\n\t\t

          ", + "smithy.api#httpHeader": "Retry-After" + } + } + }, + "traits": { + "smithy.api#documentation": "

          An internal failure in an Amazon service occurred.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.devopsguru#ListAnomaliesForInsight": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#ListAnomaliesForInsightRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#ListAnomaliesForInsightResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns a list of the anomalies that belong to an insight that you specify using its ID.\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/anomalies/insight/{InsightId}", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.devopsguru#ListAnomaliesForInsightMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 500 + } + } + }, + "com.amazonaws.devopsguru#ListAnomaliesForInsightRequest": { + "type": "structure", + "members": { + "InsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the insight. The returned anomalies belong to this insight.\n\t\t

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "StartTimeRange": { + "target": "com.amazonaws.devopsguru#StartTimeRange", + "traits": { + "smithy.api#documentation": "

          \n \t\tA time range used to specify when the requested anomalies started. All returned anomalies started \n \t\tduring this time range.\n \t

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.devopsguru#ListAnomaliesForInsightMaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return with a single call.\n\tTo retrieve the remaining results, make another call with the returned nextToken value. \n\tThe default value is 500.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListAnomaliesForInsightResponse": { + "type": "structure", + "members": { + "ProactiveAnomalies": { + "target": "com.amazonaws.devopsguru#ProactiveAnomalies", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of ProactiveAnomalySummary objects that represent the requested anomalies\n\t\t

          " + } + }, + "ReactiveAnomalies": { + "target": "com.amazonaws.devopsguru#ReactiveAnomalies", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of ReactiveAnomalySummary objects that represent the requested anomalies\n\t\t

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListEvents": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#ListEventsRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#ListEventsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns a list of the events emitted by the resources that are evaluated by DevOps Guru. You can use filters to specify which events are returned.\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/events", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.devopsguru#ListEventsFilters": { + "type": "structure", + "members": { + "InsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn ID of an insight that is related to the events you want to filter for.\n\t\t

          " + } + }, + "EventTimeRange": { + "target": "com.amazonaws.devopsguru#EventTimeRange", + "traits": { + "smithy.api#documentation": "

          A time range during which you want the filtered events to have occurred.

          " + } + }, + "EventClass": { + "target": "com.amazonaws.devopsguru#EventClass", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe class of the events you want to filter for, such as an infrastructure change, a deployment, or a schema change. \t\n\t\t

          " + } + }, + "EventSource": { + "target": "com.amazonaws.devopsguru#EventSource", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe AWS source that emitted the events you want to filter for.\n\t\t

          " + } + }, + "DataSource": { + "target": "com.amazonaws.devopsguru#EventDataSource", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe source, AWS_CLOUD_TRAIL or AWS_CODE_DEPLOY, of the events you want returned.\n\t\t

          " + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tFilters you can use to specify which events are returned when ListEvents is called.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ListEventsMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 200 + } + } + }, + "com.amazonaws.devopsguru#ListEventsRequest": { + "type": "structure", + "members": { + "Filters": { + "target": "com.amazonaws.devopsguru#ListEventsFilters", + "traits": { + "smithy.api#documentation": "

          \n \t\tA ListEventsFilters object used to specify which events to return.\n \t

          ", + "smithy.api#required": {} + } + }, + "MaxResults": { + "target": "com.amazonaws.devopsguru#ListEventsMaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return with a single call.\n\tTo retrieve the remaining results, make another call with the returned nextToken value. \n\tThe default value is 500.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListEventsResponse": { + "type": "structure", + "members": { + "Events": { + "target": "com.amazonaws.devopsguru#Events", + "traits": { + "smithy.api#documentation": "

          \n \t\tA list of the requested events.\n \t

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListInsights": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#ListInsightsRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#ListInsightsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns a list of insights in your AWS account. You can specify which insights are returned by their start time and \n \t\tstatus (ONGOING, CLOSED, or ANY).\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/insights", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.devopsguru#ListInsightsAnyStatusFilter": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.devopsguru#InsightType", + "traits": { + "smithy.api#documentation": "

          \n \t\tUse to filter for either REACTIVE or PROACTIVE insights.\n \t

          ", + "smithy.api#required": {} + } + }, + "StartTimeRange": { + "target": "com.amazonaws.devopsguru#StartTimeRange", + "traits": { + "smithy.api#documentation": "

          \n \t\tA time range used to specify when the behavior of the filtered insights started.\n \t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \t\tUsed to filter for insights that have any status.\n \t

          " + } + }, + "com.amazonaws.devopsguru#ListInsightsClosedStatusFilter": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.devopsguru#InsightType", + "traits": { + "smithy.api#documentation": "

          \n \tUse to filter for either REACTIVE or PROACTIVE insights.\n

          ", + "smithy.api#required": {} + } + }, + "EndTimeRange": { + "target": "com.amazonaws.devopsguru#EndTimeRange", + "traits": { + "smithy.api#documentation": "

          \n \t\tA time range used to specify when the behavior of the filtered insights ended.\n \t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \t\tUsed to filter for insights that have the status CLOSED.\n \t

          " + } + }, + "com.amazonaws.devopsguru#ListInsightsMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.devopsguru#ListInsightsOngoingStatusFilter": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.devopsguru#InsightType", + "traits": { + "smithy.api#documentation": "

          \n \t\tUse to filter for either REACTIVE or PROACTIVE insights.\n \t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \tUsed to filter for insights that have the status ONGOING.\n

          " + } + }, + "com.amazonaws.devopsguru#ListInsightsRequest": { + "type": "structure", + "members": { + "StatusFilter": { + "target": "com.amazonaws.devopsguru#ListInsightsStatusFilter", + "traits": { + "smithy.api#documentation": "

          \n \tA filter used to filter the returned insights by their status. You can specify one status filter.\n

          ", + "smithy.api#required": {} + } + }, + "MaxResults": { + "target": "com.amazonaws.devopsguru#ListInsightsMaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return with a single call.\n\tTo retrieve the remaining results, make another call with the returned nextToken value. \n\tThe default value is 500.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListInsightsResponse": { + "type": "structure", + "members": { + "ProactiveInsights": { + "target": "com.amazonaws.devopsguru#ProactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe returned list of proactive insights.\n\t\t

          " + } + }, + "ReactiveInsights": { + "target": "com.amazonaws.devopsguru#ReactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe returned list of reactive insights.\t\n\t\t

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListInsightsStatusFilter": { + "type": "structure", + "members": { + "Ongoing": { + "target": "com.amazonaws.devopsguru#ListInsightsOngoingStatusFilter", + "traits": { + "smithy.api#documentation": "

          \n \tA ListInsightsOngoingStatusFilter that specifies ongoing insights \n \tthat are either REACTIVE or PROACTIVE.\n

          " + } + }, + "Closed": { + "target": "com.amazonaws.devopsguru#ListInsightsClosedStatusFilter", + "traits": { + "smithy.api#documentation": "

          \n \t\tA ListInsightsClosedStatusFilter that specifies closed insights that are \n \t\teither REACTIVE or PROACTIVE.\n \t

          " + } + }, + "Any": { + "target": "com.amazonaws.devopsguru#ListInsightsAnyStatusFilter", + "traits": { + "smithy.api#documentation": "

          \n \t\tA ListInsightsAnyStatusFilter that specifies insights of any status \n \t\tthat are either REACTIVE or PROACTIVE.\n \t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n \tA filter used by ListInsights to specify which insights to return.\n

          " + } + }, + "com.amazonaws.devopsguru#ListNotificationChannels": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#ListNotificationChannelsRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#ListNotificationChannelsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns a list of notification channels configured for DevOps Guru. Each notification channel is used to notify you when \n \t\tDevOps Guru generates an insight that contains information about how to improve your operations. The one \n \tsupported notification channel is Amazon Simple Notification Service (Amazon SNS).\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/channels", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken" + } + } + }, + "com.amazonaws.devopsguru#ListNotificationChannelsRequest": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListNotificationChannelsResponse": { + "type": "structure", + "members": { + "Channels": { + "target": "com.amazonaws.devopsguru#Channels", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn array that contains the requested notification channels.\n \t

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListRecommendations": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#ListRecommendationsRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#ListRecommendationsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns a list of a specified insight's recommendations. Each recommendation includes a list of related metrics and a list of related events.\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/recommendations", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken" + } + } + }, + "com.amazonaws.devopsguru#ListRecommendationsRequest": { + "type": "structure", + "members": { + "InsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the requested insight.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ListRecommendationsResponse": { + "type": "structure", + "members": { + "Recommendations": { + "target": "com.amazonaws.devopsguru#Recommendations", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn array of the requested recommendations.\n \t

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#MeanTimeToRecoverInMilliseconds": { + "type": "long", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.devopsguru#NotificationChannel": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#NotificationChannelId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of a notification channel.\n\t\t

          " + } + }, + "Config": { + "target": "com.amazonaws.devopsguru#NotificationChannelConfig", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA NotificationChannelConfig object that contains information about configured notification channels.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about a notification channel. A notification channel is used to notify you when DevOps Guru creates an insight. \n\t\t\tThe one \n \tsupported notification channel is Amazon Simple Notification Service (Amazon SNS).\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#NotificationChannelConfig": { + "type": "structure", + "members": { + "Sns": { + "target": "com.amazonaws.devopsguru#SnsChannelConfig", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about a notification channel configured in DevOps Guru to send notifications when insights are created.\n\t\t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about notification channels you have configured with DevOps Guru.\n\t\t\tThe one \n \tsupported notification channel is Amazon Simple Notification Service (Amazon SNS).

          " + } + }, + "com.amazonaws.devopsguru#NotificationChannelId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 36, + "max": 36 + }, + "smithy.api#pattern": "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$" + } + }, + "com.amazonaws.devopsguru#NumMetricsAnalyzed": { + "type": "integer" + }, + "com.amazonaws.devopsguru#NumOpenProactiveInsights": { + "type": "integer" + }, + "com.amazonaws.devopsguru#NumOpenReactiveInsights": { + "type": "integer" + }, + "com.amazonaws.devopsguru#NumProactiveInsights": { + "type": "integer" + }, + "com.amazonaws.devopsguru#NumReactiveInsights": { + "type": "integer" + }, + "com.amazonaws.devopsguru#OpsCenterIntegration": { + "type": "structure", + "members": { + "OptInStatus": { + "target": "com.amazonaws.devopsguru#OptInStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tSpecifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#OpsCenterIntegrationConfig": { + "type": "structure", + "members": { + "OptInStatus": { + "target": "com.amazonaws.devopsguru#OptInStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tSpecifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#OptInStatus": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tSpecifies if DevOps Guru is enabled to create an AWS Systems Manager OpsItem for each created insight.\n\t\t

          ", + "smithy.api#enum": [ + { + "value": "ENABLED", + "name": "ENABLED" + }, + { + "value": "DISABLED", + "name": "DISABLED" + } + ] + } + }, + "com.amazonaws.devopsguru#PredictionTimeRange": { + "type": "structure", + "members": { + "StartTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time range during which a metric limit is expected to be exceeded. This applies to proactive insights only.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "EndTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time when the behavior in a proactive insight is expected to end.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time range during which anomalous behavior in a proactive anomaly or an insight is expected to occur. \n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ProactiveAnomalies": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#ProactiveAnomalySummary" + } + }, + "com.amazonaws.devopsguru#ProactiveAnomaly": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#AnomalyId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of a proactive anomaly.\n\t\t

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#AnomalySeverity", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe severity of a proactive anomaly.\n\t\t

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#AnomalyStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe status of a proactive anomaly.\n\t\t

          " + } + }, + "UpdateTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time of the anomaly's most recent update.\n\t\t

          " + } + }, + "AnomalyTimeRange": { + "target": "com.amazonaws.devopsguru#AnomalyTimeRange" + }, + "PredictionTimeRange": { + "target": "com.amazonaws.devopsguru#PredictionTimeRange" + }, + "SourceDetails": { + "target": "com.amazonaws.devopsguru#AnomalySourceDetails", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tDetails about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. \n\t\t

          " + } + }, + "AssociatedInsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the insight that contains this anomaly. An insight is composed of related anomalies.\n\t\t

          " + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + }, + "Limit": { + "target": "com.amazonaws.devopsguru#AnomalyLimit", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA threshold that was exceeded by behavior in analyzed resources. Exceeding this\n\t\t\tthreshold is related to the anomalous behavior that generated this anomaly.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about an anomaly. This object is returned by ListAnomalies.

          " + } + }, + "com.amazonaws.devopsguru#ProactiveAnomalySummary": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#AnomalyId", + "traits": { + "smithy.api#documentation": "

          The ID of the anomaly.

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#AnomalySeverity", + "traits": { + "smithy.api#documentation": "

          The severity of the anomaly.

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#AnomalyStatus", + "traits": { + "smithy.api#documentation": "

          The status of the anomaly.

          " + } + }, + "UpdateTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe time of the anomaly's most recent update.\n\t\t

          " + } + }, + "AnomalyTimeRange": { + "target": "com.amazonaws.devopsguru#AnomalyTimeRange" + }, + "PredictionTimeRange": { + "target": "com.amazonaws.devopsguru#PredictionTimeRange" + }, + "SourceDetails": { + "target": "com.amazonaws.devopsguru#AnomalySourceDetails", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tDetails about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. \n\t\t

          " + } + }, + "AssociatedInsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the insight that contains this anomaly. An insight is composed of related anomalies.\n\t\t

          " + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + }, + "Limit": { + "target": "com.amazonaws.devopsguru#AnomalyLimit", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA threshold that was exceeded by behavior in analyzed resources. Exceeding this\n\t\t\tthreshold is related to the anomalous behavior that generated this anomaly.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Details about a proactive anomaly. This object is returned by\n\t\t\tDescribeAnomaly.\n

          " + } + }, + "com.amazonaws.devopsguru#ProactiveInsight": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          The ID of the proactive insight.

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#InsightName", + "traits": { + "smithy.api#documentation": "

          The name of the proactive insight.

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#InsightSeverity", + "traits": { + "smithy.api#documentation": "

          The severity of the proactive insight.

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#InsightStatus", + "traits": { + "smithy.api#documentation": "

          The status of the proactive insight.

          " + } + }, + "InsightTimeRange": { + "target": "com.amazonaws.devopsguru#InsightTimeRange" + }, + "PredictionTimeRange": { + "target": "com.amazonaws.devopsguru#PredictionTimeRange" + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + }, + "SsmOpsItemId": { + "target": "com.amazonaws.devopsguru#SsmOpsItemId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the AWS Systems Manager OpsItem created for this insight. You must enable \n\t\t\tthe creation of OpsItems for insights before they are created for each insight.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Details about a proactive insight. This object is returned by ListInsights.

          " + } + }, + "com.amazonaws.devopsguru#ProactiveInsightSummary": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          The ID of the proactive insight.

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#InsightName", + "traits": { + "smithy.api#documentation": "

          The name of the proactive insight.

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#InsightSeverity", + "traits": { + "smithy.api#documentation": "

          The severity of the proactive insight.

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#InsightStatus", + "traits": { + "smithy.api#documentation": "

          The status of the proactive insight.

          " + } + }, + "InsightTimeRange": { + "target": "com.amazonaws.devopsguru#InsightTimeRange" + }, + "PredictionTimeRange": { + "target": "com.amazonaws.devopsguru#PredictionTimeRange" + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + } + }, + "traits": { + "smithy.api#documentation": "

          Details about a proactive insight. This object is returned by\n\t\t\tDescribeInsight.\n

          " + } + }, + "com.amazonaws.devopsguru#ProactiveInsights": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#ProactiveInsightSummary" + } + }, + "com.amazonaws.devopsguru#PutFeedback": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#PutFeedbackRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#PutFeedbackResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#ConflictException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tCollects customer feedback about the specified insight.\n \t

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/feedback", + "code": 200 + }, + "smithy.api#idempotent": {} + } + }, + "com.amazonaws.devopsguru#PutFeedbackRequest": { + "type": "structure", + "members": { + "InsightFeedback": { + "target": "com.amazonaws.devopsguru#InsightFeedback", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe feedback from customers is about the recommendations in this insight.\n \t

          " + } + } + } + }, + "com.amazonaws.devopsguru#PutFeedbackResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.devopsguru#ReactiveAnomalies": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#ReactiveAnomalySummary" + } + }, + "com.amazonaws.devopsguru#ReactiveAnomaly": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#AnomalyId", + "traits": { + "smithy.api#documentation": "

          The ID of the reactive anomaly.

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#AnomalySeverity", + "traits": { + "smithy.api#documentation": "

          The severity of the anomaly.

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#AnomalyStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe status of the anomaly.\n\t\t

          " + } + }, + "AnomalyTimeRange": { + "target": "com.amazonaws.devopsguru#AnomalyTimeRange" + }, + "SourceDetails": { + "target": "com.amazonaws.devopsguru#AnomalySourceDetails", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tDetails about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. \n\t\t

          " + } + }, + "AssociatedInsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the insight that contains this anomaly. An insight is composed of related anomalies.\n\t\t

          " + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + } + }, + "traits": { + "smithy.api#documentation": "

          Details about a reactive anomaly. This object is returned by ListAnomalies.

          " + } + }, + "com.amazonaws.devopsguru#ReactiveAnomalySummary": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#AnomalyId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the reactive anomaly.\n\t\t

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#AnomalySeverity", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe severity of the reactive anomaly.\t\t\t\n\t\t

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#AnomalyStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe status of the reactive anomaly.\n\t\t

          " + } + }, + "AnomalyTimeRange": { + "target": "com.amazonaws.devopsguru#AnomalyTimeRange" + }, + "SourceDetails": { + "target": "com.amazonaws.devopsguru#AnomalySourceDetails", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tDetails about the source of the analyzed operational data that triggered the anomaly. The one supported source is Amazon CloudWatch metrics. \n\t\t

          " + } + }, + "AssociatedInsightId": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the insight that contains this anomaly. An insight is composed of related anomalies.\n\t\t

          " + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + } + }, + "traits": { + "smithy.api#documentation": "

          Details about a reactive anomaly. This object is returned by\n\t\t\tDescribeAnomaly.\n

          " + } + }, + "com.amazonaws.devopsguru#ReactiveInsight": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of a reactive insight.\n\t\t

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#InsightName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of a reactive insight.\n\t\t

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#InsightSeverity", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe severity of a reactive insight.\n\t\t

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#InsightStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe status of a reactive insight.\n\t\t

          " + } + }, + "InsightTimeRange": { + "target": "com.amazonaws.devopsguru#InsightTimeRange" + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + }, + "SsmOpsItemId": { + "target": "com.amazonaws.devopsguru#SsmOpsItemId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the AWS Systems Manager OpsItem created for this insight. You must enable \n\t\t\tthe creation of OpsItems for insights before they are created for each insight.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about a reactive insight. This object is returned by ListInsights.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ReactiveInsightSummary": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#InsightId", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of a reactive summary.\n\t\t

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#InsightName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of a reactive insight.\n\t\t

          " + } + }, + "Severity": { + "target": "com.amazonaws.devopsguru#InsightSeverity", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe severity of a reactive insight.\n\t\t

          " + } + }, + "Status": { + "target": "com.amazonaws.devopsguru#InsightStatus", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe status of a reactive insight.\n\t\t

          " + } + }, + "InsightTimeRange": { + "target": "com.amazonaws.devopsguru#InsightTimeRange" + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + } + }, + "traits": { + "smithy.api#documentation": "

          Information about a reactive insight. This object is returned by\n\t\t\t\tDescribeInsight.\n

          " + } + }, + "com.amazonaws.devopsguru#ReactiveInsights": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#ReactiveInsightSummary" + } + }, + "com.amazonaws.devopsguru#Recommendation": { + "type": "structure", + "members": { + "Description": { + "target": "com.amazonaws.devopsguru#RecommendationDescription", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA description of the problem.\n\t\t

          " + } + }, + "Link": { + "target": "com.amazonaws.devopsguru#RecommendationLink", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA hyperlink to information to help you address the problem.\n\t\t

          " + } + }, + "Name": { + "target": "com.amazonaws.devopsguru#RecommendationName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the recommendation.\n\t\t

          " + } + }, + "Reason": { + "target": "com.amazonaws.devopsguru#RecommendationReason", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe reason DevOps Guru flagged the anomalous behavior as a problem.\n\t\t

          " + } + }, + "RelatedEvents": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEvents", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tEvents that are related to the problem. Use these events to learn more about what's happening and to help address the issue.\n\t\t

          " + } + }, + "RelatedAnomalies": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomalies", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAnomalies that are related to the problem. Use these Anomalies to learn more about what's happening and to help address the issue.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Recommendation information to help you remediate detected anomalous behavior that\n\t\t\tgenerated an insight.

          " + } + }, + "com.amazonaws.devopsguru#RecommendationDescription": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationLink": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationName": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationReason": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomalies": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomaly" + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomaly": { + "type": "structure", + "members": { + "Resources": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResources", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of objects that represent resources in which DevOps Guru detected anomalous behavior. Each object contains the name \n\t\t\tand type of the resource.\n\t\t

          " + } + }, + "SourceDetails": { + "target": "com.amazonaws.devopsguru#RelatedAnomalySourceDetails", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about where the anomalous behavior related the recommendation was found. For example, details in Amazon CloudWatch metrics.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about an anomaly that is related to a recommendation.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResource": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResourceName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the resource.\n\t\t

          " + } + }, + "Type": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResourceType", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe type of the resource.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about a resource in which DevOps Guru detected anomalous behavior.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResourceName": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResourceType": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResources": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomalyResource" + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedAnomalySourceDetail": { + "type": "structure", + "members": { + "CloudWatchMetrics": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceDetails", + "traits": { + "smithy.api#documentation": "

          An array of CloudWatchMetricsDetail objects that contains information\n\t\t\tabout the analyzed metrics that displayed anomalous behavior.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tContains an array of RecommendationRelatedCloudWatchMetricsSourceDetail objects that contain the name \n\t\t\tand namespace of an Amazon CloudWatch metric.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceDetail": { + "type": "structure", + "members": { + "MetricName": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceMetricName", + "traits": { + "smithy.api#documentation": "

          The name of the CloudWatch metric.

          " + } + }, + "Namespace": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceNamespace", + "traits": { + "smithy.api#documentation": "

          The namespace of the CloudWatch metric. A namespace is a container for CloudWatch metrics.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about an Amazon CloudWatch metric that is analyzed by DevOps Guru. It is one of many analyzed metrics \n\t\t\tthat are used to generate insights.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceDetails": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceDetail" + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceMetricName": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedCloudWatchMetricsSourceNamespace": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedEvent": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEventName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the event. This corresponds to the Name field in an \n\t\t\tEvent object.\n\t\t

          " + } + }, + "Resources": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEventResources", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA ResourceCollection object that contains arrays of the names of AWS \n\t\t\tCloudFormation stacks.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about an event that is related to a recommendation.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedEventName": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedEventResource": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEventResourceName", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the resource that emitted the event. This corresponds to the Name field in an \n\t\t\tEventResource object.\n\t\t

          " + } + }, + "Type": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEventResourceType", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe type of the resource that emitted the event. This corresponds to the Type field in an \n\t\t\tEventResource object.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about an AWS resource that emitted an event that is related to a recommendation in an insight. \n\t\t

          " + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedEventResourceName": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedEventResourceType": { + "type": "string" + }, + "com.amazonaws.devopsguru#RecommendationRelatedEventResources": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEventResource" + } + }, + "com.amazonaws.devopsguru#RecommendationRelatedEvents": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedEvent" + } + }, + "com.amazonaws.devopsguru#Recommendations": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#Recommendation" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 10 + } + } + }, + "com.amazonaws.devopsguru#RelatedAnomalySourceDetails": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#RecommendationRelatedAnomalySourceDetail" + } + }, + "com.amazonaws.devopsguru#RemoveNotificationChannel": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#RemoveNotificationChannelRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#RemoveNotificationChannelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#ConflictException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tRemoves a notification channel from DevOps Guru. A notification channel is used to notify you when DevOps Guru generates an insight \n \t\tthat contains information about how to improve your operations.\n \t

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/channels/{Id}", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#RemoveNotificationChannelRequest": { + "type": "structure", + "members": { + "Id": { + "target": "com.amazonaws.devopsguru#NotificationChannelId", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe ID of the notification channel to be removed.\n \t

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#RemoveNotificationChannelResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.devopsguru#ResourceCollection": { + "type": "structure", + "members": { + "CloudFormation": { + "target": "com.amazonaws.devopsguru#CloudFormationCollection", + "traits": { + "smithy.api#documentation": "

          An array of the names of AWS CloudFormation stacks. The stacks define AWS resources\n \t\tthat DevOps Guru analyzes. \n \t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA collection of AWS resources supported by DevOps Guru. The one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze \n \tonly the AWS resources that are defined in the stacks.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ResourceCollectionFilter": { + "type": "structure", + "members": { + "CloudFormation": { + "target": "com.amazonaws.devopsguru#CloudFormationCollectionFilter", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about AWS CloudFormation stacks. You can use stacks to specify which AWS resources in your account to analyze. \n\t\t\tFor more information, see Stacks \n\t\t\tin the AWS CloudFormation User Guide.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about a filter used to specify which AWS resources are analyzed for anomalous behavior by DevOps Guru.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ResourceCollectionType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "AWS_CLOUD_FORMATION", + "name": "AWS_CLOUD_FORMATION" + } + ] + } + }, + "com.amazonaws.devopsguru#ResourceIdString": { + "type": "string" + }, + "com.amazonaws.devopsguru#ResourceIdType": { + "type": "string" + }, + "com.amazonaws.devopsguru#ResourceNotFoundException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#required": {} + } + }, + "ResourceId": { + "target": "com.amazonaws.devopsguru#ResourceIdString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe ID of the AWS resource that could not be found.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "ResourceType": { + "target": "com.amazonaws.devopsguru#ResourceIdType", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe type of the AWS resource that could not be found.\n\t\t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A requested resource could not be found.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.devopsguru#RetryAfterSeconds": { + "type": "integer" + }, + "com.amazonaws.devopsguru#SearchInsights": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#SearchInsightsRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#SearchInsightsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n \t\tReturns a list of insights in your AWS account. You can specify which insights are returned by their start time, one or more statuses \n \t\t(ONGOING or CLOSED), one or more severities (LOW, MEDIUM, \n \t\tand HIGH), and type (REACTIVE or PROACTIVE).\n \t

          \n \t

          \n \t\tUse the Filters parameter to specify status and severity \n \t\tsearch parameters. Use the Type parameter to specify REACTIVE or PROACTIVE in your search.\n \t

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/insights/search", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.devopsguru#SearchInsightsFilters": { + "type": "structure", + "members": { + "Severities": { + "target": "com.amazonaws.devopsguru#InsightSeverities", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn array of severity values used to search for insights.\n \t

          " + } + }, + "Statuses": { + "target": "com.amazonaws.devopsguru#InsightStatuses", + "traits": { + "smithy.api#documentation": "

          \n \t\tAn array of status values used to search for insights.\n \t

          " + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#ResourceCollection" + } + }, + "traits": { + "smithy.api#documentation": "

          \n \t\tSpecifies one or more severity values and one or more status values that are used to search \n \t\tfor insights. \n \t

          " + } + }, + "com.amazonaws.devopsguru#SearchInsightsMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.devopsguru#SearchInsightsRequest": { + "type": "structure", + "members": { + "StartTimeRange": { + "target": "com.amazonaws.devopsguru#StartTimeRange", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe start of the time range passed in. Returned insights occurred after this time.\n \t

          ", + "smithy.api#required": {} + } + }, + "Filters": { + "target": "com.amazonaws.devopsguru#SearchInsightsFilters", + "traits": { + "smithy.api#documentation": "

          \n \t\tA SearchInsightsFilters object that is used to set the severity and status filters on your insight search.\n \t

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.devopsguru#SearchInsightsMaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return with a single call.\n\tTo retrieve the remaining results, make another call with the returned nextToken value. \n\tThe default value is 100.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If this value is null, it retrieves the first page.

          " + } + }, + "Type": { + "target": "com.amazonaws.devopsguru#InsightType", + "traits": { + "smithy.api#documentation": "

          \n \t\tThe type of insights you are searching for (REACTIVE or PROACTIVE).\n \t

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#SearchInsightsResponse": { + "type": "structure", + "members": { + "ProactiveInsights": { + "target": "com.amazonaws.devopsguru#ProactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe returned proactive insights.\n\t\t

          " + } + }, + "ReactiveInsights": { + "target": "com.amazonaws.devopsguru#ReactiveInsights", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe returned reactive insights.\n\t\t

          " + } + }, + "NextToken": { + "target": "com.amazonaws.devopsguru#UuidNextToken", + "traits": { + "smithy.api#documentation": "

          The pagination token to use to retrieve \n the next page of results for this operation. If there are no more pages, this value is null.

          " + } + } + } + }, + "com.amazonaws.devopsguru#ServiceIntegrationConfig": { + "type": "structure", + "members": { + "OpsCenter": { + "target": "com.amazonaws.devopsguru#OpsCenterIntegration", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about whether DevOps Guru is configured to create an OpsItem in AWS Systems Manager OpsCenter for each created insight. \n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about the integration of DevOps Guru with another AWS service, such as AWS Systems Manager.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ServiceQuotaExceededException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString" + } + }, + "traits": { + "smithy.api#documentation": "

          The request contains a value that exceeds a maximum quota.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 402 + } + }, + "com.amazonaws.devopsguru#SnsChannelConfig": { + "type": "structure", + "members": { + "TopicArn": { + "target": "com.amazonaws.devopsguru#TopicArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an Amazon Simple Notification Service topic.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains the Amazon Resource Name (ARN) of an Amazon Simple Notification Service topic.

          " + } + }, + "com.amazonaws.devopsguru#SsmOpsItemId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + }, + "smithy.api#pattern": "^.*$" + } + }, + "com.amazonaws.devopsguru#StackName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 128 + }, + "smithy.api#pattern": "^[a-zA-Z*]+[a-zA-Z0-9-]*$" + } + }, + "com.amazonaws.devopsguru#StackNames": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#StackName" + } + }, + "com.amazonaws.devopsguru#StartTimeRange": { + "type": "structure", + "members": { + "FromTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe start time of the time range.\n\t\t

          " + } + }, + "ToTime": { + "target": "com.amazonaws.devopsguru#Timestamp", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe end time of the time range.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA time range used to specify when the behavior of an insight or anomaly started. \n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ThrottlingException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#required": {} + } + }, + "QuotaCode": { + "target": "com.amazonaws.devopsguru#ErrorQuotaCodeString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe code of the quota that was exceeded, causing the throttling exception.\n\t\t

          " + } + }, + "ServiceCode": { + "target": "com.amazonaws.devopsguru#ErrorServiceCodeString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe code of the service that caused the throttling exception.\n\t\t

          " + } + }, + "RetryAfterSeconds": { + "target": "com.amazonaws.devopsguru#RetryAfterSeconds", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe number of seconds after which the action that caused the throttling \n\t\t\texception can be retried.\n\t\t

          ", + "smithy.api#httpHeader": "Retry-After" + } + } + }, + "traits": { + "smithy.api#documentation": "

          The request was denied due to request throttling.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.devopsguru#Timestamp": { + "type": "timestamp" + }, + "com.amazonaws.devopsguru#TopicArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 36, + "max": 1024 + }, + "smithy.api#pattern": "^arn:aws[a-z0-9-]*:sns:[a-z0-9-]+:\\d{12}:[^:]+$" + } + }, + "com.amazonaws.devopsguru#UpdateCloudFormationCollectionFilter": { + "type": "structure", + "members": { + "StackNames": { + "target": "com.amazonaws.devopsguru#UpdateStackNames", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of the names of the stacks to update.\n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains the names of AWS CloudFormation stacks used to update a collection of stacks.

          " + } + }, + "com.amazonaws.devopsguru#UpdateResourceCollection": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#UpdateResourceCollectionRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#UpdateResourceCollectionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#ConflictException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates the collection of resources that DevOps Guru analyzes.\n\t\t\tThe one type of AWS resource collection supported is AWS CloudFormation stacks. DevOps Guru can be configured to analyze \n \tonly the AWS resources that are defined in the stacks. This method also creates the IAM role required for you\n\t\t\tto use DevOps Guru.

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/resource-collections", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#UpdateResourceCollectionAction": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ADD", + "name": "ADD" + }, + { + "value": "REMOVE", + "name": "REMOVE" + } + ] + } + }, + "com.amazonaws.devopsguru#UpdateResourceCollectionFilter": { + "type": "structure", + "members": { + "CloudFormation": { + "target": "com.amazonaws.devopsguru#UpdateCloudFormationCollectionFilter", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA collection of AWS CloudFormation stacks. \n\t\t

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tContains information used to update a collection of AWS resources.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#UpdateResourceCollectionRequest": { + "type": "structure", + "members": { + "Action": { + "target": "com.amazonaws.devopsguru#UpdateResourceCollectionAction", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tSpecifies whether the resource collection in the request is added to or deleted from the resource collection. \n\t\t

          ", + "smithy.api#required": {} + } + }, + "ResourceCollection": { + "target": "com.amazonaws.devopsguru#UpdateResourceCollectionFilter", + "traits": { + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#UpdateResourceCollectionResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.devopsguru#UpdateServiceIntegration": { + "type": "operation", + "input": { + "target": "com.amazonaws.devopsguru#UpdateServiceIntegrationRequest" + }, + "output": { + "target": "com.amazonaws.devopsguru#UpdateServiceIntegrationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.devopsguru#AccessDeniedException" + }, + { + "target": "com.amazonaws.devopsguru#ConflictException" + }, + { + "target": "com.amazonaws.devopsguru#InternalServerException" + }, + { + "target": "com.amazonaws.devopsguru#ThrottlingException" + }, + { + "target": "com.amazonaws.devopsguru#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n\t\t\tEnables or disables integration with a service that can be integrated with DevOps Guru. The one service that can be integrated with\n\t\t\tDevOps Guru is AWS Systems Manager, which can be used to create an OpsItem for each generated insight.\n\t\t

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/service-integrations", + "code": 200 + } + } + }, + "com.amazonaws.devopsguru#UpdateServiceIntegrationConfig": { + "type": "structure", + "members": { + "OpsCenter": { + "target": "com.amazonaws.devopsguru#OpsCenterIntegrationConfig" + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tInformation about updating the integration status of an AWS service, such as AWS Systems Manager, with DevOps Guru. \n\t\t

          " + } + }, + "com.amazonaws.devopsguru#UpdateServiceIntegrationRequest": { + "type": "structure", + "members": { + "ServiceIntegration": { + "target": "com.amazonaws.devopsguru#UpdateServiceIntegrationConfig", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn IntegratedServiceConfig object used to specify the integrated service you want to update, and whether you \n\t\t\twant to update it to enabled or disabled.\n\t\t

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.devopsguru#UpdateServiceIntegrationResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.devopsguru#UpdateStackNames": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#StackName" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.devopsguru#UuidNextToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 36, + "max": 36 + }, + "smithy.api#pattern": "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$" + } + }, + "com.amazonaws.devopsguru#ValidationException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tA message that describes the validation exception.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "Reason": { + "target": "com.amazonaws.devopsguru#ValidationExceptionReason", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe reason the validation exception was thrown.\n\t\t

          " + } + }, + "Fields": { + "target": "com.amazonaws.devopsguru#ValidationExceptionFields" + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tContains information about data passed in to a field during a request that is not valid. \n\t\t

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.devopsguru#ValidationExceptionField": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.devopsguru#ErrorNameString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe name of the field.\n\t\t

          ", + "smithy.api#required": {} + } + }, + "Message": { + "target": "com.amazonaws.devopsguru#ErrorMessageString", + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe message associated with the validation exception with information to help \n\t\t\tdetermine its cause.\n\t\t

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tThe field associated with the validation exception.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ValidationExceptionFields": { + "type": "list", + "member": { + "target": "com.amazonaws.devopsguru#ValidationExceptionField" + }, + "traits": { + "smithy.api#documentation": "

          \n\t\t\tAn array of fields that are associated with the validation exception.\n\t\t

          " + } + }, + "com.amazonaws.devopsguru#ValidationExceptionReason": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "UNKNOWN_OPERATION", + "name": "UNKNOWN_OPERATION" + }, + { + "value": "CANNOT_PARSE", + "name": "CANNOT_PARSE" + }, + { + "value": "FIELD_VALIDATION_FAILED", + "name": "FIELD_VALIDATION_FAILED" + }, + { + "value": "OTHER", + "name": "OTHER" + } + ] + } + } + } +} diff --git a/codegen/sdk-codegen/aws-models/directory-service.2015-04-16.json b/codegen/sdk-codegen/aws-models/directory-service.2015-04-16.json index 248d00bea74e..4196963b2358 100644 --- a/codegen/sdk-codegen/aws-models/directory-service.2015-04-16.json +++ b/codegen/sdk-codegen/aws-models/directory-service.2015-04-16.json @@ -84,11 +84,11 @@ "com.amazonaws.directoryservice#AccessDeniedException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -216,12 +216,6 @@ "com.amazonaws.directoryservice#AddRegionRequest": { "type": "structure", "members": { - "VPCSettings": { - "target": "com.amazonaws.directoryservice#DirectoryVpcSettings", - "traits": { - "smithy.api#required": {} - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { @@ -235,6 +229,12 @@ "smithy.api#documentation": "

          The name of the Region where you want to add domain controllers for replication. For example, us-east-1.

          ", "smithy.api#required": {} } + }, + "VPCSettings": { + "target": "com.amazonaws.directoryservice#DirectoryVpcSettings", + "traits": { + "smithy.api#required": {} + } } } }, @@ -316,17 +316,17 @@ "com.amazonaws.directoryservice#Attribute": { "type": "structure", "members": { - "Value": { - "target": "com.amazonaws.directoryservice#AttributeValue", - "traits": { - "smithy.api#documentation": "

          The value of the attribute.

          " - } - }, "Name": { "target": "com.amazonaws.directoryservice#AttributeName", "traits": { "smithy.api#documentation": "

          The name of the attribute.

          " } + }, + "Value": { + "target": "com.amazonaws.directoryservice#AttributeValue", + "traits": { + "smithy.api#documentation": "

          The value of the attribute.

          " + } } }, "traits": { @@ -353,17 +353,17 @@ "com.amazonaws.directoryservice#AuthenticationFailedException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId", - "traits": { - "smithy.api#documentation": "

          The identifier of the request that caused the exception.

          " - } - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage", "traits": { "smithy.api#documentation": "

          The textual message for the exception.

          " } + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId", + "traits": { + "smithy.api#documentation": "

          The identifier of the request that caused the exception.

          " + } } }, "traits": { @@ -435,6 +435,12 @@ "smithy.api#documentation": "

          The identifier of the certificate.

          " } }, + "State": { + "target": "com.amazonaws.directoryservice#CertificateState", + "traits": { + "smithy.api#documentation": "

          The state of the certificate.

          " + } + }, "StateReason": { "target": "com.amazonaws.directoryservice#CertificateStateReason", "traits": { @@ -447,22 +453,28 @@ "smithy.api#documentation": "

          The common name for the certificate.

          " } }, + "RegisteredDateTime": { + "target": "com.amazonaws.directoryservice#CertificateRegisteredDateTime", + "traits": { + "smithy.api#documentation": "

          The date and time that the certificate was registered.

          " + } + }, "ExpiryDateTime": { "target": "com.amazonaws.directoryservice#CertificateExpiryDateTime", "traits": { "smithy.api#documentation": "

          The date and time when the certificate will expire.

          " } }, - "State": { - "target": "com.amazonaws.directoryservice#CertificateState", + "Type": { + "target": "com.amazonaws.directoryservice#CertificateType", "traits": { - "smithy.api#documentation": "

          The state of the certificate.

          " + "smithy.api#documentation": "

          Select ClientCertAuth for smart card integration.

          " } }, - "RegisteredDateTime": { - "target": "com.amazonaws.directoryservice#CertificateRegisteredDateTime", + "ClientCertAuthSettings": { + "target": "com.amazonaws.directoryservice#ClientCertAuthSettings", "traits": { - "smithy.api#documentation": "

          The date and time that the certificate was registered.

          " + "smithy.api#documentation": "

          Provides information about the client certificate authentication settings. The default value is ClientLDAPS.

          " } } }, @@ -500,11 +512,11 @@ "com.amazonaws.directoryservice#CertificateDoesNotExistException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -524,11 +536,11 @@ "com.amazonaws.directoryservice#CertificateInUseException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -539,6 +551,18 @@ "com.amazonaws.directoryservice#CertificateInfo": { "type": "structure", "members": { + "CertificateId": { + "target": "com.amazonaws.directoryservice#CertificateId", + "traits": { + "smithy.api#documentation": "

          The identifier of the certificate.

          " + } + }, + "CommonName": { + "target": "com.amazonaws.directoryservice#CertificateCN", + "traits": { + "smithy.api#documentation": "

          The common name for the certificate.

          " + } + }, "State": { "target": "com.amazonaws.directoryservice#CertificateState", "traits": { @@ -551,16 +575,10 @@ "smithy.api#documentation": "

          The date and time when the certificate will expire.

          " } }, - "CertificateId": { - "target": "com.amazonaws.directoryservice#CertificateId", - "traits": { - "smithy.api#documentation": "

          The identifier of the certificate.

          " - } - }, - "CommonName": { - "target": "com.amazonaws.directoryservice#CertificateCN", + "Type": { + "target": "com.amazonaws.directoryservice#CertificateType", "traits": { - "smithy.api#documentation": "

          The common name for the certificate.

          " + "smithy.api#documentation": "

          Displays the type of certificate.

          " } } }, @@ -571,11 +589,11 @@ "com.amazonaws.directoryservice#CertificateLimitExceededException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -620,6 +638,21 @@ "com.amazonaws.directoryservice#CertificateStateReason": { "type": "string" }, + "com.amazonaws.directoryservice#CertificateType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ClientCertAuth", + "name": "CLIENT_CERT_AUTH" + }, + { + "value": "ClientLDAPS", + "name": "CLIENT_LDAPS" + } + ] + } + }, "com.amazonaws.directoryservice#CertificatesInfo": { "type": "list", "member": { @@ -638,6 +671,31 @@ "target": "com.amazonaws.directoryservice#CidrIp" } }, + "com.amazonaws.directoryservice#ClientAuthenticationType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "SmartCard", + "name": "SMART_CARD" + } + ] + } + }, + "com.amazonaws.directoryservice#ClientCertAuthSettings": { + "type": "structure", + "members": { + "OCSPUrl": { + "target": "com.amazonaws.directoryservice#OCSPUrl", + "traits": { + "smithy.api#documentation": "

          Specifies the URL of the default OCSP server used to check for revocation status.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains information about the client certificate authentication settings, such as ClientLDAPS or ClientCertAuth.

          " + } + }, "com.amazonaws.directoryservice#ClientException": { "type": "structure", "members": { @@ -659,12 +717,6 @@ "com.amazonaws.directoryservice#Computer": { "type": "structure", "members": { - "ComputerAttributes": { - "target": "com.amazonaws.directoryservice#Attributes", - "traits": { - "smithy.api#documentation": "

          An array of Attribute objects containing the LDAP attributes that belong to the\n computer account.

          " - } - }, "ComputerId": { "target": "com.amazonaws.directoryservice#SID", "traits": { @@ -676,6 +728,12 @@ "traits": { "smithy.api#documentation": "

          The computer name.

          " } + }, + "ComputerAttributes": { + "target": "com.amazonaws.directoryservice#Attributes", + "traits": { + "smithy.api#documentation": "

          An array of Attribute objects containing the LDAP attributes that belong to the\n computer account.

          " + } } }, "traits": { @@ -763,6 +821,26 @@ "com.amazonaws.directoryservice#ConnectDirectoryRequest": { "type": "structure", "members": { + "Name": { + "target": "com.amazonaws.directoryservice#DirectoryName", + "traits": { + "smithy.api#documentation": "

          The fully qualified name of the on-premises directory, such as\n corp.example.com.

          ", + "smithy.api#required": {} + } + }, + "ShortName": { + "target": "com.amazonaws.directoryservice#DirectoryShortName", + "traits": { + "smithy.api#documentation": "

          The NetBIOS name of the on-premises directory, such as CORP.

          " + } + }, + "Password": { + "target": "com.amazonaws.directoryservice#ConnectPassword", + "traits": { + "smithy.api#documentation": "

          The password for the on-premises user account.

          ", + "smithy.api#required": {} + } + }, "Description": { "target": "com.amazonaws.directoryservice#Description", "traits": { @@ -776,12 +854,6 @@ "smithy.api#required": {} } }, - "ShortName": { - "target": "com.amazonaws.directoryservice#DirectoryShortName", - "traits": { - "smithy.api#documentation": "

          The NetBIOS name of the on-premises directory, such as CORP.

          " - } - }, "ConnectSettings": { "target": "com.amazonaws.directoryservice#DirectoryConnectSettings", "traits": { @@ -794,20 +866,6 @@ "traits": { "smithy.api#documentation": "

          The tags to be assigned to AD Connector.

          " } - }, - "Name": { - "target": "com.amazonaws.directoryservice#DirectoryName", - "traits": { - "smithy.api#documentation": "

          The fully qualified name of the on-premises directory, such as\n corp.example.com.

          ", - "smithy.api#required": {} - } - }, - "Password": { - "target": "com.amazonaws.directoryservice#ConnectPassword", - "traits": { - "smithy.api#documentation": "

          The password for the on-premises user account.

          ", - "smithy.api#required": {} - } } }, "traits": { @@ -953,10 +1011,11 @@ "com.amazonaws.directoryservice#CreateComputerRequest": { "type": "structure", "members": { - "OrganizationalUnitDistinguishedName": { - "target": "com.amazonaws.directoryservice#OrganizationalUnitDN", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The fully-qualified distinguished name of the organizational unit to place the computer account in.

          " + "smithy.api#documentation": "

          The identifier of the directory in which to create the computer account.

          ", + "smithy.api#required": {} } }, "ComputerName": { @@ -966,24 +1025,23 @@ "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "Password": { + "target": "com.amazonaws.directoryservice#ComputerPassword", "traits": { - "smithy.api#documentation": "

          The identifier of the directory in which to create the computer account.

          ", + "smithy.api#documentation": "

          A one-time password that is used to join the computer to the directory. You should generate a random, strong password to use for this parameter.

          ", "smithy.api#required": {} } }, - "ComputerAttributes": { - "target": "com.amazonaws.directoryservice#Attributes", + "OrganizationalUnitDistinguishedName": { + "target": "com.amazonaws.directoryservice#OrganizationalUnitDN", "traits": { - "smithy.api#documentation": "

          An array of Attribute objects that contain any LDAP attributes to apply to the\n computer account.

          " + "smithy.api#documentation": "

          The fully-qualified distinguished name of the organizational unit to place the computer account in.

          " } }, - "Password": { - "target": "com.amazonaws.directoryservice#ComputerPassword", + "ComputerAttributes": { + "target": "com.amazonaws.directoryservice#Attributes", "traits": { - "smithy.api#documentation": "

          A one-time password that is used to join the computer to the directory. You should generate a random, strong password to use for this parameter.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          An array of Attribute objects that contain any LDAP attributes to apply to the\n computer account.

          " } } }, @@ -1043,13 +1101,6 @@ "com.amazonaws.directoryservice#CreateConditionalForwarderRequest": { "type": "structure", "members": { - "DnsIpAddrs": { - "target": "com.amazonaws.directoryservice#DnsIpAddrs", - "traits": { - "smithy.api#documentation": "

          The IP addresses of the remote DNS server associated with RemoteDomainName.

          ", - "smithy.api#required": {} - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { @@ -1063,6 +1114,13 @@ "smithy.api#documentation": "

          The fully qualified domain name (FQDN) of the remote domain with which you will set up a trust relationship.

          ", "smithy.api#required": {} } + }, + "DnsIpAddrs": { + "target": "com.amazonaws.directoryservice#DnsIpAddrs", + "traits": { + "smithy.api#documentation": "

          The IP addresses of the remote DNS server associated with RemoteDomainName.

          ", + "smithy.api#required": {} + } } }, "traits": { @@ -1105,16 +1163,24 @@ "com.amazonaws.directoryservice#CreateDirectoryRequest": { "type": "structure", "members": { - "Tags": { - "target": "com.amazonaws.directoryservice#Tags", + "Name": { + "target": "com.amazonaws.directoryservice#DirectoryName", "traits": { - "smithy.api#documentation": "

          The tags to be assigned to the Simple AD directory.

          " + "smithy.api#documentation": "

          The fully qualified name for the directory, such as corp.example.com.

          ", + "smithy.api#required": {} } }, - "VpcSettings": { - "target": "com.amazonaws.directoryservice#DirectoryVpcSettings", + "ShortName": { + "target": "com.amazonaws.directoryservice#DirectoryShortName", "traits": { - "smithy.api#documentation": "

          A DirectoryVpcSettings object that contains additional information for\n the operation.

          " + "smithy.api#documentation": "

          The NetBIOS name of the directory, such as CORP.

          " + } + }, + "Password": { + "target": "com.amazonaws.directoryservice#Password", + "traits": { + "smithy.api#documentation": "

          The password for the directory administrator. The directory creation process creates a\n directory administrator account with the user name Administrator and this\n password.

          \n

          If you need to change the password for the administrator account, you can use the ResetUserPassword API call.

          \n

          The regex pattern for this string is made up of the following conditions:

          \n
            \n
          • \n

            Length (?=^.{8,64}$) – Must be between 8 and 64 characters

            \n
          • \n
          \n

          AND any 3 of the following password complexity rules required by Active Directory:

          \n
            \n
          • \n

            Numbers and upper case and lowercase (?=.*\\d)(?=.*[A-Z])(?=.*[a-z])

            \n
          • \n
          • \n

            Numbers and special characters and lower case (?=.*\\d)(?=.*[^A-Za-z0-9\\s])(?=.*[a-z])

            \n
          • \n
          • \n

            Special characters and upper case and lower case (?=.*[^A-Za-z0-9\\s])(?=.*[A-Z])(?=.*[a-z])

            \n
          • \n
          • \n

            Numbers and upper case and special characters (?=.*\\d)(?=.*[A-Z])(?=.*[^A-Za-z0-9\\s])

            \n
          • \n
          \n

          For additional information about how Active Directory passwords are enforced, see Password must meet complexity requirements on the Microsoft website.

          ", + "smithy.api#required": {} } }, "Description": { @@ -1130,24 +1196,16 @@ "smithy.api#required": {} } }, - "ShortName": { - "target": "com.amazonaws.directoryservice#DirectoryShortName", + "VpcSettings": { + "target": "com.amazonaws.directoryservice#DirectoryVpcSettings", "traits": { - "smithy.api#documentation": "

          The NetBIOS name of the directory, such as CORP.

          " + "smithy.api#documentation": "

          A DirectoryVpcSettings object that contains additional information for\n the operation.

          " } }, - "Password": { - "target": "com.amazonaws.directoryservice#Password", + "Tags": { + "target": "com.amazonaws.directoryservice#Tags", "traits": { - "smithy.api#documentation": "

          The password for the directory administrator. The directory creation process creates a\n directory administrator account with the user name Administrator and this\n password.

          \n

          If you need to change the password for the administrator account, you can use the ResetUserPassword API call.

          \n

          The regex pattern for this string is made up of the following conditions:

          \n
            \n
          • \n

            Length (?=^.{8,64}$) – Must be between 8 and 64 characters

            \n
          • \n
          \n

          AND any 3 of the following password complexity rules required by Active Directory:

          \n
            \n
          • \n

            Numbers and upper case and lowercase (?=.*\\d)(?=.*[A-Z])(?=.*[a-z])

            \n
          • \n
          • \n

            Numbers and special characters and lower case (?=.*\\d)(?=.*[^A-Za-z0-9\\s])(?=.*[a-z])

            \n
          • \n
          • \n

            Special characters and upper case and lower case (?=.*[^A-Za-z0-9\\s])(?=.*[A-Z])(?=.*[a-z])

            \n
          • \n
          • \n

            Numbers and upper case and special characters (?=.*\\d)(?=.*[A-Z])(?=.*[^A-Za-z0-9\\s])

            \n
          • \n
          \n

          For additional information about how Active Directory passwords are enforced, see Password must meet complexity requirements on the Microsoft website.

          ", - "smithy.api#required": {} - } - }, - "Name": { - "target": "com.amazonaws.directoryservice#DirectoryName", - "traits": { - "smithy.api#documentation": "

          The fully qualified name for the directory, such as corp.example.com.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The tags to be assigned to the Simple AD directory.

          " } } }, @@ -1204,17 +1262,17 @@ "com.amazonaws.directoryservice#CreateLogSubscriptionRequest": { "type": "structure", "members": { - "LogGroupName": { - "target": "com.amazonaws.directoryservice#LogGroupName", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The name of the CloudWatch log group where the real-time domain controller logs are\n forwarded.

          ", + "smithy.api#documentation": "

          Identifier of the directory to which you want to subscribe and receive real-time logs to\n your specified CloudWatch log group.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "LogGroupName": { + "target": "com.amazonaws.directoryservice#LogGroupName", "traits": { - "smithy.api#documentation": "

          Identifier of the directory to which you want to subscribe and receive real-time logs to\n your specified CloudWatch log group.

          ", + "smithy.api#documentation": "

          The name of the CloudWatch log group where the real-time domain controller logs are\n forwarded.

          ", "smithy.api#required": {} } } @@ -1256,49 +1314,49 @@ "com.amazonaws.directoryservice#CreateMicrosoftADRequest": { "type": "structure", "members": { - "VpcSettings": { - "target": "com.amazonaws.directoryservice#DirectoryVpcSettings", + "Name": { + "target": "com.amazonaws.directoryservice#DirectoryName", "traits": { - "smithy.api#documentation": "

          Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

          ", + "smithy.api#documentation": "

          The fully qualified domain name for the AWS Managed Microsoft AD directory, such as\n corp.example.com. This name will resolve inside your VPC only. It does not need\n to be publicly resolvable.

          ", "smithy.api#required": {} } }, - "Password": { - "target": "com.amazonaws.directoryservice#Password", + "ShortName": { + "target": "com.amazonaws.directoryservice#DirectoryShortName", "traits": { - "smithy.api#documentation": "

          The password for the default administrative user named Admin.

          \n

          If you need to change the password for the administrator account, you can use the ResetUserPassword API call.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The NetBIOS name for your domain, such as CORP. If you don't specify a NetBIOS name, it will default to the first part of your directory DNS. For example, CORP for the directory DNS corp.example.com.

          " } }, - "Name": { - "target": "com.amazonaws.directoryservice#DirectoryName", + "Password": { + "target": "com.amazonaws.directoryservice#Password", "traits": { - "smithy.api#documentation": "

          The fully qualified domain name for the AWS Managed Microsoft AD directory, such as\n corp.example.com. This name will resolve inside your VPC only. It does not need\n to be publicly resolvable.

          ", + "smithy.api#documentation": "

          The password for the default administrative user named Admin.

          \n

          If you need to change the password for the administrator account, you can use the ResetUserPassword API call.

          ", "smithy.api#required": {} } }, - "Tags": { - "target": "com.amazonaws.directoryservice#Tags", - "traits": { - "smithy.api#documentation": "

          The tags to be assigned to the AWS Managed Microsoft AD directory.

          " - } - }, "Description": { "target": "com.amazonaws.directoryservice#Description", "traits": { "smithy.api#documentation": "

          A description for the directory. This label will appear on the AWS console Directory Details page after the directory is created.

          " } }, + "VpcSettings": { + "target": "com.amazonaws.directoryservice#DirectoryVpcSettings", + "traits": { + "smithy.api#documentation": "

          Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

          ", + "smithy.api#required": {} + } + }, "Edition": { "target": "com.amazonaws.directoryservice#DirectoryEdition", "traits": { "smithy.api#documentation": "

          AWS Managed Microsoft AD is available in two editions: Standard and Enterprise. Enterprise is the default.

          " } }, - "ShortName": { - "target": "com.amazonaws.directoryservice#DirectoryShortName", + "Tags": { + "target": "com.amazonaws.directoryservice#Tags", "traits": { - "smithy.api#documentation": "

          The NetBIOS name for your domain, such as CORP. If you don't specify a NetBIOS name, it will default to the first part of your directory DNS. For example, CORP for the directory DNS corp.example.com.

          " + "smithy.api#documentation": "

          The tags to be assigned to the AWS Managed Microsoft AD directory.

          " } } }, @@ -1355,18 +1413,18 @@ "com.amazonaws.directoryservice#CreateSnapshotRequest": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.directoryservice#SnapshotName", - "traits": { - "smithy.api#documentation": "

          The descriptive name to apply to the snapshot.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          The identifier of the directory of which to take a snapshot.

          ", "smithy.api#required": {} } + }, + "Name": { + "target": "com.amazonaws.directoryservice#SnapshotName", + "traits": { + "smithy.api#documentation": "

          The descriptive name to apply to the snapshot.

          " + } } }, "traits": { @@ -1422,38 +1480,38 @@ "com.amazonaws.directoryservice#CreateTrustRequest": { "type": "structure", "members": { - "TrustDirection": { - "target": "com.amazonaws.directoryservice#TrustDirection", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The direction of the trust relationship.

          ", + "smithy.api#documentation": "

          The Directory ID of the AWS Managed Microsoft AD directory for which to establish the trust relationship.

          ", "smithy.api#required": {} } }, - "TrustPassword": { - "target": "com.amazonaws.directoryservice#TrustPassword", + "RemoteDomainName": { + "target": "com.amazonaws.directoryservice#RemoteDomainName", "traits": { - "smithy.api#documentation": "

          The trust password. The must be the same password that was used when creating the trust relationship on the external domain.

          ", + "smithy.api#documentation": "

          The Fully Qualified Domain Name (FQDN) of the external domain for which to create the trust relationship.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "TrustPassword": { + "target": "com.amazonaws.directoryservice#TrustPassword", "traits": { - "smithy.api#documentation": "

          The Directory ID of the AWS Managed Microsoft AD directory for which to establish the trust relationship.

          ", + "smithy.api#documentation": "

          The trust password. This must be the same password that was used when creating the trust relationship on the external domain.

          ", "smithy.api#required": {} } }, - "SelectiveAuth": { - "target": "com.amazonaws.directoryservice#SelectiveAuth", + "TrustDirection": { + "target": "com.amazonaws.directoryservice#TrustDirection", "traits": { - "smithy.api#documentation": "

          Optional parameter to enable selective authentication for the trust.

          " + "smithy.api#documentation": "

          The direction of the trust relationship.

          ", + "smithy.api#required": {} } }, - "RemoteDomainName": { - "target": "com.amazonaws.directoryservice#RemoteDomainName", + "TrustType": { + "target": "com.amazonaws.directoryservice#TrustType", "traits": { - "smithy.api#documentation": "

          The Fully Qualified Domain Name (FQDN) of the external domain for which to create the trust relationship.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The trust relationship type. Forest is the default.

          " } }, "ConditionalForwarderIpAddrs": { @@ -1462,10 +1520,10 @@ "smithy.api#documentation": "

          The IP addresses of the remote DNS server associated with RemoteDomainName.

          " } }, - "TrustType": { - "target": "com.amazonaws.directoryservice#TrustType", + "SelectiveAuth": { + "target": "com.amazonaws.directoryservice#SelectiveAuth", "traits": { - "smithy.api#documentation": "

          The trust relationship type. Forest is the default.

          " + "smithy.api#documentation": "

          Optional parameter to enable selective authentication for the trust.

          " } } }, @@ -1824,17 +1882,17 @@ "com.amazonaws.directoryservice#DeregisterCertificateRequest": { "type": "structure", "members": { - "CertificateId": { - "target": "com.amazonaws.directoryservice#CertificateId", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The identifier of the certificate.

          ", + "smithy.api#documentation": "

          The identifier of the directory.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "CertificateId": { + "target": "com.amazonaws.directoryservice#CertificateId", "traits": { - "smithy.api#documentation": "

          The identifier of the directory.

          ", + "smithy.api#documentation": "

          The identifier of the certificate.

          ", "smithy.api#required": {} } } @@ -1996,18 +2054,18 @@ "com.amazonaws.directoryservice#DescribeConditionalForwardersRequest": { "type": "structure", "members": { - "RemoteDomainNames": { - "target": "com.amazonaws.directoryservice#RemoteDomainNames", - "traits": { - "smithy.api#documentation": "

          The fully qualified domain names (FQDN) of the remote domains for which to get the list of associated conditional forwarders. If this member is null, all conditional forwarders are returned.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          The directory ID for which to get the list of associated conditional forwarders.

          ", "smithy.api#required": {} } + }, + "RemoteDomainNames": { + "target": "com.amazonaws.directoryservice#RemoteDomainNames", + "traits": { + "smithy.api#documentation": "

          The fully qualified domain names (FQDN) of the remote domains for which to get the list of associated conditional forwarders. If this member is null, all conditional forwarders are returned.

          " + } } }, "traits": { @@ -2060,6 +2118,12 @@ "com.amazonaws.directoryservice#DescribeDirectoriesRequest": { "type": "structure", "members": { + "DirectoryIds": { + "target": "com.amazonaws.directoryservice#DirectoryIds", + "traits": { + "smithy.api#documentation": "

          A list of identifiers of the directories for which to obtain the information. If this\n member is null, all directories that belong to the current account are returned.

          \n

          An empty list results in an InvalidParameterException being thrown.

          " + } + }, "NextToken": { "target": "com.amazonaws.directoryservice#NextToken", "traits": { @@ -2071,12 +2135,6 @@ "traits": { "smithy.api#documentation": "

          The maximum number of items to return. If this value is zero, the maximum number of items\n is specified by the limitations of the operation.

          " } - }, - "DirectoryIds": { - "target": "com.amazonaws.directoryservice#DirectoryIds", - "traits": { - "smithy.api#documentation": "

          A list of identifiers of the directories for which to obtain the information. If this\n member is null, all directories that belong to the current account are returned.

          \n

          An empty list results in an InvalidParameterException being thrown.

          " - } } }, "traits": { @@ -2086,17 +2144,17 @@ "com.amazonaws.directoryservice#DescribeDirectoriesResult": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", - "traits": { - "smithy.api#documentation": "

          If not null, more results are available. Pass this value for the NextToken\n parameter in a subsequent call to DescribeDirectories to retrieve the next\n set of items.

          " - } - }, "DirectoryDescriptions": { "target": "com.amazonaws.directoryservice#DirectoryDescriptions", "traits": { "smithy.api#documentation": "

          The list of DirectoryDescription objects that were retrieved.

          \n

          It is possible that this list contains less than the number of items specified in the\n Limit member of the request. This occurs if there are less than the requested\n number of items left to retrieve, or if the limitations of the operation have been\n exceeded.

          " } + }, + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", + "traits": { + "smithy.api#documentation": "

          If not null, more results are available. Pass this value for the NextToken\n parameter in a subsequent call to DescribeDirectories to retrieve the next\n set of items.

          " + } } }, "traits": { @@ -2143,10 +2201,11 @@ "com.amazonaws.directoryservice#DescribeDomainControllersRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The DescribeDomainControllers.NextToken value from a previous call to DescribeDomainControllers. Pass null if this is the first call.

          " + "smithy.api#documentation": "

          Identifier of the directory for which to retrieve the domain controller information.

          ", + "smithy.api#required": {} } }, "DomainControllerIds": { @@ -2155,11 +2214,10 @@ "smithy.api#documentation": "

          A list of identifiers for the domain controllers whose information will be provided.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", "traits": { - "smithy.api#documentation": "

          Identifier of the directory for which to retrieve the domain controller information.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The DescribeDomainControllers.NextToken value from a previous call to DescribeDomainControllers. Pass null if this is the first call.

          " } }, "Limit": { @@ -2216,17 +2274,17 @@ "com.amazonaws.directoryservice#DescribeEventTopicsRequest": { "type": "structure", "members": { - "TopicNames": { - "target": "com.amazonaws.directoryservice#TopicNames", - "traits": { - "smithy.api#documentation": "

          A list of SNS topic names for which to obtain the information. If this member is null, all associations for the specified Directory ID are returned.

          \n

          An empty list results in an InvalidParameterException being thrown.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          The Directory ID for which to get the list of associated SNS topics. If this member is null, associations for all Directory IDs are returned.

          " } + }, + "TopicNames": { + "target": "com.amazonaws.directoryservice#TopicNames", + "traits": { + "smithy.api#documentation": "

          A list of SNS topic names for which to obtain the information. If this member is null, all associations for the specified Directory ID are returned.

          \n

          An empty list results in an InvalidParameterException being thrown.

          " + } } }, "traits": { @@ -2282,10 +2340,11 @@ "com.amazonaws.directoryservice#DescribeLDAPSSettingsRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The type of next token used for pagination.

          " + "smithy.api#documentation": "

          The identifier of the directory.

          ", + "smithy.api#required": {} } }, "Type": { @@ -2294,11 +2353,10 @@ "smithy.api#documentation": "

          The type of LDAP security to enable. Currently only the value Client is\n supported.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", "traits": { - "smithy.api#documentation": "

          The identifier of the directory.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of next token used for pagination.

          " } }, "Limit": { @@ -2380,7 +2438,7 @@ "NextToken": { "target": "com.amazonaws.directoryservice#NextToken", "traits": { - "smithy.api#documentation": "

          The DescribeRegionsResult.NextToken value from a previous call to\n DescribeRegions. Pass null if this is the first call.

          " + "smithy.api#documentation": "

          The DescribeRegionsResult.NextToken value from a previous call to\n DescribeRegions. Pass null if this is the first call.

          " } } } @@ -2391,13 +2449,13 @@ "RegionsDescription": { "target": "com.amazonaws.directoryservice#RegionsDescription", "traits": { - "smithy.api#documentation": "

          List of regional information related to the directory per replicated Region.

          " + "smithy.api#documentation": "

          List of Region information related to the directory for each replicated Region.

          " } }, "NextToken": { "target": "com.amazonaws.directoryservice#NextToken", "traits": { - "smithy.api#documentation": "

          If not null, more results are available. Pass this value for the NextToken parameter\n in a subsequent call to DescribeRegions to retrieve the next set of items.

          " + "smithy.api#documentation": "

          If not null, more results are available. Pass this value for the NextToken parameter\n in a subsequent call to DescribeRegions to retrieve the next set of items.

          " } } } @@ -2437,10 +2495,11 @@ "com.amazonaws.directoryservice#DescribeSharedDirectoriesRequest": { "type": "structure", "members": { - "Limit": { - "target": "com.amazonaws.directoryservice#Limit", + "OwnerDirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The number of shared directories to return in the response object.

          " + "smithy.api#documentation": "

          Returns the identifier of the directory in the directory owner account.

          ", + "smithy.api#required": {} } }, "SharedDirectoryIds": { @@ -2455,11 +2514,10 @@ "smithy.api#documentation": "

          The DescribeSharedDirectoriesResult.NextToken value from a previous call to\n DescribeSharedDirectories. Pass null if this is the first call.

          " } }, - "OwnerDirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "Limit": { + "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          Returns the identifier of the directory in the directory owner account.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The number of shared directories to return in the response object.

          " } } } @@ -2467,17 +2525,17 @@ "com.amazonaws.directoryservice#DescribeSharedDirectoriesResult": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", - "traits": { - "smithy.api#documentation": "

          If not null, token that indicates that more results are available. Pass this value for the\n NextToken parameter in a subsequent call to DescribeSharedDirectories to retrieve the next set of items.

          " - } - }, "SharedDirectories": { "target": "com.amazonaws.directoryservice#SharedDirectories", "traits": { "smithy.api#documentation": "

          A list of all shared directories in your account.

          " } + }, + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", + "traits": { + "smithy.api#documentation": "

          If not null, token that indicates that more results are available. Pass this value for the\n NextToken parameter in a subsequent call to DescribeSharedDirectories to retrieve the next set of items.

          " + } } } }, @@ -2513,29 +2571,29 @@ "com.amazonaws.directoryservice#DescribeSnapshotsRequest": { "type": "structure", "members": { - "SnapshotIds": { - "target": "com.amazonaws.directoryservice#SnapshotIds", - "traits": { - "smithy.api#documentation": "

          A list of identifiers of the snapshots to obtain the information for. If this member is\n null or empty, all snapshots are returned using the Limit and NextToken\n members.

          " - } - }, - "Limit": { - "target": "com.amazonaws.directoryservice#Limit", - "traits": { - "smithy.api#documentation": "

          The maximum number of objects to return.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          The identifier of the directory for which to retrieve snapshot information.

          " } }, + "SnapshotIds": { + "target": "com.amazonaws.directoryservice#SnapshotIds", + "traits": { + "smithy.api#documentation": "

          A list of identifiers of the snapshots to obtain the information for. If this member is\n null or empty, all snapshots are returned using the Limit and NextToken\n members.

          " + } + }, "NextToken": { "target": "com.amazonaws.directoryservice#NextToken", "traits": { "smithy.api#documentation": "

          The DescribeSnapshotsResult.NextToken value from a previous call to\n DescribeSnapshots. Pass null if this is the first call.

          " } + }, + "Limit": { + "target": "com.amazonaws.directoryservice#Limit", + "traits": { + "smithy.api#documentation": "

          The maximum number of objects to return.

          " + } } }, "traits": { @@ -2597,12 +2655,6 @@ "com.amazonaws.directoryservice#DescribeTrustsRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", - "traits": { - "smithy.api#documentation": "

          The DescribeTrustsResult.NextToken value from a previous call to\n DescribeTrusts. Pass null if this is the first call.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { @@ -2615,6 +2667,12 @@ "smithy.api#documentation": "

          A list of identifiers of the trust relationships for which to obtain the information. If this member is null, all trust relationships that belong to the current account are returned.

          \n

          An empty list results in an InvalidParameterException being thrown.

          " } }, + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", + "traits": { + "smithy.api#documentation": "

          The DescribeTrustsResult.NextToken value from a previous call to\n DescribeTrusts. Pass null if this is the first call.

          " + } + }, "Limit": { "target": "com.amazonaws.directoryservice#Limit", "traits": { @@ -2667,11 +2725,11 @@ "com.amazonaws.directoryservice#DirectoryAlreadyInRegionException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -2733,12 +2791,6 @@ "com.amazonaws.directoryservice#DirectoryConnectSettingsDescription": { "type": "structure", "members": { - "AvailabilityZones": { - "target": "com.amazonaws.directoryservice#AvailabilityZones", - "traits": { - "smithy.api#documentation": "

          A list of the Availability Zones that the directory is in.

          " - } - }, "VpcId": { "target": "com.amazonaws.directoryservice#VpcId", "traits": { @@ -2757,17 +2809,23 @@ "smithy.api#documentation": "

          The user name of the service account in the on-premises directory.

          " } }, - "ConnectIps": { - "target": "com.amazonaws.directoryservice#IpAddrs", - "traits": { - "smithy.api#documentation": "

          The IP addresses of the AD Connector servers.

          " - } - }, "SecurityGroupId": { "target": "com.amazonaws.directoryservice#SecurityGroupId", "traits": { "smithy.api#documentation": "

          The security group identifier for the AD Connector directory.

          " } + }, + "AvailabilityZones": { + "target": "com.amazonaws.directoryservice#AvailabilityZones", + "traits": { + "smithy.api#documentation": "

          A list of the Availability Zones that the directory is in.

          " + } + }, + "ConnectIps": { + "target": "com.amazonaws.directoryservice#IpAddrs", + "traits": { + "smithy.api#documentation": "

          The IP addresses of the AD Connector servers.

          " + } } }, "traits": { @@ -2777,34 +2835,34 @@ "com.amazonaws.directoryservice#DirectoryDescription": { "type": "structure", "members": { - "ShareStatus": { - "target": "com.amazonaws.directoryservice#ShareStatus", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          Current directory status of the shared AWS Managed Microsoft AD directory.

          " + "smithy.api#documentation": "

          The directory identifier.

          " } }, - "Stage": { - "target": "com.amazonaws.directoryservice#DirectoryStage", + "Name": { + "target": "com.amazonaws.directoryservice#DirectoryName", "traits": { - "smithy.api#documentation": "

          The current stage of the directory.

          " + "smithy.api#documentation": "

          The fully qualified name of the directory.

          " } }, - "ConnectSettings": { - "target": "com.amazonaws.directoryservice#DirectoryConnectSettingsDescription", + "ShortName": { + "target": "com.amazonaws.directoryservice#DirectoryShortName", "traits": { - "smithy.api#documentation": "

          A DirectoryConnectSettingsDescription object that contains additional\n information about an AD Connector directory. This member is only present if the directory is\n an AD Connector directory.

          " + "smithy.api#documentation": "

          The short name of the directory.

          " } }, - "LaunchTime": { - "target": "com.amazonaws.directoryservice#LaunchTime", + "Size": { + "target": "com.amazonaws.directoryservice#DirectorySize", "traits": { - "smithy.api#documentation": "

          Specifies when the directory was created.

          " + "smithy.api#documentation": "

          The directory size.

          " } }, - "StageReason": { - "target": "com.amazonaws.directoryservice#StageReason", + "Edition": { + "target": "com.amazonaws.directoryservice#DirectoryEdition", "traits": { - "smithy.api#documentation": "

          Additional information about the directory stage.

          " + "smithy.api#documentation": "

          The edition associated with this directory.

          " } }, "Alias": { @@ -2813,70 +2871,52 @@ "smithy.api#documentation": "

          The alias for the directory. If no alias has been created for the directory, the alias is\n the directory identifier, such as d-XXXXXXXXXX.

          " } }, - "DesiredNumberOfDomainControllers": { - "target": "com.amazonaws.directoryservice#DesiredNumberOfDomainControllers", - "traits": { - "smithy.api#documentation": "

          The desired number of domain controllers in the directory if the directory is Microsoft AD.

          " - } - }, - "ShareMethod": { - "target": "com.amazonaws.directoryservice#ShareMethod", - "traits": { - "smithy.api#documentation": "

          The method used when sharing a directory to determine whether the directory should be\n shared within your AWS organization (ORGANIZATIONS) or with any AWS account by\n sending a shared directory request (HANDSHAKE).

          " - } - }, "AccessUrl": { "target": "com.amazonaws.directoryservice#AccessUrl", "traits": { "smithy.api#documentation": "

          The access URL for the directory, such as\n http://.awsapps.com. If no alias has been created for the\n directory, is the directory identifier, such as\n d-XXXXXXXXXX.

          " } }, - "Size": { - "target": "com.amazonaws.directoryservice#DirectorySize", - "traits": { - "smithy.api#documentation": "

          The directory size.

          " - } - }, - "ShareNotes": { - "target": "com.amazonaws.directoryservice#Notes", + "Description": { + "target": "com.amazonaws.directoryservice#Description", "traits": { - "smithy.api#documentation": "

          A directory share request that is sent by the directory owner to the directory consumer.\n The request includes a typed message to help the directory consumer administrator determine\n whether to approve or reject the share invitation.

          " + "smithy.api#documentation": "

          The description for the directory.

          " } }, - "Type": { - "target": "com.amazonaws.directoryservice#DirectoryType", + "DnsIpAddrs": { + "target": "com.amazonaws.directoryservice#DnsIpAddrs", "traits": { - "smithy.api#documentation": "

          The directory size.

          " + "smithy.api#documentation": "

          The IP addresses of the DNS servers for the directory. For a Simple AD or Microsoft AD\n directory, these are the IP addresses of the Simple AD or Microsoft AD directory servers. For\n an AD Connector directory, these are the IP addresses of the DNS servers or domain controllers\n in the on-premises directory to which the AD Connector is connected.

          " } }, - "ShortName": { - "target": "com.amazonaws.directoryservice#DirectoryShortName", + "Stage": { + "target": "com.amazonaws.directoryservice#DirectoryStage", "traits": { - "smithy.api#documentation": "

          The short name of the directory.

          " + "smithy.api#documentation": "

          The current stage of the directory.

          " } }, - "RadiusStatus": { - "target": "com.amazonaws.directoryservice#RadiusStatus", + "ShareStatus": { + "target": "com.amazonaws.directoryservice#ShareStatus", "traits": { - "smithy.api#documentation": "

          The status of the RADIUS MFA server connection.

          " + "smithy.api#documentation": "

          Current directory status of the shared AWS Managed Microsoft AD directory.

          " } }, - "OwnerDirectoryDescription": { - "target": "com.amazonaws.directoryservice#OwnerDirectoryDescription", + "ShareMethod": { + "target": "com.amazonaws.directoryservice#ShareMethod", "traits": { - "smithy.api#documentation": "

          Describes the AWS Managed Microsoft AD directory in the directory owner account.

          " + "smithy.api#documentation": "

          The method used when sharing a directory to determine whether the directory should be\n shared within your AWS organization (ORGANIZATIONS) or with any AWS account by\n sending a shared directory request (HANDSHAKE).

          " } }, - "Description": { - "target": "com.amazonaws.directoryservice#Description", + "ShareNotes": { + "target": "com.amazonaws.directoryservice#Notes", "traits": { - "smithy.api#documentation": "

          The description for the directory.

          " + "smithy.api#documentation": "

          A directory share request that is sent by the directory owner to the directory consumer.\n The request includes a typed message to help the directory consumer administrator determine\n whether to approve or reject the share invitation.

          " } }, - "RadiusSettings": { - "target": "com.amazonaws.directoryservice#RadiusSettings", + "LaunchTime": { + "target": "com.amazonaws.directoryservice#LaunchTime", "traits": { - "smithy.api#documentation": "

          A RadiusSettings object that contains information about the RADIUS\n server configured for this directory.

          " + "smithy.api#documentation": "

          Specifies when the directory was created.

          " } }, "StageLastUpdatedDateTime": { @@ -2885,10 +2925,10 @@ "smithy.api#documentation": "

          The date and time that the stage was last updated.

          " } }, - "Name": { - "target": "com.amazonaws.directoryservice#DirectoryName", + "Type": { + "target": "com.amazonaws.directoryservice#DirectoryType", "traits": { - "smithy.api#documentation": "

          The fully qualified name of the directory.

          " + "smithy.api#documentation": "

          The directory type.

          " } }, "VpcSettings": { @@ -2897,16 +2937,28 @@ "smithy.api#documentation": "

          A DirectoryVpcSettingsDescription object that contains additional\n information about a directory. This member is only present if the directory is a Simple AD or\n Managed AD directory.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "ConnectSettings": { + "target": "com.amazonaws.directoryservice#DirectoryConnectSettingsDescription", "traits": { - "smithy.api#documentation": "

          The directory identifier.

          " + "smithy.api#documentation": "

          A DirectoryConnectSettingsDescription object that contains additional\n information about an AD Connector directory. This member is only present if the directory is\n an AD Connector directory.

          " } }, - "Edition": { - "target": "com.amazonaws.directoryservice#DirectoryEdition", + "RadiusSettings": { + "target": "com.amazonaws.directoryservice#RadiusSettings", "traits": { - "smithy.api#documentation": "

          The edition associated with this directory.

          " + "smithy.api#documentation": "

          A RadiusSettings object that contains information about the RADIUS\n server configured for this directory.

          " + } + }, + "RadiusStatus": { + "target": "com.amazonaws.directoryservice#RadiusStatus", + "traits": { + "smithy.api#documentation": "

          The status of the RADIUS MFA server connection.

          " + } + }, + "StageReason": { + "target": "com.amazonaws.directoryservice#StageReason", + "traits": { + "smithy.api#documentation": "

          Additional information about the directory stage.

          " } }, "SsoEnabled": { @@ -2915,16 +2967,22 @@ "smithy.api#documentation": "

          Indicates if single sign-on is enabled for the directory. For more information, see EnableSso and DisableSso.

          " } }, - "RegionsInfo": { - "target": "com.amazonaws.directoryservice#RegionsInfo", + "DesiredNumberOfDomainControllers": { + "target": "com.amazonaws.directoryservice#DesiredNumberOfDomainControllers", "traits": { - "smithy.api#documentation": "

          Lists the Regions where the directory has replicated.

          " + "smithy.api#documentation": "

          The desired number of domain controllers in the directory if the directory is Microsoft AD.

          " } }, - "DnsIpAddrs": { - "target": "com.amazonaws.directoryservice#DnsIpAddrs", + "OwnerDirectoryDescription": { + "target": "com.amazonaws.directoryservice#OwnerDirectoryDescription", "traits": { - "smithy.api#documentation": "

          The IP addresses of the DNS servers for the directory. For a Simple AD or Microsoft AD\n directory, these are the IP addresses of the Simple AD or Microsoft AD directory servers. For\n an AD Connector directory, these are the IP addresses of the DNS servers or domain controllers\n in the on-premises directory to which the AD Connector is connected.

          " + "smithy.api#documentation": "

          Describes the AWS Managed Microsoft AD directory in the directory owner account.

          " + } + }, + "RegionsInfo": { + "target": "com.amazonaws.directoryservice#RegionsInfo", + "traits": { + "smithy.api#documentation": "

          Lists the Regions where the directory has replicated.

          " } } }, @@ -2989,11 +3047,11 @@ "com.amazonaws.directoryservice#DirectoryLimitExceededException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -3004,28 +3062,28 @@ "com.amazonaws.directoryservice#DirectoryLimits": { "type": "structure", "members": { - "ConnectedDirectoriesLimit": { + "CloudOnlyDirectoriesLimit": { "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          The maximum number of connected directories allowed in the Region.

          " + "smithy.api#documentation": "

          The maximum number of cloud directories allowed in the Region.

          " } }, - "CloudOnlyMicrosoftADLimit": { + "CloudOnlyDirectoriesCurrentCount": { "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          The maximum number of AWS Managed Microsoft AD directories allowed in the region.

          " + "smithy.api#documentation": "

          The current number of cloud directories in the Region.

          " } }, - "ConnectedDirectoriesCurrentCount": { - "target": "com.amazonaws.directoryservice#Limit", + "CloudOnlyDirectoriesLimitReached": { + "target": "com.amazonaws.directoryservice#CloudOnlyDirectoriesLimitReached", "traits": { - "smithy.api#documentation": "

          The current number of connected directories in the Region.

          " + "smithy.api#documentation": "

          Indicates if the cloud directory limit has been reached.

          " } }, - "CloudOnlyMicrosoftADLimitReached": { - "target": "com.amazonaws.directoryservice#CloudOnlyDirectoriesLimitReached", + "CloudOnlyMicrosoftADLimit": { + "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          Indicates if the AWS Managed Microsoft AD directory limit has been reached.

          " + "smithy.api#documentation": "

          The maximum number of AWS Managed Microsoft AD directories allowed in the region.

          " } }, "CloudOnlyMicrosoftADCurrentCount": { @@ -3034,28 +3092,28 @@ "smithy.api#documentation": "

          The current number of AWS Managed Microsoft AD directories in the region.

          " } }, - "ConnectedDirectoriesLimitReached": { - "target": "com.amazonaws.directoryservice#ConnectedDirectoriesLimitReached", + "CloudOnlyMicrosoftADLimitReached": { + "target": "com.amazonaws.directoryservice#CloudOnlyDirectoriesLimitReached", "traits": { - "smithy.api#documentation": "

          Indicates if the connected directory limit has been reached.

          " + "smithy.api#documentation": "

          Indicates if the AWS Managed Microsoft AD directory limit has been reached.

          " } }, - "CloudOnlyDirectoriesLimit": { + "ConnectedDirectoriesLimit": { "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          The maximum number of cloud directories allowed in the Region.

          " + "smithy.api#documentation": "

          The maximum number of connected directories allowed in the Region.

          " } }, - "CloudOnlyDirectoriesCurrentCount": { + "ConnectedDirectoriesCurrentCount": { "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          The current number of cloud directories in the Region.

          " + "smithy.api#documentation": "

          The current number of connected directories in the Region.

          " } }, - "CloudOnlyDirectoriesLimitReached": { - "target": "com.amazonaws.directoryservice#CloudOnlyDirectoriesLimitReached", + "ConnectedDirectoriesLimitReached": { + "target": "com.amazonaws.directoryservice#ConnectedDirectoriesLimitReached", "traits": { - "smithy.api#documentation": "

          Indicates if the cloud directory limit has been reached.

          " + "smithy.api#documentation": "

          Indicates if the connected directory limit has been reached.

          " } } }, @@ -3072,11 +3130,11 @@ "com.amazonaws.directoryservice#DirectoryNotSharedException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -3181,6 +3239,9 @@ { "target": "com.amazonaws.directoryservice#DescribeTrusts" }, + { + "target": "com.amazonaws.directoryservice#DisableClientAuthentication" + }, { "target": "com.amazonaws.directoryservice#DisableLDAPS" }, @@ -3190,6 +3251,9 @@ { "target": "com.amazonaws.directoryservice#DisableSso" }, + { + "target": "com.amazonaws.directoryservice#EnableClientAuthentication" + }, { "target": "com.amazonaws.directoryservice#EnableLDAPS" }, @@ -3400,57 +3464,112 @@ "com.amazonaws.directoryservice#DirectoryVpcSettings": { "type": "structure", "members": { + "VpcId": { + "target": "com.amazonaws.directoryservice#VpcId", + "traits": { + "smithy.api#documentation": "

          The identifier of the VPC in which to create the directory.

          ", + "smithy.api#required": {} + } + }, "SubnetIds": { "target": "com.amazonaws.directoryservice#SubnetIds", "traits": { "smithy.api#documentation": "

          The identifiers of the subnets for the directory servers. The two subnets must be in\n different Availability Zones. AWS Directory Service creates a directory server and a DNS\n server in each of these subnets.

          ", "smithy.api#required": {} } - }, + } + }, + "traits": { + "smithy.api#documentation": "

          Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

          " + } + }, + "com.amazonaws.directoryservice#DirectoryVpcSettingsDescription": { + "type": "structure", + "members": { "VpcId": { "target": "com.amazonaws.directoryservice#VpcId", "traits": { - "smithy.api#documentation": "

          The identifier of the VPC in which to create the directory.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The identifier of the VPC that the directory is in.

          " + } + }, + "SubnetIds": { + "target": "com.amazonaws.directoryservice#SubnetIds", + "traits": { + "smithy.api#documentation": "

          The identifiers of the subnets for the directory servers.

          " + } + }, + "SecurityGroupId": { + "target": "com.amazonaws.directoryservice#SecurityGroupId", + "traits": { + "smithy.api#documentation": "

          The domain controller security group identifier for the directory.

          " + } + }, + "AvailabilityZones": { + "target": "com.amazonaws.directoryservice#AvailabilityZones", + "traits": { + "smithy.api#documentation": "

          The list of Availability Zones that the directory is in.

          " } } }, "traits": { - "smithy.api#documentation": "

          Contains VPC information for the CreateDirectory or CreateMicrosoftAD operation.

          " + "smithy.api#documentation": "

          Contains information about the directory.

          " + } + }, + "com.amazonaws.directoryservice#DisableClientAuthentication": { + "type": "operation", + "input": { + "target": "com.amazonaws.directoryservice#DisableClientAuthenticationRequest" + }, + "output": { + "target": "com.amazonaws.directoryservice#DisableClientAuthenticationResult" + }, + "errors": [ + { + "target": "com.amazonaws.directoryservice#AccessDeniedException" + }, + { + "target": "com.amazonaws.directoryservice#ClientException" + }, + { + "target": "com.amazonaws.directoryservice#DirectoryDoesNotExistException" + }, + { + "target": "com.amazonaws.directoryservice#InvalidClientAuthStatusException" + }, + { + "target": "com.amazonaws.directoryservice#ServiceException" + }, + { + "target": "com.amazonaws.directoryservice#UnsupportedOperationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Disable client authentication for smart cards.

          " } }, - "com.amazonaws.directoryservice#DirectoryVpcSettingsDescription": { + "com.amazonaws.directoryservice#DisableClientAuthenticationRequest": { "type": "structure", "members": { - "SecurityGroupId": { - "target": "com.amazonaws.directoryservice#SecurityGroupId", - "traits": { - "smithy.api#documentation": "

          The domain controller security group identifier for the directory.

          " - } - }, - "VpcId": { - "target": "com.amazonaws.directoryservice#VpcId", - "traits": { - "smithy.api#documentation": "

          The identifier of the VPC that the directory is in.

          " - } - }, - "AvailabilityZones": { - "target": "com.amazonaws.directoryservice#AvailabilityZones", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The list of Availability Zones that the directory is in.

          " + "smithy.api#documentation": "

          Disable client authentication in a specified directory for smart cards.

          ", + "smithy.api#required": {} } }, - "SubnetIds": { - "target": "com.amazonaws.directoryservice#SubnetIds", + "Type": { + "target": "com.amazonaws.directoryservice#ClientAuthenticationType", "traits": { - "smithy.api#documentation": "

          The identifiers of the subnets for the directory servers.

          " + "smithy.api#documentation": "

          Disable the type of client authentication request.

          ", + "smithy.api#required": {} } } - }, - "traits": { - "smithy.api#documentation": "

          Contains information about the directory.

          " } }, + "com.amazonaws.directoryservice#DisableClientAuthenticationResult": { + "type": "structure", + "members": {} + }, "com.amazonaws.directoryservice#DisableLDAPS": { "type": "operation", "input": { @@ -3586,12 +3705,6 @@ "com.amazonaws.directoryservice#DisableSsoRequest": { "type": "structure", "members": { - "Password": { - "target": "com.amazonaws.directoryservice#ConnectPassword", - "traits": { - "smithy.api#documentation": "

          The password of an alternate account to use to disable single-sign on. This is only used\n for AD Connector directories. For more information, see the UserName parameter.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { @@ -3604,6 +3717,12 @@ "traits": { "smithy.api#documentation": "

          The username of an alternate account to use to disable single-sign on. This is only used for AD Connector directories. This account must have privileges to remove a service principal name.

          \n

          If the AD Connector service account does not have privileges to remove a service principal\n name, you can specify an alternate account with the UserName and Password\n parameters. These credentials are only used to disable single sign-on and are not stored by\n the service. The AD Connector service account is not changed.

          " } + }, + "Password": { + "target": "com.amazonaws.directoryservice#ConnectPassword", + "traits": { + "smithy.api#documentation": "

          The password of an alternate account to use to disable single-sign on. This is only used\n for AD Connector directories. For more information, see the UserName parameter.

          " + } } }, "traits": { @@ -3626,28 +3745,22 @@ "com.amazonaws.directoryservice#DomainController": { "type": "structure", "members": { - "DnsIpAddr": { - "target": "com.amazonaws.directoryservice#IpAddr", - "traits": { - "smithy.api#documentation": "

          The IP address of the domain controller.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          Identifier of the directory where the domain controller resides.

          " } }, - "LaunchTime": { - "target": "com.amazonaws.directoryservice#LaunchTime", + "DomainControllerId": { + "target": "com.amazonaws.directoryservice#DomainControllerId", "traits": { - "smithy.api#documentation": "

          Specifies when the domain controller was created.

          " + "smithy.api#documentation": "

          Identifies a specific domain controller in the directory.

          " } }, - "StatusLastUpdatedDateTime": { - "target": "com.amazonaws.directoryservice#LastUpdatedDateTime", + "DnsIpAddr": { + "target": "com.amazonaws.directoryservice#IpAddr", "traits": { - "smithy.api#documentation": "

          The date and time that the status was last updated.

          " + "smithy.api#documentation": "

          The IP address of the domain controller.

          " } }, "VpcId": { @@ -3656,35 +3769,41 @@ "smithy.api#documentation": "

          The identifier of the VPC that contains the domain controller.

          " } }, - "DomainControllerId": { - "target": "com.amazonaws.directoryservice#DomainControllerId", - "traits": { - "smithy.api#documentation": "

          Identifies a specific domain controller in the directory.

          " - } - }, "SubnetId": { "target": "com.amazonaws.directoryservice#SubnetId", "traits": { "smithy.api#documentation": "

          Identifier of the subnet in the VPC that contains the domain controller.

          " } }, - "Status": { - "target": "com.amazonaws.directoryservice#DomainControllerStatus", - "traits": { - "smithy.api#documentation": "

          The status of the domain controller.

          " - } - }, "AvailabilityZone": { "target": "com.amazonaws.directoryservice#AvailabilityZone", "traits": { "smithy.api#documentation": "

          The Availability Zone where the domain controller is located.

          " } }, + "Status": { + "target": "com.amazonaws.directoryservice#DomainControllerStatus", + "traits": { + "smithy.api#documentation": "

          The status of the domain controller.

          " + } + }, "StatusReason": { "target": "com.amazonaws.directoryservice#DomainControllerStatusReason", "traits": { "smithy.api#documentation": "

          A description of the domain controller state.

          " } + }, + "LaunchTime": { + "target": "com.amazonaws.directoryservice#LaunchTime", + "traits": { + "smithy.api#documentation": "

          Specifies when the domain controller was created.

          " + } + }, + "StatusLastUpdatedDateTime": { + "target": "com.amazonaws.directoryservice#LastUpdatedDateTime", + "traits": { + "smithy.api#documentation": "

          The date and time that the status was last updated.

          " + } } }, "traits": { @@ -3762,6 +3881,64 @@ "target": "com.amazonaws.directoryservice#DomainController" } }, + "com.amazonaws.directoryservice#EnableClientAuthentication": { + "type": "operation", + "input": { + "target": "com.amazonaws.directoryservice#EnableClientAuthenticationRequest" + }, + "output": { + "target": "com.amazonaws.directoryservice#EnableClientAuthenticationResult" + }, + "errors": [ + { + "target": "com.amazonaws.directoryservice#AccessDeniedException" + }, + { + "target": "com.amazonaws.directoryservice#ClientException" + }, + { + "target": "com.amazonaws.directoryservice#DirectoryDoesNotExistException" + }, + { + "target": "com.amazonaws.directoryservice#InvalidClientAuthStatusException" + }, + { + "target": "com.amazonaws.directoryservice#NoAvailableCertificateException" + }, + { + "target": "com.amazonaws.directoryservice#ServiceException" + }, + { + "target": "com.amazonaws.directoryservice#UnsupportedOperationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Enable client authentication for smart cards.

          " + } + }, + "com.amazonaws.directoryservice#EnableClientAuthenticationRequest": { + "type": "structure", + "members": { + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", + "traits": { + "smithy.api#documentation": "

          Enable client authentication in a specified directory for smart cards.

          ", + "smithy.api#required": {} + } + }, + "Type": { + "target": "com.amazonaws.directoryservice#ClientAuthenticationType", + "traits": { + "smithy.api#documentation": "

          Enable the type of client authentication request.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.directoryservice#EnableClientAuthenticationResult": { + "type": "structure", + "members": {} + }, "com.amazonaws.directoryservice#EnableLDAPS": { "type": "operation", "input": { @@ -3950,11 +4127,11 @@ "com.amazonaws.directoryservice#EntityAlreadyExistsException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -3965,11 +4142,11 @@ "com.amazonaws.directoryservice#EntityDoesNotExistException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -3986,29 +4163,29 @@ "smithy.api#documentation": "

          The Directory ID of an AWS Directory Service directory that will publish status messages to an SNS topic.

          " } }, - "TopicArn": { - "target": "com.amazonaws.directoryservice#TopicArn", - "traits": { - "smithy.api#documentation": "

          The SNS topic ARN (Amazon Resource Name).

          " - } - }, - "Status": { - "target": "com.amazonaws.directoryservice#TopicStatus", - "traits": { - "smithy.api#documentation": "

          The topic registration status.

          " - } - }, "TopicName": { "target": "com.amazonaws.directoryservice#TopicName", "traits": { "smithy.api#documentation": "

          The name of an AWS SNS topic that receives status messages from the directory.

          " } }, + "TopicArn": { + "target": "com.amazonaws.directoryservice#TopicArn", + "traits": { + "smithy.api#documentation": "

          The SNS topic ARN (Amazon Resource Name).

          " + } + }, "CreatedDateTime": { "target": "com.amazonaws.directoryservice#CreatedDateTime", "traits": { "smithy.api#documentation": "

          The date and time of when you associated your directory with the SNS topic.

          " } + }, + "Status": { + "target": "com.amazonaws.directoryservice#TopicStatus", + "traits": { + "smithy.api#documentation": "

          The topic registration status.

          " + } } }, "traits": { @@ -4153,14 +4330,29 @@ "smithy.api#error": "client" } }, - "com.amazonaws.directoryservice#InvalidLDAPSStatusException": { + "com.amazonaws.directoryservice#InvalidClientAuthStatusException": { "type": "structure", "members": { + "Message": { + "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, "RequestId": { "target": "com.amazonaws.directoryservice#RequestId" - }, + } + }, + "traits": { + "smithy.api#documentation": "

          The client authorization was invalid.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.directoryservice#InvalidLDAPSStatusException": { + "type": "structure", + "members": { "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -4171,11 +4363,11 @@ "com.amazonaws.directoryservice#InvalidNextTokenException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -4216,11 +4408,11 @@ "com.amazonaws.directoryservice#InvalidTargetException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -4263,16 +4455,16 @@ "com.amazonaws.directoryservice#IpRouteInfo": { "type": "structure", "members": { - "CidrIp": { - "target": "com.amazonaws.directoryservice#CidrIp", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          IP address block in the IpRoute.

          " + "smithy.api#documentation": "

          Identifier (ID) of the directory associated with the IP addresses.

          " } }, - "AddedDateTime": { - "target": "com.amazonaws.directoryservice#AddedDateTime", + "CidrIp": { + "target": "com.amazonaws.directoryservice#CidrIp", "traits": { - "smithy.api#documentation": "

          The date and time the address block was added to the directory.

          " + "smithy.api#documentation": "

          IP address block in the IpRoute.

          " } }, "IpRouteStatusMsg": { @@ -4281,16 +4473,10 @@ "smithy.api#documentation": "

          The status of the IP address block.

          " } }, - "Description": { - "target": "com.amazonaws.directoryservice#Description", - "traits": { - "smithy.api#documentation": "

          Description of the IpRouteInfo.

          " - } - }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "AddedDateTime": { + "target": "com.amazonaws.directoryservice#AddedDateTime", "traits": { - "smithy.api#documentation": "

          Identifier (ID) of the directory associated with the IP addresses.

          " + "smithy.api#documentation": "

          The date and time the address block was added to the directory.

          " } }, "IpRouteStatusReason": { @@ -4298,6 +4484,12 @@ "traits": { "smithy.api#documentation": "

          The reason for the IpRouteStatusMsg.

          " } + }, + "Description": { + "target": "com.amazonaws.directoryservice#Description", + "traits": { + "smithy.api#documentation": "

          Description of the IpRouteInfo.

          " + } } }, "traits": { @@ -4368,10 +4560,10 @@ "com.amazonaws.directoryservice#LDAPSSettingInfo": { "type": "structure", "members": { - "LastUpdatedDateTime": { - "target": "com.amazonaws.directoryservice#LastUpdatedDateTime", + "LDAPSStatus": { + "target": "com.amazonaws.directoryservice#LDAPSStatus", "traits": { - "smithy.api#documentation": "

          The date and time when the LDAPS settings were last updated.

          " + "smithy.api#documentation": "

          The state of the LDAPS settings.

          " } }, "LDAPSStatusReason": { @@ -4380,10 +4572,10 @@ "smithy.api#documentation": "

          Describes a state change for LDAPS.

          " } }, - "LDAPSStatus": { - "target": "com.amazonaws.directoryservice#LDAPSStatus", + "LastUpdatedDateTime": { + "target": "com.amazonaws.directoryservice#LastUpdatedDateTime", "traits": { - "smithy.api#documentation": "

          The state of the LDAPS settings.

          " + "smithy.api#documentation": "

          The date and time when the LDAPS settings were last updated.

          " } } }, @@ -4493,10 +4685,11 @@ "com.amazonaws.directoryservice#ListCertificatesRequest": { "type": "structure", "members": { - "Limit": { - "target": "com.amazonaws.directoryservice#PageLimit", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The number of items that should show up on one page

          " + "smithy.api#documentation": "

          The identifier of the directory.

          ", + "smithy.api#required": {} } }, "NextToken": { @@ -4505,11 +4698,10 @@ "smithy.api#documentation": "

          A token for requesting another page of certificates if the NextToken response\n element indicates that more certificates are available. Use the value of the returned\n NextToken element in your request until the token comes back as\n null. Pass null if this is the first call.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "Limit": { + "target": "com.amazonaws.directoryservice#PageLimit", "traits": { - "smithy.api#documentation": "

          The identifier of the directory.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The number of items that should show up on one page

          " } } } @@ -4563,6 +4755,13 @@ "com.amazonaws.directoryservice#ListIpRoutesRequest": { "type": "structure", "members": { + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", + "traits": { + "smithy.api#documentation": "

          Identifier (ID) of the directory for which you want to retrieve the IP addresses.

          ", + "smithy.api#required": {} + } + }, "NextToken": { "target": "com.amazonaws.directoryservice#NextToken", "traits": { @@ -4574,13 +4773,6 @@ "traits": { "smithy.api#documentation": "

          Maximum number of items to return. If this value is zero, the maximum number of items is specified by the limitations of the operation.

          " } - }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", - "traits": { - "smithy.api#documentation": "

          Identifier (ID) of the directory for which you want to retrieve the IP addresses.

          ", - "smithy.api#required": {} - } } } }, @@ -4630,18 +4822,18 @@ "com.amazonaws.directoryservice#ListLogSubscriptionsRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", - "traits": { - "smithy.api#documentation": "

          The token for the next set of items to return.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          If a DirectoryID is provided, lists only the log subscription\n associated with that directory. If no DirectoryId is provided, lists all\n log subscriptions associated with your AWS account. If there are no log subscriptions for the\n AWS account or the directory, an empty list will be returned.

          " } }, + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", + "traits": { + "smithy.api#documentation": "

          The token for the next set of items to return.

          " + } + }, "Limit": { "target": "com.amazonaws.directoryservice#Limit", "traits": { @@ -4696,12 +4888,6 @@ "com.amazonaws.directoryservice#ListSchemaExtensionsRequest": { "type": "structure", "members": { - "Limit": { - "target": "com.amazonaws.directoryservice#Limit", - "traits": { - "smithy.api#documentation": "

          The maximum number of items to return.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { @@ -4714,23 +4900,29 @@ "traits": { "smithy.api#documentation": "

          The ListSchemaExtensions.NextToken value from a previous call to ListSchemaExtensions. Pass null if this is the first call.

          " } + }, + "Limit": { + "target": "com.amazonaws.directoryservice#Limit", + "traits": { + "smithy.api#documentation": "

          The maximum number of items to return.

          " + } } } }, "com.amazonaws.directoryservice#ListSchemaExtensionsResult": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.directoryservice#NextToken", - "traits": { - "smithy.api#documentation": "

          If not null, more results are available. Pass this value for the NextToken parameter in a subsequent call to ListSchemaExtensions to retrieve the next set of items.

          " - } - }, "SchemaExtensionsInfo": { "target": "com.amazonaws.directoryservice#SchemaExtensionsInfo", "traits": { "smithy.api#documentation": "

          Information about the schema extensions applied to the directory.

          " } + }, + "NextToken": { + "target": "com.amazonaws.directoryservice#NextToken", + "traits": { + "smithy.api#documentation": "

          If not null, more results are available. Pass this value for the NextToken parameter in a subsequent call to ListSchemaExtensions to retrieve the next set of items.

          " + } } } }, @@ -4766,6 +4958,13 @@ "com.amazonaws.directoryservice#ListTagsForResourceRequest": { "type": "structure", "members": { + "ResourceId": { + "target": "com.amazonaws.directoryservice#ResourceId", + "traits": { + "smithy.api#documentation": "

          Identifier (ID) of the directory for which you want to retrieve tags.

          ", + "smithy.api#required": {} + } + }, "NextToken": { "target": "com.amazonaws.directoryservice#NextToken", "traits": { @@ -4777,13 +4976,6 @@ "traits": { "smithy.api#documentation": "

          Reserved for future use.

          " } - }, - "ResourceId": { - "target": "com.amazonaws.directoryservice#ResourceId", - "traits": { - "smithy.api#documentation": "

          Identifier (ID) of the directory for which you want to retrieve tags.

          ", - "smithy.api#required": {} - } } } }, @@ -4817,6 +5009,12 @@ "com.amazonaws.directoryservice#LogSubscription": { "type": "structure", "members": { + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", + "traits": { + "smithy.api#documentation": "

          Identifier (ID) of the directory that you want to associate with the log\n subscription.

          " + } + }, "LogGroupName": { "target": "com.amazonaws.directoryservice#LogGroupName", "traits": { @@ -4828,12 +5026,6 @@ "traits": { "smithy.api#documentation": "

          The date and time that the log subscription was created.

          " } - }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", - "traits": { - "smithy.api#documentation": "

          Identifier (ID) of the directory that you want to associate with the log\n subscription.

          " - } } }, "traits": { @@ -4877,6 +5069,16 @@ "smithy.api#sensitive": {} } }, + "com.amazonaws.directoryservice#OCSPUrl": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1024 + }, + "smithy.api#pattern": "^(https?|ftp|file|ldaps?)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;()]*[-a-zA-Z0-9+&@#/%=~_|()]" + } + }, "com.amazonaws.directoryservice#OrganizationalUnitDN": { "type": "string", "traits": { @@ -4904,24 +5106,6 @@ "com.amazonaws.directoryservice#OwnerDirectoryDescription": { "type": "structure", "members": { - "RadiusSettings": { - "target": "com.amazonaws.directoryservice#RadiusSettings", - "traits": { - "smithy.api#documentation": "

          A RadiusSettings object that contains information about the RADIUS\n server.

          " - } - }, - "DnsIpAddrs": { - "target": "com.amazonaws.directoryservice#DnsIpAddrs", - "traits": { - "smithy.api#documentation": "

          IP address of the directory’s domain controllers.

          " - } - }, - "RadiusStatus": { - "target": "com.amazonaws.directoryservice#RadiusStatus", - "traits": { - "smithy.api#documentation": "

          Information about the status of the RADIUS server.

          " - } - }, "DirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { @@ -4934,11 +5118,29 @@ "smithy.api#documentation": "

          Identifier of the directory owner account.

          " } }, + "DnsIpAddrs": { + "target": "com.amazonaws.directoryservice#DnsIpAddrs", + "traits": { + "smithy.api#documentation": "

          IP address of the directory’s domain controllers.

          " + } + }, "VpcSettings": { "target": "com.amazonaws.directoryservice#DirectoryVpcSettingsDescription", "traits": { "smithy.api#documentation": "

          Information about the VPC settings for the directory.

          " } + }, + "RadiusSettings": { + "target": "com.amazonaws.directoryservice#RadiusSettings", + "traits": { + "smithy.api#documentation": "

          A RadiusSettings object that contains information about the RADIUS\n server.

          " + } + }, + "RadiusStatus": { + "target": "com.amazonaws.directoryservice#RadiusStatus", + "traits": { + "smithy.api#documentation": "

          Information about the status of the RADIUS server.

          " + } } }, "traits": { @@ -5015,16 +5217,10 @@ "com.amazonaws.directoryservice#RadiusSettings": { "type": "structure", "members": { - "DisplayLabel": { - "target": "com.amazonaws.directoryservice#RadiusDisplayLabel", - "traits": { - "smithy.api#documentation": "

          Not currently used.

          " - } - }, - "SharedSecret": { - "target": "com.amazonaws.directoryservice#RadiusSharedSecret", + "RadiusServers": { + "target": "com.amazonaws.directoryservice#Servers", "traits": { - "smithy.api#documentation": "

          Required for enabling RADIUS on the directory.

          " + "smithy.api#documentation": "

          An array of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.

          " } }, "RadiusPort": { @@ -5033,34 +5229,40 @@ "smithy.api#documentation": "

          The port that your RADIUS server is using for communications. Your on-premises network must allow inbound traffic over this port from the AWS Directory Service servers.

          " } }, + "RadiusTimeout": { + "target": "com.amazonaws.directoryservice#RadiusTimeout", + "traits": { + "smithy.api#documentation": "

          The amount of time, in seconds, to wait for the RADIUS server to respond.

          " + } + }, "RadiusRetries": { "target": "com.amazonaws.directoryservice#RadiusRetries", "traits": { "smithy.api#documentation": "

          The maximum number of times that communication with the RADIUS server is attempted.

          " } }, + "SharedSecret": { + "target": "com.amazonaws.directoryservice#RadiusSharedSecret", + "traits": { + "smithy.api#documentation": "

          Required for enabling RADIUS on the directory.

          " + } + }, "AuthenticationProtocol": { "target": "com.amazonaws.directoryservice#RadiusAuthenticationProtocol", "traits": { "smithy.api#documentation": "

          The protocol specified for your RADIUS endpoints.

          " } }, - "UseSameUsername": { - "target": "com.amazonaws.directoryservice#UseSameUsername", + "DisplayLabel": { + "target": "com.amazonaws.directoryservice#RadiusDisplayLabel", "traits": { "smithy.api#documentation": "

          Not currently used.

          " } }, - "RadiusTimeout": { - "target": "com.amazonaws.directoryservice#RadiusTimeout", - "traits": { - "smithy.api#documentation": "

          The amount of time, in seconds, to wait for the RADIUS server to respond.

          " - } - }, - "RadiusServers": { - "target": "com.amazonaws.directoryservice#Servers", + "UseSameUsername": { + "target": "com.amazonaws.directoryservice#UseSameUsername", "traits": { - "smithy.api#documentation": "

          An array of strings that contains the fully qualified domain name (FQDN) or IP addresses of the RADIUS server endpoints, or the FQDN or IP addresses of your RADIUS server load balancer.

          " + "smithy.api#documentation": "

          Not currently used.

          " } } }, @@ -5109,13 +5311,10 @@ "com.amazonaws.directoryservice#RegionDescription": { "type": "structure", "members": { - "VpcSettings": { - "target": "com.amazonaws.directoryservice#DirectoryVpcSettings" - }, - "StatusLastUpdatedDateTime": { - "target": "com.amazonaws.directoryservice#StateLastUpdatedDateTime", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The date and time that the Region status was last updated.

          " + "smithy.api#documentation": "

          The identifier of the directory.

          " } }, "RegionName": { @@ -5127,7 +5326,7 @@ "RegionType": { "target": "com.amazonaws.directoryservice#RegionType", "traits": { - "smithy.api#documentation": "

          Specifies if the Region is the primary Region or an additional Region.

          " + "smithy.api#documentation": "

          Specifies whether the Region is the primary Region or an additional Region.

          " } }, "Status": { @@ -5136,6 +5335,9 @@ "smithy.api#documentation": "

          The status of the replication process for the specified Region.

          " } }, + "VpcSettings": { + "target": "com.amazonaws.directoryservice#DirectoryVpcSettings" + }, "DesiredNumberOfDomainControllers": { "target": "com.amazonaws.directoryservice#DesiredNumberOfDomainControllers", "traits": { @@ -5148,10 +5350,10 @@ "smithy.api#documentation": "

          Specifies when the Region replication began.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "StatusLastUpdatedDateTime": { + "target": "com.amazonaws.directoryservice#StateLastUpdatedDateTime", "traits": { - "smithy.api#documentation": "

          The identifier of the directory.

          " + "smithy.api#documentation": "

          The date and time that the Region status was last updated.

          " } }, "LastUpdatedDateTime": { @@ -5162,7 +5364,7 @@ } }, "traits": { - "smithy.api#documentation": "

          The replicated regional information for a directory.

          " + "smithy.api#documentation": "

          The replicated Region information for a directory.

          " } }, "com.amazonaws.directoryservice#RegionLimitExceededException": { @@ -5176,7 +5378,7 @@ } }, "traits": { - "smithy.api#documentation": "

          You have reached the limit for maximum number of simultaneous region replications per directory.

          ", + "smithy.api#documentation": "

          You have reached the limit for maximum number of simultaneous Region replications per directory.

          ", "smithy.api#error": "client" } }, @@ -5216,7 +5418,7 @@ "PrimaryRegion": { "target": "com.amazonaws.directoryservice#RegionName", "traits": { - "smithy.api#documentation": "

          The Region from where the AWS Managed Microsoft AD directory was originally created.

          " + "smithy.api#documentation": "

          The Region where the AWS Managed Microsoft AD directory was originally created.

          " } }, "AdditionalRegions": { @@ -5287,6 +5489,15 @@ "smithy.api#documentation": "

          The certificate PEM string that needs to be registered.

          ", "smithy.api#required": {} } + }, + "Type": { + "target": "com.amazonaws.directoryservice#CertificateType", + "traits": { + "smithy.api#documentation": "

          The certificate type to register for the request.

          " + } + }, + "ClientCertAuthSettings": { + "target": "com.amazonaws.directoryservice#ClientCertAuthSettings" } } }, @@ -5330,17 +5541,17 @@ "com.amazonaws.directoryservice#RegisterEventTopicRequest": { "type": "structure", "members": { - "TopicName": { - "target": "com.amazonaws.directoryservice#TopicName", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The SNS topic name to which the directory will publish status messages. This SNS topic must be in the same region as the specified Directory ID.

          ", + "smithy.api#documentation": "

          The Directory ID that will publish status messages to the SNS topic.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "TopicName": { + "target": "com.amazonaws.directoryservice#TopicName", "traits": { - "smithy.api#documentation": "

          The Directory ID that will publish status messages to the SNS topic.

          ", + "smithy.api#documentation": "

          The SNS topic name to which the directory will publish status messages. This SNS topic must be in the same region as the specified Directory ID.

          ", "smithy.api#required": {} } } @@ -5549,17 +5760,17 @@ "com.amazonaws.directoryservice#RemoveTagsFromResourceRequest": { "type": "structure", "members": { - "TagKeys": { - "target": "com.amazonaws.directoryservice#TagKeys", + "ResourceId": { + "target": "com.amazonaws.directoryservice#ResourceId", "traits": { - "smithy.api#documentation": "

          The tag key (name) of the tag to be removed.

          ", + "smithy.api#documentation": "

          Identifier (ID) of the directory from which to remove the tag.

          ", "smithy.api#required": {} } }, - "ResourceId": { - "target": "com.amazonaws.directoryservice#ResourceId", + "TagKeys": { + "target": "com.amazonaws.directoryservice#TagKeys", "traits": { - "smithy.api#documentation": "

          Identifier (ID) of the directory from which to remove the tag.

          ", + "smithy.api#documentation": "

          The tag key (name) of the tag to be removed.

          ", "smithy.api#required": {} } } @@ -5632,17 +5843,17 @@ "smithy.api#required": {} } }, - "NewPassword": { - "target": "com.amazonaws.directoryservice#UserPassword", + "UserName": { + "target": "com.amazonaws.directoryservice#CustomerUserName", "traits": { - "smithy.api#documentation": "

          The new password that will be reset.

          ", + "smithy.api#documentation": "

          The user name of the user whose password will be reset.

          ", "smithy.api#required": {} } }, - "UserName": { - "target": "com.amazonaws.directoryservice#CustomerUserName", + "NewPassword": { + "target": "com.amazonaws.directoryservice#UserPassword", "traits": { - "smithy.api#documentation": "

          The user name of the user whose password will be reset.

          ", + "smithy.api#documentation": "

          The new password that will be reset.

          ", "smithy.api#required": {} } } @@ -5725,10 +5936,10 @@ "com.amazonaws.directoryservice#SchemaExtensionInfo": { "type": "structure", "members": { - "StartDateTime": { - "target": "com.amazonaws.directoryservice#StartDateTime", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The date and time that the schema extension started being applied to the directory.

          " + "smithy.api#documentation": "

          The identifier of the directory to which the schema extension is applied.

          " } }, "SchemaExtensionId": { @@ -5737,10 +5948,10 @@ "smithy.api#documentation": "

          The identifier of the schema extension.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "Description": { + "target": "com.amazonaws.directoryservice#Description", "traits": { - "smithy.api#documentation": "

          The identifier of the directory to which the schema extension is applied.

          " + "smithy.api#documentation": "

          A description of the schema extension.

          " } }, "SchemaExtensionStatus": { @@ -5755,10 +5966,10 @@ "smithy.api#documentation": "

          The reason for the SchemaExtensionStatus.

          " } }, - "Description": { - "target": "com.amazonaws.directoryservice#Description", + "StartDateTime": { + "target": "com.amazonaws.directoryservice#StartDateTime", "traits": { - "smithy.api#documentation": "

          A description of the schema extension.

          " + "smithy.api#documentation": "

          The date and time that the schema extension started being applied to the directory.

          " } }, "EndDateTime": { @@ -5929,6 +6140,12 @@ "smithy.api#required": {} } }, + "ShareNotes": { + "target": "com.amazonaws.directoryservice#Notes", + "traits": { + "smithy.api#documentation": "

          A directory share request that is sent by the directory owner to the directory consumer.\n The request includes a typed message to help the directory consumer administrator determine\n whether to approve or reject the share invitation.

          " + } + }, "ShareTarget": { "target": "com.amazonaws.directoryservice#ShareTarget", "traits": { @@ -5939,14 +6156,8 @@ "ShareMethod": { "target": "com.amazonaws.directoryservice#ShareMethod", "traits": { - "smithy.api#documentation": "

          The method used when sharing a directory to determine whether the directory should be\n shared within your AWS organization (ORGANIZATIONS) or with any AWS account by\n sending a directory sharing request (HANDSHAKE).

          ", - "smithy.api#required": {} - } - }, - "ShareNotes": { - "target": "com.amazonaws.directoryservice#Notes", - "traits": { - "smithy.api#documentation": "

          A directory share request that is sent by the directory owner to the directory consumer.\n The request includes a typed message to help the directory consumer administrator determine\n whether to approve or reject the share invitation.

          " + "smithy.api#documentation": "

          The method used when sharing a directory to determine whether the directory should be\n shared within your AWS organization (ORGANIZATIONS) or with any AWS account by\n sending a directory sharing request (HANDSHAKE).

          ", + "smithy.api#required": {} } } } @@ -5965,11 +6176,11 @@ "com.amazonaws.directoryservice#ShareLimitExceededException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -6038,17 +6249,17 @@ "com.amazonaws.directoryservice#ShareTarget": { "type": "structure", "members": { - "Type": { - "target": "com.amazonaws.directoryservice#TargetType", + "Id": { + "target": "com.amazonaws.directoryservice#TargetId", "traits": { - "smithy.api#documentation": "

          Type of identifier to be used in the Id field.

          ", + "smithy.api#documentation": "

          Identifier of the directory consumer account.

          ", "smithy.api#required": {} } }, - "Id": { - "target": "com.amazonaws.directoryservice#TargetId", + "Type": { + "target": "com.amazonaws.directoryservice#TargetType", "traits": { - "smithy.api#documentation": "

          Identifier of the directory consumer account.

          ", + "smithy.api#documentation": "

          Type of identifier to be used in the Id field.

          ", "smithy.api#required": {} } } @@ -6066,10 +6277,10 @@ "com.amazonaws.directoryservice#SharedDirectory": { "type": "structure", "members": { - "ShareStatus": { - "target": "com.amazonaws.directoryservice#ShareStatus", + "OwnerAccountId": { + "target": "com.amazonaws.directoryservice#CustomerId", "traits": { - "smithy.api#documentation": "

          Current directory status of the shared AWS Managed Microsoft AD directory.

          " + "smithy.api#documentation": "

          Identifier of the directory owner account, which contains the directory that has been\n shared to the consumer account.

          " } }, "OwnerDirectoryId": { @@ -6078,16 +6289,28 @@ "smithy.api#documentation": "

          Identifier of the directory in the directory owner account.

          " } }, + "ShareMethod": { + "target": "com.amazonaws.directoryservice#ShareMethod", + "traits": { + "smithy.api#documentation": "

          The method used when sharing a directory to determine whether the directory should be\n shared within your AWS organization (ORGANIZATIONS) or with any AWS account by\n sending a shared directory request (HANDSHAKE).

          " + } + }, + "SharedAccountId": { + "target": "com.amazonaws.directoryservice#CustomerId", + "traits": { + "smithy.api#documentation": "

          Identifier of the directory consumer account that has access to the shared directory\n (OwnerDirectoryId) in the directory owner account.

          " + } + }, "SharedDirectoryId": { "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { "smithy.api#documentation": "

          Identifier of the shared directory in the directory consumer account. This identifier is\n different for each directory owner account.

          " } }, - "OwnerAccountId": { - "target": "com.amazonaws.directoryservice#CustomerId", + "ShareStatus": { + "target": "com.amazonaws.directoryservice#ShareStatus", "traits": { - "smithy.api#documentation": "

          Identifier of the directory owner account, which contains the directory that has been\n shared to the consumer account.

          " + "smithy.api#documentation": "

          Current directory status of the shared AWS Managed Microsoft AD directory.

          " } }, "ShareNotes": { @@ -6096,12 +6319,6 @@ "smithy.api#documentation": "

          A directory share request that is sent by the directory owner to the directory consumer.\n The request includes a typed message to help the directory consumer administrator determine\n whether to approve or reject the share invitation.

          " } }, - "ShareMethod": { - "target": "com.amazonaws.directoryservice#ShareMethod", - "traits": { - "smithy.api#documentation": "

          The method used when sharing a directory to determine whether the directory should be\n shared within your AWS organization (ORGANIZATIONS) or with any AWS account by\n sending a shared directory request (HANDSHAKE).

          " - } - }, "CreatedDateTime": { "target": "com.amazonaws.directoryservice#CreatedDateTime", "traits": { @@ -6113,12 +6330,6 @@ "traits": { "smithy.api#documentation": "

          The date and time that the shared directory was last updated.

          " } - }, - "SharedAccountId": { - "target": "com.amazonaws.directoryservice#CustomerId", - "traits": { - "smithy.api#documentation": "

          Identifier of the directory consumer account that has access to the shared directory\n (OwnerDirectoryId) in the directory owner account.

          " - } } }, "traits": { @@ -6128,16 +6339,10 @@ "com.amazonaws.directoryservice#Snapshot": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.directoryservice#SnapshotName", - "traits": { - "smithy.api#documentation": "

          The descriptive name of the snapshot.

          " - } - }, - "Type": { - "target": "com.amazonaws.directoryservice#SnapshotType", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The snapshot type.

          " + "smithy.api#documentation": "

          The directory identifier.

          " } }, "SnapshotId": { @@ -6146,10 +6351,16 @@ "smithy.api#documentation": "

          The snapshot identifier.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "Type": { + "target": "com.amazonaws.directoryservice#SnapshotType", "traits": { - "smithy.api#documentation": "

          The directory identifier.

          " + "smithy.api#documentation": "

          The snapshot type.

          " + } + }, + "Name": { + "target": "com.amazonaws.directoryservice#SnapshotName", + "traits": { + "smithy.api#documentation": "

          The descriptive name of the snapshot.

          " } }, "Status": { @@ -6187,11 +6398,11 @@ "com.amazonaws.directoryservice#SnapshotLimitExceededException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -6202,16 +6413,16 @@ "com.amazonaws.directoryservice#SnapshotLimits": { "type": "structure", "members": { - "ManualSnapshotsCurrentCount": { + "ManualSnapshotsLimit": { "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          The current number of manual snapshots of the directory.

          " + "smithy.api#documentation": "

          The maximum number of manual snapshots allowed.

          " } }, - "ManualSnapshotsLimit": { + "ManualSnapshotsCurrentCount": { "target": "com.amazonaws.directoryservice#Limit", "traits": { - "smithy.api#documentation": "

          The maximum number of manual snapshots allowed.

          " + "smithy.api#documentation": "

          The current number of manual snapshots of the directory.

          " } }, "ManualSnapshotsLimitReached": { @@ -6322,17 +6533,17 @@ "com.amazonaws.directoryservice#StartSchemaExtensionRequest": { "type": "structure", "members": { - "Description": { - "target": "com.amazonaws.directoryservice#Description", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          A description of the schema extension.

          ", + "smithy.api#documentation": "

          The identifier of the directory for which the schema extension will be applied to.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "CreateSnapshotBeforeSchemaExtension": { + "target": "com.amazonaws.directoryservice#CreateSnapshotBeforeSchemaExtension", "traits": { - "smithy.api#documentation": "

          The identifier of the directory for which the schema extension will be applied to.

          ", + "smithy.api#documentation": "

          If true, creates a snapshot of the directory before applying the schema extension.

          ", "smithy.api#required": {} } }, @@ -6343,10 +6554,10 @@ "smithy.api#required": {} } }, - "CreateSnapshotBeforeSchemaExtension": { - "target": "com.amazonaws.directoryservice#CreateSnapshotBeforeSchemaExtension", + "Description": { + "target": "com.amazonaws.directoryservice#Description", "traits": { - "smithy.api#documentation": "

          If true, creates a snapshot of the directory before applying the schema extension.

          ", + "smithy.api#documentation": "

          A description of the schema extension.

          ", "smithy.api#required": {} } } @@ -6387,17 +6598,17 @@ "com.amazonaws.directoryservice#Tag": { "type": "structure", "members": { - "Value": { - "target": "com.amazonaws.directoryservice#TagValue", + "Key": { + "target": "com.amazonaws.directoryservice#TagKey", "traits": { - "smithy.api#documentation": "

          The optional value of the tag. The string value can be Unicode characters. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: \"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-]*)$\").

          ", + "smithy.api#documentation": "

          Required name of the tag. The string value can be Unicode characters and cannot be prefixed with \"aws:\". The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: \"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-]*)$\").

          ", "smithy.api#required": {} } }, - "Key": { - "target": "com.amazonaws.directoryservice#TagKey", + "Value": { + "target": "com.amazonaws.directoryservice#TagValue", "traits": { - "smithy.api#documentation": "

          Required name of the tag. The string value can be Unicode characters and cannot be prefixed with \"aws:\". The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: \"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-]*)$\").

          ", + "smithy.api#documentation": "

          The optional value of the tag. The string value can be Unicode characters. The string can contain only the set of Unicode letters, digits, white-space, '_', '.', '/', '=', '+', '-' (Java regex: \"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=+\\\\-]*)$\").

          ", "smithy.api#required": {} } } @@ -6425,11 +6636,11 @@ "com.amazonaws.directoryservice#TagLimitExceededException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -6518,22 +6729,28 @@ "com.amazonaws.directoryservice#Trust": { "type": "structure", "members": { - "LastUpdatedDateTime": { - "target": "com.amazonaws.directoryservice#LastUpdatedDateTime", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The date and time that the trust relationship was last updated.

          " + "smithy.api#documentation": "

          The Directory ID of the AWS directory involved in the trust relationship.

          " } }, - "StateLastUpdatedDateTime": { - "target": "com.amazonaws.directoryservice#StateLastUpdatedDateTime", + "TrustId": { + "target": "com.amazonaws.directoryservice#TrustId", "traits": { - "smithy.api#documentation": "

          The date and time that the TrustState was last updated.

          " + "smithy.api#documentation": "

          The unique ID of the trust relationship.

          " } }, - "TrustStateReason": { - "target": "com.amazonaws.directoryservice#TrustStateReason", + "RemoteDomainName": { + "target": "com.amazonaws.directoryservice#RemoteDomainName", "traits": { - "smithy.api#documentation": "

          The reason for the TrustState.

          " + "smithy.api#documentation": "

          The Fully Qualified Domain Name (FQDN) of the external domain involved in the trust relationship.

          " + } + }, + "TrustType": { + "target": "com.amazonaws.directoryservice#TrustType", + "traits": { + "smithy.api#documentation": "

          The trust relationship type. Forest is the default.

          " } }, "TrustDirection": { @@ -6542,34 +6759,34 @@ "smithy.api#documentation": "

          The trust relationship direction.

          " } }, - "RemoteDomainName": { - "target": "com.amazonaws.directoryservice#RemoteDomainName", + "TrustState": { + "target": "com.amazonaws.directoryservice#TrustState", "traits": { - "smithy.api#documentation": "

          The Fully Qualified Domain Name (FQDN) of the external domain involved in the trust relationship.

          " + "smithy.api#documentation": "

          The trust relationship state.

          " } }, - "TrustId": { - "target": "com.amazonaws.directoryservice#TrustId", + "CreatedDateTime": { + "target": "com.amazonaws.directoryservice#CreatedDateTime", "traits": { - "smithy.api#documentation": "

          The unique ID of the trust relationship.

          " + "smithy.api#documentation": "

          The date and time that the trust relationship was created.

          " } }, - "TrustType": { - "target": "com.amazonaws.directoryservice#TrustType", + "LastUpdatedDateTime": { + "target": "com.amazonaws.directoryservice#LastUpdatedDateTime", "traits": { - "smithy.api#documentation": "

          The trust relationship type. Forest is the default.

          " + "smithy.api#documentation": "

          The date and time that the trust relationship was last updated.

          " } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "StateLastUpdatedDateTime": { + "target": "com.amazonaws.directoryservice#StateLastUpdatedDateTime", "traits": { - "smithy.api#documentation": "

          The Directory ID of the AWS directory involved in the trust relationship.

          " + "smithy.api#documentation": "

          The date and time that the TrustState was last updated.

          " } }, - "TrustState": { - "target": "com.amazonaws.directoryservice#TrustState", + "TrustStateReason": { + "target": "com.amazonaws.directoryservice#TrustStateReason", "traits": { - "smithy.api#documentation": "

          The trust relationship state.

          " + "smithy.api#documentation": "

          The reason for the TrustState.

          " } }, "SelectiveAuth": { @@ -6577,12 +6794,6 @@ "traits": { "smithy.api#documentation": "

          Current state of selective authentication for the trust.

          " } - }, - "CreatedDateTime": { - "target": "com.amazonaws.directoryservice#CreatedDateTime", - "traits": { - "smithy.api#documentation": "

          The date and time that the trust relationship was created.

          " - } } }, "traits": { @@ -6738,17 +6949,17 @@ "com.amazonaws.directoryservice#UnshareDirectoryRequest": { "type": "structure", "members": { - "UnshareTarget": { - "target": "com.amazonaws.directoryservice#UnshareTarget", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          Identifier for the directory consumer account with whom the directory has to be\n unshared.

          ", + "smithy.api#documentation": "

          The identifier of the AWS Managed Microsoft AD directory that you want to stop\n sharing.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "UnshareTarget": { + "target": "com.amazonaws.directoryservice#UnshareTarget", "traits": { - "smithy.api#documentation": "

          The identifier of the AWS Managed Microsoft AD directory that you want to stop\n sharing.

          ", + "smithy.api#documentation": "

          Identifier for the directory consumer account with whom the directory has to be\n unshared.

          ", "smithy.api#required": {} } } @@ -6790,11 +7001,11 @@ "com.amazonaws.directoryservice#UnsupportedOperationException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { @@ -6844,17 +7055,17 @@ "smithy.api#required": {} } }, - "DnsIpAddrs": { - "target": "com.amazonaws.directoryservice#DnsIpAddrs", + "RemoteDomainName": { + "target": "com.amazonaws.directoryservice#RemoteDomainName", "traits": { - "smithy.api#documentation": "

          The updated IP addresses of the remote DNS server associated with the conditional forwarder.

          ", + "smithy.api#documentation": "

          The fully qualified domain name (FQDN) of the remote domain with which you will set up a trust relationship.

          ", "smithy.api#required": {} } }, - "RemoteDomainName": { - "target": "com.amazonaws.directoryservice#RemoteDomainName", + "DnsIpAddrs": { + "target": "com.amazonaws.directoryservice#DnsIpAddrs", "traits": { - "smithy.api#documentation": "

          The fully qualified domain name (FQDN) of the remote domain with which you will set up a trust relationship.

          ", + "smithy.api#documentation": "

          The updated IP addresses of the remote DNS server associated with the conditional forwarder.

          ", "smithy.api#required": {} } } @@ -6908,17 +7119,17 @@ "com.amazonaws.directoryservice#UpdateNumberOfDomainControllersRequest": { "type": "structure", "members": { - "DesiredNumber": { - "target": "com.amazonaws.directoryservice#DesiredNumberOfDomainControllers", + "DirectoryId": { + "target": "com.amazonaws.directoryservice#DirectoryId", "traits": { - "smithy.api#documentation": "

          The number of domain controllers desired in the directory.

          ", + "smithy.api#documentation": "

          Identifier of the directory to which the domain controllers will be added or removed.

          ", "smithy.api#required": {} } }, - "DirectoryId": { - "target": "com.amazonaws.directoryservice#DirectoryId", + "DesiredNumber": { + "target": "com.amazonaws.directoryservice#DesiredNumberOfDomainControllers", "traits": { - "smithy.api#documentation": "

          Identifier of the directory to which the domain controllers will be added or removed.

          ", + "smithy.api#documentation": "

          The number of domain controllers desired in the directory.

          ", "smithy.api#required": {} } } @@ -7015,32 +7226,32 @@ "com.amazonaws.directoryservice#UpdateTrustRequest": { "type": "structure", "members": { - "SelectiveAuth": { - "target": "com.amazonaws.directoryservice#SelectiveAuth", - "traits": { - "smithy.api#documentation": "

          Updates selective authentication for the trust.

          " - } - }, "TrustId": { "target": "com.amazonaws.directoryservice#TrustId", "traits": { "smithy.api#documentation": "

          Identifier of the trust relationship.

          ", "smithy.api#required": {} } + }, + "SelectiveAuth": { + "target": "com.amazonaws.directoryservice#SelectiveAuth", + "traits": { + "smithy.api#documentation": "

          Updates selective authentication for the trust.

          " + } } } }, "com.amazonaws.directoryservice#UpdateTrustResult": { "type": "structure", "members": { + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" + }, "TrustId": { "target": "com.amazonaws.directoryservice#TrustId", "traits": { "smithy.api#documentation": "

          Identifier of the trust relationship.

          " } - }, - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" } } }, @@ -7050,11 +7261,11 @@ "com.amazonaws.directoryservice#UserDoesNotExistException": { "type": "structure", "members": { - "RequestId": { - "target": "com.amazonaws.directoryservice#RequestId" - }, "Message": { "target": "com.amazonaws.directoryservice#ExceptionMessage" + }, + "RequestId": { + "target": "com.amazonaws.directoryservice#RequestId" } }, "traits": { diff --git a/codegen/sdk-codegen/aws-models/ec2.2016-11-15.json b/codegen/sdk-codegen/aws-models/ec2.2016-11-15.json index 93d22ed6fca6..2b8193d9e6e5 100644 --- a/codegen/sdk-codegen/aws-models/ec2.2016-11-15.json +++ b/codegen/sdk-codegen/aws-models/ec2.2016-11-15.json @@ -3475,7 +3475,7 @@ "target": "com.amazonaws.ec2#VolumeAttachment" }, "traits": { - "smithy.api#documentation": "

          Attaches an EBS volume to a running or stopped instance and exposes it to the instance\n with the specified device name.

          \n

          Encrypted EBS volumes must be attached to instances that support Amazon EBS encryption. For\n more information, see Amazon EBS Encryption in the Amazon Elastic Compute Cloud User Guide.

          \n

          After you attach an EBS volume, you must make it available. For more information, see \n Making an EBS volume available for use.

          \n

          If a volume has an AWS Marketplace product code:

          \n
            \n
          • \n

            The volume can be attached only to a stopped instance.

            \n
          • \n
          • \n

            AWS Marketplace product codes are copied from the volume to the instance.

            \n
          • \n
          • \n

            You must be subscribed to the product.

            \n
          • \n
          • \n

            The instance type and operating system of the instance must support the product. For\n example, you can't detach a volume from a Windows instance and attach it to a Linux\n instance.

            \n
          • \n
          \n

          For more information, see Attaching Amazon EBS volumes in the\n Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Attaches an EBS volume to a running or stopped instance and exposes it to the instance\n with the specified device name.

          \n

          Encrypted EBS volumes must be attached to instances that support Amazon EBS encryption. For\n more information, see Amazon EBS encryption in the Amazon Elastic Compute Cloud User Guide.

          \n

          After you attach an EBS volume, you must make it available. For more information, see \n Making an EBS volume available for use.

          \n

          If a volume has an AWS Marketplace product code:

          \n
            \n
          • \n

            The volume can be attached only to a stopped instance.

            \n
          • \n
          • \n

            AWS Marketplace product codes are copied from the volume to the instance.

            \n
          • \n
          • \n

            You must be subscribed to the product.

            \n
          • \n
          • \n

            The instance type and operating system of the instance must support the product. For\n example, you can't detach a volume from a Windows instance and attach it to a Linux\n instance.

            \n
          • \n
          \n

          For more information, see Attaching Amazon EBS volumes in the\n Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#AttachVolumeRequest": { @@ -7369,7 +7369,7 @@ "target": "com.amazonaws.ec2#Boolean", "traits": { "aws.protocols#ec2QueryName": "Encrypted", - "smithy.api#documentation": "

          To encrypt a copy of an unencrypted snapshot if encryption by default is not enabled, \n enable encryption using this parameter. Otherwise, omit this parameter. Encrypted snapshots \n are encrypted, even if you omit this parameter and encryption by default is not enabled. You \n cannot set this parameter to false. For more information, see Amazon EBS Encryption in the \n Amazon Elastic Compute Cloud User Guide.

          ", + "smithy.api#documentation": "

          To encrypt a copy of an unencrypted snapshot if encryption by default is not enabled, \n enable encryption using this parameter. Otherwise, omit this parameter. Encrypted snapshots \n are encrypted, even if you omit this parameter and encryption by default is not enabled. You \n cannot set this parameter to false. For more information, see Amazon EBS encryption in the \n Amazon Elastic Compute Cloud User Guide.

          ", "smithy.api#xmlName": "encrypted" } }, @@ -7385,7 +7385,7 @@ "target": "com.amazonaws.ec2#String", "traits": { "aws.protocols#ec2QueryName": "PresignedUrl", - "smithy.api#documentation": "

          When you copy an encrypted source snapshot using the Amazon EC2 Query API, you must supply a\n pre-signed URL. This parameter is optional for unencrypted snapshots. For more information,\n see Query\n Requests.

          \n

          The PresignedUrl should use the snapshot source endpoint, the\n CopySnapshot action, and include the SourceRegion,\n SourceSnapshotId, and DestinationRegion parameters. The\n PresignedUrl must be signed using AWS Signature Version 4. Because EBS\n snapshots are stored in Amazon S3, the signing algorithm for this parameter uses the same logic\n that is described in Authenticating Requests by Using Query\n Parameters (AWS Signature Version 4) in the Amazon Simple Storage Service API Reference. An\n invalid or improperly signed PresignedUrl will cause the copy operation to fail\n asynchronously, and the snapshot will move to an error state.

          ", + "smithy.api#documentation": "

          When you copy an encrypted source snapshot using the Amazon EC2 Query API, you must supply a\n pre-signed URL. This parameter is optional for unencrypted snapshots. For more information,\n see Query\n requests.

          \n

          The PresignedUrl should use the snapshot source endpoint, the\n CopySnapshot action, and include the SourceRegion,\n SourceSnapshotId, and DestinationRegion parameters. The\n PresignedUrl must be signed using AWS Signature Version 4. Because EBS\n snapshots are stored in Amazon S3, the signing algorithm for this parameter uses the same logic\n that is described in Authenticating Requests: Using Query\n Parameters (AWS Signature Version 4) in the Amazon Simple Storage Service API Reference. An\n invalid or improperly signed PresignedUrl will cause the copy operation to fail\n asynchronously, and the snapshot will move to an error state.

          ", "smithy.api#xmlName": "presignedUrl" } }, @@ -10038,7 +10038,7 @@ "target": "com.amazonaws.ec2#Snapshot" }, "traits": { - "smithy.api#documentation": "

          Creates a snapshot of an EBS volume and stores it in Amazon S3. You can use snapshots for\n backups, to make copies of EBS volumes, and to save data before shutting down an\n instance.

          \n

          When a snapshot is created, any AWS Marketplace product codes that are associated with the\n source volume are propagated to the snapshot.

          \n

          You can take a snapshot of an attached volume that is in use. However, snapshots only\n capture data that has been written to your EBS volume at the time the snapshot command is\n issued; this may exclude any data that has been cached by any applications or the operating\n system. If you can pause any file systems on the volume long enough to take a snapshot, your\n snapshot should be complete. However, if you cannot pause all file writes to the volume, you\n should unmount the volume from within the instance, issue the snapshot command, and then\n remount the volume to ensure a consistent and complete snapshot. You may remount and use your\n volume while the snapshot status is pending.

          \n

          To create a snapshot for EBS volumes that serve as root devices, you should stop the\n instance before taking the snapshot.

          \n

          Snapshots that are taken from encrypted volumes are automatically encrypted. Volumes that\n are created from encrypted snapshots are also automatically encrypted. Your encrypted volumes\n and any associated snapshots always remain protected.

          \n

          You can tag your snapshots during creation. For more information, see Tagging your Amazon EC2\n resources in the Amazon Elastic Compute Cloud User Guide.

          \n

          For more information, see Amazon Elastic Block Store and Amazon EBS Encryption in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Creates a snapshot of an EBS volume and stores it in Amazon S3. You can use snapshots for\n backups, to make copies of EBS volumes, and to save data before shutting down an\n instance.

          \n

          When a snapshot is created, any AWS Marketplace product codes that are associated with the\n source volume are propagated to the snapshot.

          \n

          You can take a snapshot of an attached volume that is in use. However, snapshots only\n capture data that has been written to your EBS volume at the time the snapshot command is\n issued; this might exclude any data that has been cached by any applications or the operating\n system. If you can pause any file systems on the volume long enough to take a snapshot, your\n snapshot should be complete. However, if you cannot pause all file writes to the volume, you\n should unmount the volume from within the instance, issue the snapshot command, and then\n remount the volume to ensure a consistent and complete snapshot. You may remount and use your\n volume while the snapshot status is pending.

          \n

          To create a snapshot for EBS volumes that serve as root devices, you should stop the\n instance before taking the snapshot.

          \n

          Snapshots that are taken from encrypted volumes are automatically encrypted. Volumes that\n are created from encrypted snapshots are also automatically encrypted. Your encrypted volumes\n and any associated snapshots always remain protected.

          \n

          You can tag your snapshots during creation. For more information, see Tagging your Amazon EC2\n resources in the Amazon Elastic Compute Cloud User Guide.

          \n

          For more information, see Amazon Elastic Block Store and Amazon EBS encryption in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#CreateSnapshotRequest": { @@ -11132,7 +11132,7 @@ "target": "com.amazonaws.ec2#Volume" }, "traits": { - "smithy.api#documentation": "

          Creates an EBS volume that can be attached to an instance in the same Availability Zone.\n The volume is created in the regional endpoint that you send the HTTP request to. For more\n information see Regions and\n Endpoints.

          \n

          You can create a new empty volume or restore a volume from an EBS snapshot.\n Any AWS Marketplace product codes from the snapshot are propagated to the volume.

          \n

          You can create encrypted volumes. Encrypted volumes must be attached to instances that \n support Amazon EBS encryption. Volumes that are created from encrypted snapshots are also automatically \n encrypted. For more information, see Amazon EBS Encryption\n in the Amazon Elastic Compute Cloud User Guide.

          \n

          You can tag your volumes during creation. For more information, see Tagging your Amazon EC2\n resources in the Amazon Elastic Compute Cloud User Guide.

          \n

          For more information, see Creating an Amazon EBS volume in the\n Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Creates an EBS volume that can be attached to an instance in the same Availability Zone.

          \n

          You can create a new empty volume or restore a volume from an EBS snapshot.\n Any AWS Marketplace product codes from the snapshot are propagated to the volume.

          \n

          You can create encrypted volumes. Encrypted volumes must be attached to instances that \n support Amazon EBS encryption. Volumes that are created from encrypted snapshots are also automatically \n encrypted. For more information, see Amazon EBS encryption\n in the Amazon Elastic Compute Cloud User Guide.

          \n

          You can tag your volumes during creation. For more information, see Tagging your Amazon EC2\n resources in the Amazon Elastic Compute Cloud User Guide.

          \n

          For more information, see Creating an Amazon EBS volume in the\n Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#CreateVolumePermission": { @@ -11202,14 +11202,14 @@ "target": "com.amazonaws.ec2#Boolean", "traits": { "aws.protocols#ec2QueryName": "Encrypted", - "smithy.api#documentation": "

          Specifies whether the volume should be encrypted. \n The effect of setting the encryption state to true depends on \nthe volume origin (new or from a snapshot), starting encryption state, ownership, and whether encryption by default is enabled. \n For more information, see Encryption by default\n in the Amazon Elastic Compute Cloud User Guide.

          \n

          Encrypted Amazon EBS volumes must be attached to instances that support Amazon EBS encryption. \n For more information, see Supported\n instance types.

          ", + "smithy.api#documentation": "

          Indicates whether the volume should be encrypted. \n The effect of setting the encryption state to true depends on \nthe volume origin (new or from a snapshot), starting encryption state, ownership, and whether encryption by default is enabled. \n For more information, see Encryption by default\n in the Amazon Elastic Compute Cloud User Guide.

          \n

          Encrypted Amazon EBS volumes must be attached to instances that support Amazon EBS encryption. \n For more information, see Supported\n instance types.

          ", "smithy.api#xmlName": "encrypted" } }, "Iops": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The number of I/O operations per second (IOPS) to provision for an io1 or io2 volume, with a maximum\n \tratio of 50 IOPS/GiB for io1, and 500 IOPS/GiB for io2. Range is 100 to 64,000 IOPS for\n volumes in most Regions. Maximum IOPS of 64,000 is guaranteed only on\n Nitro-based instances. Other instance families guarantee performance up to\n 32,000 IOPS. For more information, see Amazon EBS volume types in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          This parameter is valid only for Provisioned IOPS SSD (io1 and io2) volumes.

          " + "smithy.api#documentation": "

          The number of I/O operations per second (IOPS). For gp3, io1, and io2 volumes, this represents \n the number of IOPS that are provisioned for the volume. For gp2 volumes, this represents the baseline \n performance of the volume and the rate at which the volume accumulates I/O credits for bursting.

          \n

          The following are the supported values for each volume type:

          \n
            \n
          • \n

            \n gp3: 3,000-16,000 IOPS

            \n
          • \n
          • \n

            \n io1: 100-64,000 IOPS

            \n
          • \n
          • \n

            \n io2: 100-64,000 IOPS

            \n
          • \n
          \n

          For io1 and io2 volumes, we guarantee 64,000 IOPS only for \n Instances built on the Nitro System. Other instance families guarantee performance \n up to 32,000 IOPS.

          \n

          This parameter is required for io1 and io2 volumes.\n The default for gp3 volumes is 3,000 IOPS.\n This parameter is not supported for gp2, st1, sc1, or standard volumes.

          " } }, "KmsKeyId": { @@ -11227,7 +11227,7 @@ "Size": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size.

          \n

          Constraints: 1-16,384 for gp2,\n 4-16,384 for io1 and io2, 500-16,384 for\n st1, 500-16,384 for sc1, and\n 1-1,024 for standard. If you specify a\n snapshot, the volume size must be equal to or larger than the snapshot size.

          \n

          Default: If you're creating the volume from a snapshot and don't specify a volume size,\n the default is the snapshot size.

          " + "smithy.api#documentation": "

          The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size.\n If you specify a snapshot, the default is the snapshot size. You can specify a volume \n size that is equal to or larger than the snapshot size.

          \n

          The following are the supported volumes sizes for each volume type:

          \n
            \n
          • \n

            \n gp2 and gp3: 1-16,384

            \n
          • \n
          • \n

            \n io1 and io2: 4-16,384

            \n
          • \n
          • \n

            \n st1 and sc1: 125-16,384

            \n
          • \n
          • \n

            \n standard: 1-1,024

            \n
          • \n
          " } }, "SnapshotId": { @@ -11239,7 +11239,7 @@ "VolumeType": { "target": "com.amazonaws.ec2#VolumeType", "traits": { - "smithy.api#documentation": "

          The volume type. This can be gp2 for General Purpose SSD, io1 or io2 for Provisioned IOPS SSD,\n st1 for Throughput Optimized HDD, sc1 for Cold HDD, or\n standard for Magnetic volumes.

          \n

          Default: gp2\n

          " + "smithy.api#documentation": "

          The volume type. This parameter can be one of the following values:

          \n
            \n
          • \n

            General Purpose SSD: gp2 | gp3\n

            \n
          • \n
          • \n

            Provisioned IOPS SSD: io1 | io2\n

            \n
          • \n
          • \n

            Throughput Optimized HDD: st1\n

            \n
          • \n
          • \n

            Cold HDD: sc1\n

            \n
          • \n
          • \n

            Magnetic: standard\n

            \n
          • \n
          \n

          For more information, see Amazon EBS volume types in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          Default: gp2\n

          " } }, "DryRun": { @@ -11260,7 +11260,13 @@ "MultiAttachEnabled": { "target": "com.amazonaws.ec2#Boolean", "traits": { - "smithy.api#documentation": "

          Specifies whether to enable Amazon EBS Multi-Attach. If you enable Multi-Attach, you can attach the \n \tvolume to up to 16 Nitro-based instances in the same Availability Zone. For more information, \n \tsee \n \t\tAmazon EBS Multi-Attach in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Indicates whether to enable Amazon EBS Multi-Attach. If you enable Multi-Attach, you can attach the \n \tvolume to up to 16 Instances built on the Nitro System in the same Availability Zone. This parameter is \n \tsupported with io1 volumes only. For more information, \n \tsee \n \t\tAmazon EBS Multi-Attach in the Amazon Elastic Compute Cloud User Guide.

          " + } + }, + "Throughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "smithy.api#documentation": "

          The throughput to provision for a volume, with a maximum of 1,000 MiB/s.

          \n

          This parameter is valid only for gp3 volumes.

          \n \t

          Valid Range: Minimum value of 125. Maximum value of 1000.

          " } } } @@ -20814,7 +20820,7 @@ "target": "com.amazonaws.ec2#DescribeSnapshotAttributeResult" }, "traits": { - "smithy.api#documentation": "

          Describes the specified attribute of the specified snapshot. You can specify only one\n attribute at a time.

          \n

          For more information about EBS snapshots, see Amazon EBS Snapshots in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Describes the specified attribute of the specified snapshot. You can specify only one\n attribute at a time.

          \n

          For more information about EBS snapshots, see Amazon EBS snapshots in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#DescribeSnapshotAttributeRequest": { @@ -20882,7 +20888,7 @@ "target": "com.amazonaws.ec2#DescribeSnapshotsResult" }, "traits": { - "smithy.api#documentation": "

          Describes the specified EBS snapshots available to you or all of the EBS snapshots\n available to you.

          \n

          The snapshots available to you include public snapshots, private snapshots that you own,\n and private snapshots owned by other AWS accounts for which you have explicit create volume\n permissions.

          \n

          The create volume permissions fall into the following categories:

          \n
            \n
          • \n

            \n public: The owner of the snapshot granted create volume\n permissions for the snapshot to the all group. All AWS accounts have create\n volume permissions for these snapshots.

            \n
          • \n
          • \n

            \n explicit: The owner of the snapshot granted create volume\n permissions to a specific AWS account.

            \n
          • \n
          • \n

            \n implicit: An AWS account has implicit create volume permissions\n for all snapshots it owns.

            \n
          • \n
          \n

          The list of snapshots returned can be filtered by specifying snapshot IDs, snapshot\n owners, or AWS accounts with create volume permissions. If no options are specified, Amazon\n EC2 returns all snapshots for which you have create volume permissions.

          \n

          If you specify one or more snapshot IDs, only snapshots that have the specified IDs are\n returned. If you specify an invalid snapshot ID, an error is returned. If you specify a\n snapshot ID for which you do not have access, it is not included in the returned\n results.

          \n

          If you specify one or more snapshot owners using the OwnerIds option, only\n snapshots from the specified owners and for which you have access are returned. The results\n can include the AWS account IDs of the specified owners, amazon for snapshots\n owned by Amazon, or self for snapshots that you own.

          \n

          If you specify a list of restorable users, only snapshots with create snapshot permissions\n for those users are returned. You can specify AWS account IDs (if you own the snapshots),\n self for snapshots for which you own or have explicit permissions, or\n all for public snapshots.

          \n

          If you are describing a long list of snapshots, we recommend that you paginate the output to make the\n list more manageable. The MaxResults parameter sets the maximum number of results\n returned in a single page. If the list of results exceeds your MaxResults value,\n then that number of results is returned along with a NextToken value that can be\n passed to a subsequent DescribeSnapshots request to retrieve the remaining\n results.

          \n

          To get the state of fast snapshot restores for a snapshot, use DescribeFastSnapshotRestores.

          \n

          For more information about EBS snapshots, see Amazon EBS Snapshots in the Amazon Elastic Compute Cloud User Guide.

          ", + "smithy.api#documentation": "

          Describes the specified EBS snapshots available to you or all of the EBS snapshots\n available to you.

          \n

          The snapshots available to you include public snapshots, private snapshots that you own,\n and private snapshots owned by other AWS accounts for which you have explicit create volume\n permissions.

          \n

          The create volume permissions fall into the following categories:

          \n
            \n
          • \n

            \n public: The owner of the snapshot granted create volume\n permissions for the snapshot to the all group. All AWS accounts have create\n volume permissions for these snapshots.

            \n
          • \n
          • \n

            \n explicit: The owner of the snapshot granted create volume\n permissions to a specific AWS account.

            \n
          • \n
          • \n

            \n implicit: An AWS account has implicit create volume permissions\n for all snapshots it owns.

            \n
          • \n
          \n

          The list of snapshots returned can be filtered by specifying snapshot IDs, snapshot\n owners, or AWS accounts with create volume permissions. If no options are specified, Amazon\n EC2 returns all snapshots for which you have create volume permissions.

          \n

          If you specify one or more snapshot IDs, only snapshots that have the specified IDs are\n returned. If you specify an invalid snapshot ID, an error is returned. If you specify a\n snapshot ID for which you do not have access, it is not included in the returned\n results.

          \n

          If you specify one or more snapshot owners using the OwnerIds option, only\n snapshots from the specified owners and for which you have access are returned. The results\n can include the AWS account IDs of the specified owners, amazon for snapshots\n owned by Amazon, or self for snapshots that you own.

          \n

          If you specify a list of restorable users, only snapshots with create snapshot permissions\n for those users are returned. You can specify AWS account IDs (if you own the snapshots),\n self for snapshots for which you own or have explicit permissions, or\n all for public snapshots.

          \n

          If you are describing a long list of snapshots, we recommend that you paginate the output to make the\n list more manageable. The MaxResults parameter sets the maximum number of results\n returned in a single page. If the list of results exceeds your MaxResults value,\n then that number of results is returned along with a NextToken value that can be\n passed to a subsequent DescribeSnapshots request to retrieve the remaining\n results.

          \n

          To get the state of fast snapshot restores for a snapshot, use DescribeFastSnapshotRestores.

          \n

          For more information about EBS snapshots, see Amazon EBS snapshots in the Amazon Elastic Compute Cloud User Guide.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -20904,7 +20910,7 @@ "MaxResults": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The maximum number of snapshot results returned by DescribeSnapshots in\n paginated output. When this parameter is used, DescribeSnapshots only returns\n MaxResults results in a single page along with a NextToken\n response element. The remaining results of the initial request can be seen by sending another\n DescribeSnapshots request with the returned NextToken value. This\n value can be between 5 and 1000; if MaxResults is given a value larger than 1000,\n only 1000 results are returned. If this parameter is not used, then\n DescribeSnapshots returns all results. You cannot specify this parameter and\n the snapshot IDs parameter in the same request.

          " + "smithy.api#documentation": "

          The maximum number of snapshot results returned by DescribeSnapshots in\n paginated output. When this parameter is used, DescribeSnapshots only returns\n MaxResults results in a single page along with a NextToken\n response element. The remaining results of the initial request can be seen by sending another\n DescribeSnapshots request with the returned NextToken value. This\n value can be between 5 and 1,000; if MaxResults is given a value larger than 1,000,\n only 1,000 results are returned. If this parameter is not used, then\n DescribeSnapshots returns all results. You cannot specify this parameter and\n the snapshot IDs parameter in the same request.

          " } }, "NextToken": { @@ -22451,7 +22457,7 @@ "target": "com.amazonaws.ec2#DescribeVolumeAttributeResult" }, "traits": { - "smithy.api#documentation": "

          Describes the specified attribute of the specified volume. You can specify only one\n attribute at a time.

          \n

          For more information about EBS volumes, see Amazon EBS Volumes in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Describes the specified attribute of the specified volume. You can specify only one\n attribute at a time.

          \n

          For more information about EBS volumes, see Amazon EBS volumes in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#DescribeVolumeAttributeRequest": { @@ -22519,7 +22525,7 @@ "target": "com.amazonaws.ec2#DescribeVolumeStatusResult" }, "traits": { - "smithy.api#documentation": "

          Describes the status of the specified volumes. Volume status provides the result of the\n checks performed on your volumes to determine events that can impair the performance of your\n volumes. The performance of a volume can be affected if an issue occurs on the volume's\n underlying host. If the volume's underlying host experiences a power outage or system issue,\n after the system is restored, there could be data inconsistencies on the volume. Volume events\n notify you if this occurs. Volume actions notify you if any action needs to be taken in\n response to the event.

          \n

          The DescribeVolumeStatus operation provides the following information about\n the specified volumes:

          \n

          \n Status: Reflects the current status of the volume. The possible\n values are ok, impaired , warning, or\n insufficient-data. If all checks pass, the overall status of the volume is\n ok. If the check fails, the overall status is impaired. If the\n status is insufficient-data, then the checks may still be taking place on your\n volume at the time. We recommend that you retry the request. For more information about volume\n status, see Monitoring the status of your volumes in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          \n Events: Reflect the cause of a volume status and may require you to\n take action. For example, if your volume returns an impaired status, then the\n volume event might be potential-data-inconsistency. This means that your volume\n has been affected by an issue with the underlying host, has all I/O operations disabled, and\n may have inconsistent data.

          \n

          \n Actions: Reflect the actions you may have to take in response to an\n event. For example, if the status of the volume is impaired and the volume event\n shows potential-data-inconsistency, then the action shows\n enable-volume-io. This means that you may want to enable the I/O operations for\n the volume by calling the EnableVolumeIO action and then check the volume\n for data consistency.

          \n

          Volume status is based on the volume status checks, and does not reflect the volume state.\n Therefore, volume status does not indicate volumes in the error state (for\n example, when a volume is incapable of accepting I/O.)

          ", + "smithy.api#documentation": "

          Describes the status of the specified volumes. Volume status provides the result of the\n checks performed on your volumes to determine events that can impair the performance of your\n volumes. The performance of a volume can be affected if an issue occurs on the volume's\n underlying host. If the volume's underlying host experiences a power outage or system issue,\n after the system is restored, there could be data inconsistencies on the volume. Volume events\n notify you if this occurs. Volume actions notify you if any action needs to be taken in\n response to the event.

          \n

          The DescribeVolumeStatus operation provides the following information about\n the specified volumes:

          \n

          \n Status: Reflects the current status of the volume. The possible\n values are ok, impaired , warning, or\n insufficient-data. If all checks pass, the overall status of the volume is\n ok. If the check fails, the overall status is impaired. If the\n status is insufficient-data, then the checks might still be taking place on your\n volume at the time. We recommend that you retry the request. For more information about volume\n status, see Monitoring the status of your volumes in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          \n Events: Reflect the cause of a volume status and might require you to\n take action. For example, if your volume returns an impaired status, then the\n volume event might be potential-data-inconsistency. This means that your volume\n has been affected by an issue with the underlying host, has all I/O operations disabled, and\n might have inconsistent data.

          \n

          \n Actions: Reflect the actions you might have to take in response to an\n event. For example, if the status of the volume is impaired and the volume event\n shows potential-data-inconsistency, then the action shows\n enable-volume-io. This means that you might want to enable the I/O operations for\n the volume by calling the EnableVolumeIO action and then check the volume\n for data consistency.

          \n

          Volume status is based on the volume status checks, and does not reflect the volume state.\n Therefore, volume status does not indicate volumes in the error state (for\n example, when a volume is incapable of accepting I/O.)

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -22541,7 +22547,7 @@ "MaxResults": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The maximum number of volume results returned by DescribeVolumeStatus in\n paginated output. When this parameter is used, the request only returns\n MaxResults results in a single page along with a NextToken\n response element. The remaining results of the initial request can be seen by sending another\n request with the returned NextToken value. This value can be between 5 and 1000;\n if MaxResults is given a value larger than 1000, only 1000 results are returned.\n If this parameter is not used, then DescribeVolumeStatus returns all results. You\n cannot specify this parameter and the volume IDs parameter in the same request.

          " + "smithy.api#documentation": "

          The maximum number of volume results returned by DescribeVolumeStatus in\n paginated output. When this parameter is used, the request only returns\n MaxResults results in a single page along with a NextToken\n response element. The remaining results of the initial request can be seen by sending another\n request with the returned NextToken value. This value can be between 5 and 1,000;\n if MaxResults is given a value larger than 1,000, only 1,000 results are returned.\n If this parameter is not used, then DescribeVolumeStatus returns all results. You\n cannot specify this parameter and the volume IDs parameter in the same request.

          " } }, "NextToken": { @@ -22597,7 +22603,7 @@ "target": "com.amazonaws.ec2#DescribeVolumesResult" }, "traits": { - "smithy.api#documentation": "

          Describes the specified EBS volumes or all of your EBS volumes.

          \n

          If you are describing a long list of volumes, we recommend that you paginate the output to make the list\n more manageable. The MaxResults parameter sets the maximum number of results\n returned in a single page. If the list of results exceeds your MaxResults value,\n then that number of results is returned along with a NextToken value that can be\n passed to a subsequent DescribeVolumes request to retrieve the remaining\n results.

          \n

          For more information about EBS volumes, see Amazon EBS Volumes in the Amazon Elastic Compute Cloud User Guide.

          ", + "smithy.api#documentation": "

          Describes the specified EBS volumes or all of your EBS volumes.

          \n

          If you are describing a long list of volumes, we recommend that you paginate the output to make the list\n more manageable. The MaxResults parameter sets the maximum number of results\n returned in a single page. If the list of results exceeds your MaxResults value,\n then that number of results is returned along with a NextToken value that can be\n passed to a subsequent DescribeVolumes request to retrieve the remaining\n results.

          \n

          For more information about EBS volumes, see Amazon EBS volumes in the Amazon Elastic Compute Cloud User Guide.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -22688,7 +22694,7 @@ "Filters": { "target": "com.amazonaws.ec2#FilterList", "traits": { - "smithy.api#documentation": "

          The filters.

          \n
            \n
          • \n

            \n attachment.attach-time - The time stamp when the attachment\n initiated.

            \n
          • \n
          • \n

            \n attachment.delete-on-termination - Whether the volume is deleted on\n instance termination.

            \n
          • \n
          • \n

            \n attachment.device - The device name specified in the block device mapping\n (for example, /dev/sda1).

            \n
          • \n
          • \n

            \n attachment.instance-id - The ID of the instance the volume is attached\n to.

            \n
          • \n
          • \n

            \n attachment.status - The attachment state (attaching |\n attached | detaching).

            \n
          • \n
          • \n

            \n availability-zone - The Availability Zone in which the volume was\n created.

            \n
          • \n
          • \n

            \n create-time - The time stamp when the volume was created.

            \n
          • \n
          • \n

            \n encrypted - Indicates whether the volume is encrypted (true\n | false)

            \n
          • \n
          • \n \t\t

            \n multi-attach-enabled - Indicates whether the volume is enabled for Multi-Attach (true\n \t\t\t| false)

            \n \t
          • \n
          • \n

            \n fast-restored - Indicates whether the volume was created from a \n snapshot that is enabled for fast snapshot restore (true | \n false).

            \n
          • \n
          • \n

            \n size - The size of the volume, in GiB.

            \n
          • \n
          • \n

            \n snapshot-id - The snapshot from which the volume was created.

            \n
          • \n
          • \n

            \n status - The state of the volume (creating |\n available | in-use | deleting |\n deleted | error).

            \n
          • \n
          • \n

            \n tag: - The key/value combination of a tag assigned to the resource. Use the tag key in the filter name and the tag value as the filter value.\n For example, to find all resources that have a tag with the key Owner and the value TeamA, specify tag:Owner for the filter name and TeamA for the filter value.

            \n
          • \n
          • \n

            \n tag-key - The key of a tag assigned to the resource. Use this filter to find all resources assigned a tag with a specific key, regardless of the tag value.

            \n
          • \n
          • \n

            \n volume-id - The volume ID.

            \n
          • \n
          • \n

            \n volume-type - The Amazon EBS volume type. This can be gp2\n for General Purpose SSD, io1 or io2 for Provisioned IOPS SSD, st1 for Throughput Optimized HDD,\n sc1 for Cold HDD, or standard for Magnetic volumes.

            \n
          • \n
          ", + "smithy.api#documentation": "

          The filters.

          \n
            \n
          • \n

            \n attachment.attach-time - The time stamp when the attachment\n initiated.

            \n
          • \n
          • \n

            \n attachment.delete-on-termination - Whether the volume is deleted on\n instance termination.

            \n
          • \n
          • \n

            \n attachment.device - The device name specified in the block device mapping\n (for example, /dev/sda1).

            \n
          • \n
          • \n

            \n attachment.instance-id - The ID of the instance the volume is attached\n to.

            \n
          • \n
          • \n

            \n attachment.status - The attachment state (attaching |\n attached | detaching).

            \n
          • \n
          • \n

            \n availability-zone - The Availability Zone in which the volume was\n created.

            \n
          • \n
          • \n

            \n create-time - The time stamp when the volume was created.

            \n
          • \n
          • \n

            \n encrypted - Indicates whether the volume is encrypted (true\n | false)

            \n
          • \n
          • \n \t\t

            \n multi-attach-enabled - Indicates whether the volume is enabled for Multi-Attach (true\n \t\t\t| false)

            \n \t
          • \n
          • \n

            \n fast-restored - Indicates whether the volume was created from a \n snapshot that is enabled for fast snapshot restore (true | \n false).

            \n
          • \n
          • \n

            \n size - The size of the volume, in GiB.

            \n
          • \n
          • \n

            \n snapshot-id - The snapshot from which the volume was created.

            \n
          • \n
          • \n

            \n status - The state of the volume (creating |\n available | in-use | deleting |\n deleted | error).

            \n
          • \n
          • \n

            \n tag: - The key/value combination of a tag assigned to the resource. Use the tag key in the filter name and the tag value as the filter value.\n For example, to find all resources that have a tag with the key Owner and the value TeamA, specify tag:Owner for the filter name and TeamA for the filter value.

            \n
          • \n
          • \n

            \n tag-key - The key of a tag assigned to the resource. Use this filter to find all resources assigned a tag with a specific key, regardless of the tag value.

            \n
          • \n
          • \n

            \n volume-id - The volume ID.

            \n
          • \n
          • \n

            \n volume-type - The Amazon EBS volume type (gp2 | gp3 | io1 | io2 | \n st1 | sc1 | standard)

            \n
          • \n
          ", "smithy.api#xmlName": "Filter" } }, @@ -24085,7 +24091,7 @@ "target": "com.amazonaws.ec2#DisableEbsEncryptionByDefaultResult" }, "traits": { - "smithy.api#documentation": "

          Disables EBS encryption by default for your account in the current Region.

          \n

          After you disable encryption by default, you can still create encrypted volumes by \n enabling encryption when you create each volume.

          \n

          Disabling encryption by default does not change the encryption status of your\n existing volumes.

          \n

          For more information, see Amazon EBS Encryption in the\n Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Disables EBS encryption by default for your account in the current Region.

          \n

          After you disable encryption by default, you can still create encrypted volumes by \n enabling encryption when you create each volume.

          \n

          Disabling encryption by default does not change the encryption status of your\n existing volumes.

          \n

          For more information, see Amazon EBS encryption in the\n Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#DisableEbsEncryptionByDefaultRequest": { @@ -25281,7 +25287,7 @@ "target": "com.amazonaws.ec2#Integer", "traits": { "aws.protocols#ec2QueryName": "Iops", - "smithy.api#documentation": "

          The number of I/O operations per second (IOPS) that the volume supports. For\n io1 and io2 volumes, this represents the number of IOPS that are provisioned\n for the volume. For gp2 volumes, this represents the baseline performance\n of the volume and the rate at which the volume accumulates I/O credits for bursting. For\n more information, see Amazon EBS volume types in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          Constraints: Range is 100-16,000 IOPS for gp2 volumes and\n 100 to 64,000 IOPS for io1 and io2 volumes in\n most Regions. Maximum io1 and io2 IOPS of 64,000 is guaranteed\n only on Nitro-based\n instances. Other instance families guarantee performance up to\n 32,000 IOPS. For more information, see Amazon EBS Volume\n Types in the Amazon Elastic Compute Cloud User Guide.

          \n

          Condition: This parameter is required for requests to create io1 and io2 volumes;\n it is not used in requests to create gp2, st1,\n sc1, or standard volumes.

          ", + "smithy.api#documentation": "

          The number of I/O operations per second (IOPS). For gp3, io1, and io2 volumes, this \n represents the number of IOPS that are provisioned for the volume. For gp2 volumes, this \n represents the baseline performance of the volume and the rate at which the volume accumulates \n I/O credits for bursting.

          \n

          The following are the supported values for each volume type:

          \n
            \n
          • \n

            \n gp3: 3,000-16,000 IOPS

            \n
          • \n
          • \n

            \n io1: 100-64,000 IOPS

            \n
          • \n
          • \n

            \n io2: 100-64,000 IOPS

            \n
          • \n
          \n

          For io1 and io2 volumes, we guarantee 64,000 IOPS only for\n Instances built on the Nitro System. Other instance families guarantee performance \n up to 32,000 IOPS.

          \n

          This parameter is required for io1 and io2 volumes.\n The default for gp3 volumes is 3,000 IOPS.\n This parameter is not supported for gp2, st1, sc1, or standard volumes.

          ", "smithy.api#xmlName": "iops" } }, @@ -25297,7 +25303,7 @@ "target": "com.amazonaws.ec2#Integer", "traits": { "aws.protocols#ec2QueryName": "VolumeSize", - "smithy.api#documentation": "

          The size of the volume, in GiB.

          \n

          Default: If you're creating the volume from a snapshot and don't specify a volume\n size, the default is the snapshot size.

          \n

          Constraints: 1-16384 for General Purpose SSD (gp2), 4-16384 for\n Provisioned IOPS SSD (io1 and io2), 500-16384 for Throughput Optimized HDD\n (st1), 500-16384 for Cold HDD (sc1), and 1-1024 for\n Magnetic (standard) volumes. If you specify a snapshot, the volume size\n must be equal to or larger than the snapshot size.

          ", + "smithy.api#documentation": "

          The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size.\n If you specify a snapshot, the default is the snapshot size. You can specify a volume \n size that is equal to or larger than the snapshot size.

          \n

          The following are the supported volumes sizes for each volume type:

          \n
            \n
          • \n

            \n gp2 and gp3: 1-16,384

            \n
          • \n
          • \n

            \n io1 and io2: 4-16,384

            \n
          • \n
          • \n

            \n st1: 500-16,384

            \n
          • \n
          • \n

            \n sc1: 500-16,384

            \n
          • \n
          • \n

            \n standard: 1-1,024

            \n
          • \n
          ", "smithy.api#xmlName": "volumeSize" } }, @@ -25305,7 +25311,7 @@ "target": "com.amazonaws.ec2#VolumeType", "traits": { "aws.protocols#ec2QueryName": "VolumeType", - "smithy.api#documentation": "

          The volume type. If you set the type to io1 or io2, you must also specify\n \tthe Iops parameter. If you set the type to gp2, \n \tst1, sc1, or standard, you must omit \n \tthe Iops parameter.

          \n

          Default: gp2\n

          ", + "smithy.api#documentation": "

          The volume type. For more information, see Amazon EBS volume types in the\n Amazon Elastic Compute Cloud User Guide. If the volume type is io1 or io2, \n you must specify the IOPS that the volume supports.

          ", "smithy.api#xmlName": "volumeType" } }, @@ -25316,6 +25322,14 @@ "smithy.api#xmlName": "KmsKeyId" } }, + "Throughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "aws.protocols#ec2QueryName": "Throughput", + "smithy.api#documentation": "

          The throughput that the volume supports, in MiB/s.

          \n \t

          This parameter is valid only for gp3 volumes.

          \n \t

          Valid Range: Minimum value of 125. Maximum value of 1000.

          ", + "smithy.api#xmlName": "throughput" + } + }, "Encrypted": { "target": "com.amazonaws.ec2#Boolean", "traits": { @@ -25945,7 +25959,7 @@ "target": "com.amazonaws.ec2#EnableEbsEncryptionByDefaultResult" }, "traits": { - "smithy.api#documentation": "

          Enables EBS encryption by default for your account in the current Region.

          \n

          After you enable encryption by default, the EBS volumes that you create\n are always encrypted, either using the default CMK or the CMK that you specified\n when you created each volume. For more information, see Amazon EBS Encryption in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          You can specify the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId\n or ResetEbsDefaultKmsKeyId.

          \n

          Enabling encryption by default has no effect on the encryption status of your \n existing volumes.

          \n

          After you enable encryption by default, you can no longer launch instances\n using instance types that do not support encryption. For more information, see Supported\n instance types.

          " + "smithy.api#documentation": "

          Enables EBS encryption by default for your account in the current Region.

          \n

          After you enable encryption by default, the EBS volumes that you create\n are always encrypted, either using the default CMK or the CMK that you specified\n when you created each volume. For more information, see Amazon EBS encryption in the\n Amazon Elastic Compute Cloud User Guide.

          \n

          You can specify the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId\n or ResetEbsDefaultKmsKeyId.

          \n

          Enabling encryption by default has no effect on the encryption status of your \n existing volumes.

          \n

          After you enable encryption by default, you can no longer launch instances\n using instance types that do not support encryption. For more information, see Supported\n instance types.

          " } }, "com.amazonaws.ec2#EnableEbsEncryptionByDefaultRequest": { @@ -29072,7 +29086,7 @@ "target": "com.amazonaws.ec2#GetEbsDefaultKmsKeyIdResult" }, "traits": { - "smithy.api#documentation": "

          Describes the default customer master key (CMK) for EBS encryption by default for your account in this Region. \n You can change the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId or\n ResetEbsDefaultKmsKeyId.

          \n

          For more information, see Amazon EBS Encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Describes the default customer master key (CMK) for EBS encryption by default for your account in this Region. \n You can change the default CMK for encryption by default using ModifyEbsDefaultKmsKeyId or\n ResetEbsDefaultKmsKeyId.

          \n

          For more information, see Amazon EBS encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#GetEbsDefaultKmsKeyIdRequest": { @@ -29108,7 +29122,7 @@ "target": "com.amazonaws.ec2#GetEbsEncryptionByDefaultResult" }, "traits": { - "smithy.api#documentation": "

          Describes whether EBS encryption by default is enabled for your account in the current\n Region.

          \n

          For more information, see Amazon EBS Encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Describes whether EBS encryption by default is enabled for your account in the current\n Region.

          \n

          For more information, see Amazon EBS encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#GetEbsEncryptionByDefaultRequest": { @@ -34897,6 +34911,33 @@ { "value": "r5a.24xlarge" }, + { + "value": "r5b.large" + }, + { + "value": "r5b.xlarge" + }, + { + "value": "r5b.2xlarge" + }, + { + "value": "r5b.4xlarge" + }, + { + "value": "r5b.8xlarge" + }, + { + "value": "r5b.12xlarge" + }, + { + "value": "r5b.16xlarge" + }, + { + "value": "r5b.24xlarge" + }, + { + "value": "r5b.metal" + }, { "value": "r5d.large" }, @@ -35383,6 +35424,36 @@ { "value": "d2.8xlarge" }, + { + "value": "d3.xlarge" + }, + { + "value": "d3.2xlarge" + }, + { + "value": "d3.4xlarge" + }, + { + "value": "d3.8xlarge" + }, + { + "value": "d3en.xlarge" + }, + { + "value": "d3en.2xlarge" + }, + { + "value": "d3en.4xlarge" + }, + { + "value": "d3en.6xlarge" + }, + { + "value": "d3en.8xlarge" + }, + { + "value": "d3en.12xlarge" + }, { "value": "f1.2xlarge" }, @@ -35494,6 +35565,27 @@ { "value": "m5ad.24xlarge" }, + { + "value": "m5zn.large" + }, + { + "value": "m5zn.xlarge" + }, + { + "value": "m5zn.2xlarge" + }, + { + "value": "m5zn.3xlarge" + }, + { + "value": "m5zn.6xlarge" + }, + { + "value": "m5zn.12xlarge" + }, + { + "value": "m5zn.metal" + }, { "value": "h1.2xlarge" }, @@ -37141,6 +37233,14 @@ "smithy.api#documentation": "

          The volume type.

          ", "smithy.api#xmlName": "volumeType" } + }, + "Throughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "aws.protocols#ec2QueryName": "Throughput", + "smithy.api#documentation": "

          The throughput that the volume supports, in MiB/s.

          ", + "smithy.api#xmlName": "throughput" + } } }, "traits": { @@ -37153,7 +37253,7 @@ "Encrypted": { "target": "com.amazonaws.ec2#Boolean", "traits": { - "smithy.api#documentation": "

          Indicates whether the EBS volume is encrypted. Encrypted volumes can only be attached to instances that support Amazon EBS encryption. If you are creating a volume from a snapshot, you can't specify an encryption value.

          " + "smithy.api#documentation": "

          Indicates whether the EBS volume is encrypted. Encrypted volumes can only be attached to instances that support Amazon EBS encryption. \n If you are creating a volume from a snapshot, you can't specify an encryption value.

          " } }, "DeleteOnTermination": { @@ -37165,7 +37265,7 @@ "Iops": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The number of I/O operations per second (IOPS) to provision for an io1 or io2 volume, with a maximum\n \t\tratio of 50 IOPS/GiB for io1, and 500 IOPS/GiB for io2. Range is 100 to 64,000 IOPS for\n \t\tvolumes in most Regions. Maximum IOPS of 64,000 is guaranteed only on\n \t\tNitro-based instances. Other instance families guarantee performance up to\n \t\t32,000 IOPS. For more information, see Amazon EBS Volume Types in the\n \t\tAmazon Elastic Compute Cloud User Guide.

          \n \t

          This parameter is valid only for Provisioned IOPS SSD (io1 and io2) volumes.

          " + "smithy.api#documentation": "

          The number of I/O operations per second (IOPS). For gp3, io1, and io2 \n volumes, this represents the number of IOPS that are provisioned for the volume. For gp2 volumes, \n this represents the baseline performance of the volume and the rate at which the volume accumulates I/O credits \n for bursting.

          \n

          The following are the supported values for each volume type:

          \n
            \n
          • \n

            \n gp3: 3,000-16,000 IOPS

            \n
          • \n
          • \n

            \n io1: 100-64,000 IOPS

            \n
          • \n
          • \n

            \n io2: 100-64,000 IOPS

            \n
          • \n
          \n

          For io1 and io2 volumes, we guarantee 64,000 IOPS \n only for Instances built on the Nitro System. Other instance families guarantee performance up\n to 32,000 IOPS.

          \n

          This parameter is required for io1 and io2 volumes.\n The default for gp3 volumes is 3,000 IOPS.\n This parameter is not supported for gp2, st1, sc1, or standard volumes.

          " } }, "KmsKeyId": { @@ -37183,13 +37283,19 @@ "VolumeSize": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The size of the volume, in GiB.

          \n

          Default: If you're creating the volume from a snapshot and don't specify a volume size, the default is the snapshot size.

          " + "smithy.api#documentation": "

          The size of the volume, in GiBs. You must specify either a snapshot ID or a volume size.\n If you specify a snapshot, the default is the snapshot size. You can specify a volume \n size that is equal to or larger than the snapshot size.

          \n

          The following are the supported volume sizes for each volume type:

          \n
            \n
          • \n

            \n gp2 and gp3: 1-16,384

            \n
          • \n
          • \n

            \n io1 and io2: 4-16,384

            \n
          • \n
          • \n

            \n st1 and sc1: 125-16,384

            \n
          • \n
          • \n

            \n standard: 1-1,024

            \n
          • \n
          " } }, "VolumeType": { "target": "com.amazonaws.ec2#VolumeType", "traits": { - "smithy.api#documentation": "

          The volume type.

          " + "smithy.api#documentation": "

          The volume type. The default is gp2. For more information, see Amazon EBS volume types in the\n Amazon Elastic Compute Cloud User Guide.

          " + } + }, + "Throughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "smithy.api#documentation": "

          The throughput to provision for a gp3 volume, with a maximum of 1,000 MiB/s.

          \n \t

          Valid Range: Minimum value of 125. Maximum value of 1000.

          " } } }, @@ -39700,7 +39806,7 @@ "target": "com.amazonaws.ec2#ModifyEbsDefaultKmsKeyIdResult" }, "traits": { - "smithy.api#documentation": "

          Changes the default customer master key (CMK) for EBS encryption by default for your account in this Region.

          \n

          AWS creates a unique AWS managed CMK in each Region for use with encryption by default. If\n you change the default CMK to a symmetric customer managed CMK, it is used instead of the AWS\n managed CMK. To reset the default CMK to the AWS managed CMK for EBS, use ResetEbsDefaultKmsKeyId. Amazon EBS does not support asymmetric CMKs.

          \n

          If you delete or disable the customer managed CMK that you specified for use with\n encryption by default, your instances will fail to launch.

          \n

          For more information, see Amazon EBS Encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Changes the default customer master key (CMK) for EBS encryption by default for your account in this Region.

          \n

          AWS creates a unique AWS managed CMK in each Region for use with encryption by default. If\n you change the default CMK to a symmetric customer managed CMK, it is used instead of the AWS\n managed CMK. To reset the default CMK to the AWS managed CMK for EBS, use ResetEbsDefaultKmsKeyId. Amazon EBS does not support asymmetric CMKs.

          \n

          If you delete or disable the customer managed CMK that you specified for use with\n encryption by default, your instances will fail to launch.

          \n

          For more information, see Amazon EBS encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#ModifyEbsDefaultKmsKeyIdRequest": { @@ -41515,7 +41621,7 @@ "target": "com.amazonaws.ec2#ModifyVolumeResult" }, "traits": { - "smithy.api#documentation": "

          You can modify several parameters of an existing EBS volume, including volume size, volume\n type, and IOPS capacity. If your EBS volume is attached to a current-generation EC2 instance\n type, you may be able to apply these changes without stopping the instance or detaching the\n volume from it. For more information about modifying an EBS volume running Linux, see Modifying the size, IOPS, or\n type of an EBS volume on Linux. For more information about modifying an EBS volume\n running Windows, see Modifying the size, IOPS, or type of an EBS volume on Windows.

          \n

          When you complete a resize operation on your volume, you need to extend the volume's\n file-system size to take advantage of the new storage capacity. For information about\n extending a Linux file system, see Extending a Linux\n file system. For information about extending a Windows file system, see Extending a\n Windows file system.

          \n

          You can use CloudWatch Events to check the status of a modification to an EBS volume. For\n information about CloudWatch Events, see the Amazon CloudWatch Events User Guide. You can also track the status of a\n modification using DescribeVolumesModifications. For information\n about tracking status changes using either method, see Monitoring volume\n modifications.

          \n

          With previous-generation instance types, resizing an EBS volume may require detaching and\n reattaching the volume or stopping and restarting the instance. For more information, see\n Modifying the size,\n IOPS, or type of an EBS volume on Linux and Modifying the size, IOPS, or type of an EBS\n volume on Windows.

          \n

          If you reach the maximum volume modification rate per volume limit, you will need to wait\n at least six hours before applying further modifications to the affected EBS volume.

          " + "smithy.api#documentation": "

          You can modify several parameters of an existing EBS volume, including volume size, volume\n type, and IOPS capacity. If your EBS volume is attached to a current-generation EC2 instance\n type, you might be able to apply these changes without stopping the instance or detaching the\n volume from it. For more information about modifying an EBS volume running Linux, see Modifying the size, IOPS, or\n type of an EBS volume on Linux. For more information about modifying an EBS volume\n running Windows, see Modifying the size, IOPS, or type of an EBS volume on Windows.

          \n

          When you complete a resize operation on your volume, you need to extend the volume's\n file-system size to take advantage of the new storage capacity. For information about\n extending a Linux file system, see Extending a Linux\n file system. For information about extending a Windows file system, see Extending a\n Windows file system.

          \n

          You can use CloudWatch Events to check the status of a modification to an EBS volume. For\n information about CloudWatch Events, see the Amazon CloudWatch Events User Guide. You can also track the status of a\n modification using DescribeVolumesModifications. For information\n about tracking status changes using either method, see Monitoring volume\n modifications.

          \n

          With previous-generation instance types, resizing an EBS volume might require detaching and\n reattaching the volume or stopping and restarting the instance. For more information, see\n Amazon EBS Elastic \n Volumes (Linux) or Amazon EBS Elastic Volumes (Windows).

          \n

          If you reach the maximum volume modification rate per volume limit, you will need to wait\n at least six hours before applying further modifications to the affected EBS volume.

          " } }, "com.amazonaws.ec2#ModifyVolumeAttribute": { @@ -41572,19 +41678,25 @@ "Size": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The target size of the volume, in GiB. The target volume size must be greater than or\n equal to than the existing size of the volume. For information about available EBS volume\n sizes, see Amazon EBS Volume\n Types.

          \n

          Default: If no size is specified, the existing size is retained.

          " + "smithy.api#documentation": "

          The target size of the volume, in GiB. The target volume size must be greater than or\n equal to the existing size of the volume.

          \n

          The following are the supported volume sizes for each volume type:

          \n
            \n
          • \n

            \n gp2 and gp3: 1-16,384

            \n
          • \n
          • \n

            \n io1 and io2: 4-16,384

            \n
          • \n
          • \n

            \n st1 and sc1: 125-16,384

            \n
          • \n
          • \n

            \n standard: 1-1,024

            \n
          • \n
          \n

          Default: If no size is specified, the existing size is retained.

          " } }, "VolumeType": { "target": "com.amazonaws.ec2#VolumeType", "traits": { - "smithy.api#documentation": "

          The target EBS volume type of the volume.

          \n

          Default: If no type is specified, the existing type is retained.

          " + "smithy.api#documentation": "

          The target EBS volume type of the volume. For more information, see Amazon EBS volume types in the Amazon Elastic Compute Cloud User Guide.

          \n

          Default: If no type is specified, the existing type is retained.

          " } }, "Iops": { "target": "com.amazonaws.ec2#Integer", "traits": { - "smithy.api#documentation": "

          The target IOPS rate of the volume.

          \n

          This is only valid for Provisioned IOPS SSD (io1 and io2) volumes. For moreinformation, see \n \t\n \t\tProvisioned IOPS SSD (io1 and io2) volumes.

          \n

          Default: If no IOPS value is specified, the existing value is retained.

          " + "smithy.api#documentation": "

          The target IOPS rate of the volume. This parameter is valid only for gp3, io1, and io2 volumes.

          \n

          The following are the supported values for each volume type:

          \n
            \n
          • \n

            \n gp3: 3,000-16,000 IOPS

            \n
          • \n
          • \n

            \n io1: 100-64,000 IOPS

            \n
          • \n
          • \n

            \n io2: 100-64,000 IOPS

            \n
          • \n
          \n

          Default: If no IOPS value is specified, the existing value is retained.

          " + } + }, + "Throughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "smithy.api#documentation": "

          The target throughput of the volume, in MiB/s. This parameter is valid only for gp3 volumes. \n The maximum value is 1,000.

          \n

          Default: If no throughput value is specified, the existing value is retained.

          \n \t

          Valid Range: Minimum value of 125. Maximum value of 1000.

          " } } } @@ -48764,7 +48876,7 @@ "target": "com.amazonaws.ec2#ResetEbsDefaultKmsKeyIdResult" }, "traits": { - "smithy.api#documentation": "

          Resets the default customer master key (CMK) for EBS encryption for your account in this Region \n to the AWS managed CMK for EBS.

          \n

          After resetting the default CMK to the AWS managed CMK, you can continue to encrypt by a \n customer managed CMK by specifying it when you create the volume. For more information, see\n Amazon EBS Encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " + "smithy.api#documentation": "

          Resets the default customer master key (CMK) for EBS encryption for your account in this Region \n to the AWS managed CMK for EBS.

          \n

          After resetting the default CMK to the AWS managed CMK, you can continue to encrypt by a \n customer managed CMK by specifying it when you create the volume. For more information, see\n Amazon EBS encryption\n in the Amazon Elastic Compute Cloud User Guide.

          " } }, "com.amazonaws.ec2#ResetEbsDefaultKmsKeyIdRequest": { @@ -58544,7 +58656,7 @@ "target": "com.amazonaws.ec2#Integer", "traits": { "aws.protocols#ec2QueryName": "Iops", - "smithy.api#documentation": "

          The number of I/O operations per second (IOPS) that the volume supports. For Provisioned IOPS SSD\n volumes, this represents the number of IOPS that are provisioned for the volume. For General Purpose SSD\n volumes, this represents the baseline performance of the volume and the rate at which the\n volume accumulates I/O credits for bursting. For more information, see Amazon EBS volume types in\n the Amazon Elastic Compute Cloud User Guide.

          \n

          Constraints: Range is 100-16,000 IOPS for gp2 volumes and 100\n to 64,000 IOPS for io1 and io2 volumes, in most Regions. The maximum\n IOPS for io1 and io2 of 64,000 is guaranteed only on Nitro-based\n instances. Other instance families guarantee performance up to\n 32,000 IOPS.

          \n

          Condition: This parameter is required for requests to create io1 and io2 volumes;\n it is not used in requests to create gp2, st1,\n sc1, or standard volumes.

          ", + "smithy.api#documentation": "

          The number of I/O operations per second (IOPS). For gp3, io1, and io2 volumes, this represents \n the number of IOPS that are provisioned for the volume. For gp2 volumes, this represents the baseline \n performance of the volume and the rate at which the volume accumulates I/O credits for bursting.

          ", "smithy.api#xmlName": "iops" } }, @@ -58560,7 +58672,7 @@ "target": "com.amazonaws.ec2#VolumeType", "traits": { "aws.protocols#ec2QueryName": "VolumeType", - "smithy.api#documentation": "

          The volume type. This can be gp2 for General Purpose SSD, io1 or io2 for Provisioned IOPS SSD,\n st1 for Throughput Optimized HDD, sc1 for Cold HDD, or\n standard for Magnetic volumes.

          ", + "smithy.api#documentation": "

          The volume type.

          ", "smithy.api#xmlName": "volumeType" } }, @@ -58579,6 +58691,14 @@ "smithy.api#documentation": "

          Indicates whether Amazon EBS Multi-Attach is enabled.

          ", "smithy.api#xmlName": "multiAttachEnabled" } + }, + "Throughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "aws.protocols#ec2QueryName": "Throughput", + "smithy.api#documentation": "

          The throughput that the volume supports, in MiB/s.

          ", + "smithy.api#xmlName": "throughput" + } } }, "traits": { @@ -58774,6 +58894,14 @@ "smithy.api#xmlName": "targetVolumeType" } }, + "TargetThroughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "aws.protocols#ec2QueryName": "TargetThroughput", + "smithy.api#documentation": "

          The target throughput of the volume, in MiB/s.

          ", + "smithy.api#xmlName": "targetThroughput" + } + }, "OriginalSize": { "target": "com.amazonaws.ec2#Integer", "traits": { @@ -58798,6 +58926,14 @@ "smithy.api#xmlName": "originalVolumeType" } }, + "OriginalThroughput": { + "target": "com.amazonaws.ec2#Integer", + "traits": { + "aws.protocols#ec2QueryName": "OriginalThroughput", + "smithy.api#documentation": "

          The original throughput of the volume, in MiB/s.

          ", + "smithy.api#xmlName": "originalThroughput" + } + }, "Progress": { "target": "com.amazonaws.ec2#Long", "traits": { @@ -59207,6 +59343,9 @@ }, { "value": "st1" + }, + { + "value": "gp3" } ] } diff --git a/codegen/sdk-codegen/aws-models/ecr-public.2020-10-30.json b/codegen/sdk-codegen/aws-models/ecr-public.2020-10-30.json new file mode 100644 index 000000000000..789a25905bfd --- /dev/null +++ b/codegen/sdk-codegen/aws-models/ecr-public.2020-10-30.json @@ -0,0 +1,2738 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.ecrpublic#AboutText": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 10240 + } + } + }, + "com.amazonaws.ecrpublic#Architecture": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 50 + } + } + }, + "com.amazonaws.ecrpublic#ArchitectureList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#Architecture" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 50 + } + } + }, + "com.amazonaws.ecrpublic#Arn": { + "type": "string" + }, + "com.amazonaws.ecrpublic#AuthorizationData": { + "type": "structure", + "members": { + "authorizationToken": { + "target": "com.amazonaws.ecrpublic#Base64", + "traits": { + "smithy.api#documentation": "

          A base64-encoded string that contains authorization data for a public Amazon ECR registry.\n When the string is decoded, it is presented in the format user:password for\n public registry authentication using docker login.

          " + } + }, + "expiresAt": { + "target": "com.amazonaws.ecrpublic#ExpirationTimestamp", + "traits": { + "smithy.api#documentation": "

          The Unix time in seconds and milliseconds when the authorization token expires.\n Authorization tokens are valid for 12 hours.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An authorization token data object that corresponds to a public registry.

          " + } + }, + "com.amazonaws.ecrpublic#Base64": { + "type": "string", + "traits": { + "smithy.api#pattern": "^\\S+$" + } + }, + "com.amazonaws.ecrpublic#BatchCheckLayerAvailability": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#BatchCheckLayerAvailabilityRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#BatchCheckLayerAvailabilityResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RegistryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Checks the availability of one or more image layers within a repository in a public\n registry. When an image is pushed to a repository, each image layer is checked to verify if\n it has been uploaded before. If it has been uploaded, then the image layer is\n skipped.

          \n \n

          This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

          \n
          " + } + }, + "com.amazonaws.ecrpublic#BatchCheckLayerAvailabilityRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryIdOrAlias", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the image layers to\n check. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository that is associated with the image layers to check.

          ", + "smithy.api#required": {} + } + }, + "layerDigests": { + "target": "com.amazonaws.ecrpublic#BatchedOperationLayerDigestList", + "traits": { + "smithy.api#documentation": "

          The digests of the image layers to check.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#BatchCheckLayerAvailabilityResponse": { + "type": "structure", + "members": { + "layers": { + "target": "com.amazonaws.ecrpublic#LayerList", + "traits": { + "smithy.api#documentation": "

          A list of image layer objects corresponding to the image layer references in the\n request.

          " + } + }, + "failures": { + "target": "com.amazonaws.ecrpublic#LayerFailureList", + "traits": { + "smithy.api#documentation": "

          Any failures associated with the call.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#BatchDeleteImage": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#BatchDeleteImageRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#BatchDeleteImageResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a list of specified images within a repository in a public registry. Images are\n specified with either an imageTag or imageDigest.

          \n

          You can remove a tag from an image by specifying the image's tag in your request. When\n you remove the last tag from an image, the image is deleted from your repository.

          \n

          You can completely delete an image (and all of its tags) by specifying the image's\n digest in your request.

          " + } + }, + "com.amazonaws.ecrpublic#BatchDeleteImageRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry that contains the image to delete.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository in a public registry that contains the image to delete.

          ", + "smithy.api#required": {} + } + }, + "imageIds": { + "target": "com.amazonaws.ecrpublic#ImageIdentifierList", + "traits": { + "smithy.api#documentation": "

          A list of image ID references that correspond to images to delete. The format of the\n imageIds reference is imageTag=tag or\n imageDigest=digest.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#BatchDeleteImageResponse": { + "type": "structure", + "members": { + "imageIds": { + "target": "com.amazonaws.ecrpublic#ImageIdentifierList", + "traits": { + "smithy.api#documentation": "

          The image IDs of the deleted images.

          " + } + }, + "failures": { + "target": "com.amazonaws.ecrpublic#ImageFailureList", + "traits": { + "smithy.api#documentation": "

          Any failures associated with the call.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#BatchedOperationLayerDigest": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1000 + } + } + }, + "com.amazonaws.ecrpublic#BatchedOperationLayerDigestList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#BatchedOperationLayerDigest" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.ecrpublic#CompleteLayerUpload": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#CompleteLayerUploadRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#CompleteLayerUploadResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#EmptyUploadException" + }, + { + "target": "com.amazonaws.ecrpublic#InvalidLayerException" + }, + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#LayerAlreadyExistsException" + }, + { + "target": "com.amazonaws.ecrpublic#LayerPartTooSmallException" + }, + { + "target": "com.amazonaws.ecrpublic#RegistryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + }, + { + "target": "com.amazonaws.ecrpublic#UploadNotFoundException" + } + ], + "traits": { + "smithy.api#documentation": "

          Informs Amazon ECR that the image layer upload has completed for a specified public registry,\n repository name, and upload ID. You can optionally provide a sha256 digest of\n the image layer for data validation purposes.

          \n

          When an image is pushed, the CompleteLayerUpload API is called once per each new image\n layer to verify that the upload has completed.

          \n \n

          This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

          \n
          " + } + }, + "com.amazonaws.ecrpublic#CompleteLayerUploadRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryIdOrAlias", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry to which to upload layers.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository in a public registry to associate with the image\n layer.

          ", + "smithy.api#required": {} + } + }, + "uploadId": { + "target": "com.amazonaws.ecrpublic#UploadId", + "traits": { + "smithy.api#documentation": "

          The upload ID from a previous InitiateLayerUpload operation to\n associate with the image layer.

          ", + "smithy.api#required": {} + } + }, + "layerDigests": { + "target": "com.amazonaws.ecrpublic#LayerDigestList", + "traits": { + "smithy.api#documentation": "

          The sha256 digest of the image layer.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#CompleteLayerUploadResponse": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The public registry ID associated with the request.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository name associated with the request.

          " + } + }, + "uploadId": { + "target": "com.amazonaws.ecrpublic#UploadId", + "traits": { + "smithy.api#documentation": "

          The upload ID associated with the layer.

          " + } + }, + "layerDigest": { + "target": "com.amazonaws.ecrpublic#LayerDigest", + "traits": { + "smithy.api#documentation": "

          The sha256 digest of the image layer.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#CreateRepository": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#CreateRepositoryRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#CreateRepositoryResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#LimitExceededException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryAlreadyExistsException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a repository in a public registry. For more information, see Amazon ECR\n repositories in the Amazon Elastic Container Registry User Guide.

          " + } + }, + "com.amazonaws.ecrpublic#CreateRepositoryRequest": { + "type": "structure", + "members": { + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name to use for the repository. This appears publicly in the Amazon ECR Public Gallery.\n The repository name may be specified on its own (such as nginx-web-app) or it\n can be prepended with a namespace to group the repository into a category (such as\n project-a/nginx-web-app).

          ", + "smithy.api#required": {} + } + }, + "catalogData": { + "target": "com.amazonaws.ecrpublic#RepositoryCatalogDataInput", + "traits": { + "smithy.api#documentation": "

          The details about the repository that are publicly visible in the\n Amazon ECR Public Gallery.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#CreateRepositoryResponse": { + "type": "structure", + "members": { + "repository": { + "target": "com.amazonaws.ecrpublic#Repository", + "traits": { + "smithy.api#documentation": "

          The repository that was created.

          " + } + }, + "catalogData": { + "target": "com.amazonaws.ecrpublic#RepositoryCatalogData" + } + } + }, + "com.amazonaws.ecrpublic#CreationTimestamp": { + "type": "timestamp" + }, + "com.amazonaws.ecrpublic#DefaultRegistryAliasFlag": { + "type": "boolean" + }, + "com.amazonaws.ecrpublic#DeleteRepository": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#DeleteRepositoryRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#DeleteRepositoryResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotEmptyException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a repository in a public registry. If the repository contains images, you must\n either delete all images in the repository or use the force option which\n deletes all images on your behalf before deleting the repository.

          " + } + }, + "com.amazonaws.ecrpublic#DeleteRepositoryPolicy": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#DeleteRepositoryPolicyRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#DeleteRepositoryPolicyResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryPolicyNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes the repository policy associated with the specified repository.

          " + } + }, + "com.amazonaws.ecrpublic#DeleteRepositoryPolicyRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the repository\n policy to delete. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository that is associated with the repository policy to\n delete.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#DeleteRepositoryPolicyResponse": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The registry ID associated with the request.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository name associated with the request.

          " + } + }, + "policyText": { + "target": "com.amazonaws.ecrpublic#RepositoryPolicyText", + "traits": { + "smithy.api#documentation": "

          The JSON repository policy that was deleted from the repository.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DeleteRepositoryRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the repository to\n delete. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to delete.

          ", + "smithy.api#required": {} + } + }, + "force": { + "target": "com.amazonaws.ecrpublic#ForceFlag", + "traits": { + "smithy.api#documentation": "

          If a repository contains images, forces the deletion.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DeleteRepositoryResponse": { + "type": "structure", + "members": { + "repository": { + "target": "com.amazonaws.ecrpublic#Repository", + "traits": { + "smithy.api#documentation": "

          The repository that was deleted.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeImageTags": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#DescribeImageTagsRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#DescribeImageTagsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Returns the image tag details for a repository in a public registry.

          ", + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "items": "imageTagDetails", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.ecrpublic#DescribeImageTagsRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the repository in\n which to describe images. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository that contains the image tag details to describe.

          ", + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n DescribeImageTags request where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value. This value is\n null when there are no more results to return. This option cannot be used\n when you specify images with imageIds.

          " + } + }, + "maxResults": { + "target": "com.amazonaws.ecrpublic#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of repository results returned by DescribeImageTags in\n paginated output. When this parameter is used, DescribeImageTags only returns\n maxResults results in a single page along with a nextToken\n response element. The remaining results of the initial request can be seen by sending\n another DescribeImageTags request with the returned nextToken\n value. This value can be between 1 and 1000. If this parameter\n is not used, then DescribeImageTags returns up to 100\n results and a nextToken value, if applicable. This option cannot be used when\n you specify images with imageIds.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeImageTagsResponse": { + "type": "structure", + "members": { + "imageTagDetails": { + "target": "com.amazonaws.ecrpublic#ImageTagDetailList", + "traits": { + "smithy.api#documentation": "

          The image tag details for the images in the requested repository.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value to include in a future DescribeImageTags\n request. When the results of a DescribeImageTags request exceed\n maxResults, this value can be used to retrieve the next page of results.\n This value is null when there are no more results to return.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeImages": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#DescribeImagesRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#DescribeImagesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#ImageNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Returns metadata about the images in a repository in a public registry.

          \n \n

          Beginning with Docker version 1.9, the Docker client compresses image layers before\n pushing them to a V2 Docker registry. The output of the docker images\n command shows the uncompressed image size, so it may return a larger image size than the\n image sizes returned by DescribeImages.

          \n
          ", + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "items": "imageDetails", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.ecrpublic#DescribeImagesRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the repository in\n which to describe images. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository that contains the images to describe.

          ", + "smithy.api#required": {} + } + }, + "imageIds": { + "target": "com.amazonaws.ecrpublic#ImageIdentifierList", + "traits": { + "smithy.api#documentation": "

          The list of image IDs for the requested repository.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n DescribeImages request where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value. This value is\n null when there are no more results to return. This option cannot be used\n when you specify images with imageIds.

          " + } + }, + "maxResults": { + "target": "com.amazonaws.ecrpublic#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of repository results returned by DescribeImages in\n paginated output. When this parameter is used, DescribeImages only returns\n maxResults results in a single page along with a nextToken\n response element. The remaining results of the initial request can be seen by sending\n another DescribeImages request with the returned nextToken value.\n This value can be between 1 and 1000. If this parameter is not\n used, then DescribeImages returns up to 100 results and a\n nextToken value, if applicable. This option cannot be used when you specify\n images with imageIds.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeImagesResponse": { + "type": "structure", + "members": { + "imageDetails": { + "target": "com.amazonaws.ecrpublic#ImageDetailList", + "traits": { + "smithy.api#documentation": "

          A list of ImageDetail objects that contain data about the\n image.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value to include in a future DescribeImages\n request. When the results of a DescribeImages request exceed\n maxResults, this value can be used to retrieve the next page of results.\n This value is null when there are no more results to return.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeRegistries": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#DescribeRegistriesRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#DescribeRegistriesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + } + ], + "traits": { + "smithy.api#documentation": "

          Returns details for a public registry.

          ", + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "items": "registries", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.ecrpublic#DescribeRegistriesRequest": { + "type": "structure", + "members": { + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n DescribeRegistries request where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value. This value is\n null when there are no more results to return.

          \n \n

          This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and not for other programmatic purposes.

          \n
          " + } + }, + "maxResults": { + "target": "com.amazonaws.ecrpublic#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of repository results returned by DescribeRegistries in\n paginated output. When this parameter is used, DescribeRegistries only returns\n maxResults results in a single page along with a nextToken\n response element. The remaining results of the initial request can be seen by sending\n another DescribeRegistries request with the returned nextToken\n value. This value can be between 1 and 1000. If this parameter\n is not used, then DescribeRegistries returns up to 100\n results and a nextToken value, if applicable.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeRegistriesResponse": { + "type": "structure", + "members": { + "registries": { + "target": "com.amazonaws.ecrpublic#RegistryList", + "traits": { + "smithy.api#documentation": "

          An object containing the details for a public registry.

          ", + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value to include in a future\n DescribeRepositories request. When the results of a\n DescribeRepositories request exceed maxResults, this value can\n be used to retrieve the next page of results. This value is null when there\n are no more results to return.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeRepositories": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#DescribeRepositoriesRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#DescribeRepositoriesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes repositories in a public registry.

          ", + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "items": "repositories", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.ecrpublic#DescribeRepositoriesRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry that contains the repositories to be\n described. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryNames": { + "target": "com.amazonaws.ecrpublic#RepositoryNameList", + "traits": { + "smithy.api#documentation": "

          A list of repositories to describe. If this parameter is omitted, then all repositories\n in a registry are described.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n DescribeRepositories request where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value. This value is\n null when there are no more results to return. This option cannot be used\n when you specify repositories with repositoryNames.

          \n \n

          This token should be treated as an opaque identifier that is only used to retrieve the next items in a list and not for other programmatic purposes.

          \n
          " + } + }, + "maxResults": { + "target": "com.amazonaws.ecrpublic#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of repository results returned by DescribeRepositories\n in paginated output. When this parameter is used, DescribeRepositories only\n returns maxResults results in a single page along with a\n nextToken response element. The remaining results of the initial request\n can be seen by sending another DescribeRepositories request with the returned\n nextToken value. This value can be between 1 and\n 1000. If this parameter is not used, then DescribeRepositories\n returns up to 100 results and a nextToken value, if\n applicable. This option cannot be used when you specify repositories with\n repositoryNames.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#DescribeRepositoriesResponse": { + "type": "structure", + "members": { + "repositories": { + "target": "com.amazonaws.ecrpublic#RepositoryList", + "traits": { + "smithy.api#documentation": "

          A list of repository objects corresponding to valid repositories.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.ecrpublic#NextToken", + "traits": { + "smithy.api#documentation": "

          The nextToken value to include in a future\n DescribeRepositories request. When the results of a\n DescribeRepositories request exceed maxResults, this value can\n be used to retrieve the next page of results. This value is null when there\n are no more results to return.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#EmptyUploadException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified layer upload does not contain any layer parts.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#ExceptionMessage": { + "type": "string" + }, + "com.amazonaws.ecrpublic#ExpirationTimestamp": { + "type": "timestamp" + }, + "com.amazonaws.ecrpublic#ForceFlag": { + "type": "boolean" + }, + "com.amazonaws.ecrpublic#GetAuthorizationToken": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#GetAuthorizationTokenRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#GetAuthorizationTokenResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Retrieves an authorization token. An authorization token represents your IAM\n authentication credentials and can be used to access any Amazon ECR registry that your IAM\n principal has access to. The authorization token is valid for 12 hours. This API requires\n the ecr-public:GetAuthorizationToken and\n sts:GetServiceBearerToken permissions.

          " + } + }, + "com.amazonaws.ecrpublic#GetAuthorizationTokenRequest": { + "type": "structure", + "members": {} + }, + "com.amazonaws.ecrpublic#GetAuthorizationTokenResponse": { + "type": "structure", + "members": { + "authorizationData": { + "target": "com.amazonaws.ecrpublic#AuthorizationData", + "traits": { + "smithy.api#documentation": "

          An authorization token data object that corresponds to a public registry.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#GetRegistryCatalogData": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#GetRegistryCatalogDataRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#GetRegistryCatalogDataResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + } + ], + "traits": { + "smithy.api#documentation": "

          Retrieves catalog metadata for a public registry.

          " + } + }, + "com.amazonaws.ecrpublic#GetRegistryCatalogDataRequest": { + "type": "structure", + "members": {} + }, + "com.amazonaws.ecrpublic#GetRegistryCatalogDataResponse": { + "type": "structure", + "members": { + "registryCatalogData": { + "target": "com.amazonaws.ecrpublic#RegistryCatalogData", + "traits": { + "smithy.api#documentation": "

          The catalog metadata for the public registry.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#GetRepositoryCatalogData": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#GetRepositoryCatalogDataRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#GetRepositoryCatalogDataResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Retrieve catalog metadata for a repository in a public registry. This metadata is\n displayed publicly in the Amazon ECR Public Gallery.

          " + } + }, + "com.amazonaws.ecrpublic#GetRepositoryCatalogDataRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry that contains the repositories to be\n described. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to retrieve the catalog metadata for.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#GetRepositoryCatalogDataResponse": { + "type": "structure", + "members": { + "catalogData": { + "target": "com.amazonaws.ecrpublic#RepositoryCatalogData", + "traits": { + "smithy.api#documentation": "

          The catalog metadata for the repository.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#GetRepositoryPolicy": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#GetRepositoryPolicyRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#GetRepositoryPolicyResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryPolicyNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Retrieves the repository policy for the specified repository.

          " + } + }, + "com.amazonaws.ecrpublic#GetRepositoryPolicyRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the repository.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository with the policy to retrieve.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#GetRepositoryPolicyResponse": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The registry ID associated with the request.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository name associated with the request.

          " + } + }, + "policyText": { + "target": "com.amazonaws.ecrpublic#RepositoryPolicyText", + "traits": { + "smithy.api#documentation": "

          The repository policy text associated with the repository. The policy text will be in\n JSON format.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#Image": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryIdOrAlias", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry containing the image.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository associated with the image.

          " + } + }, + "imageId": { + "target": "com.amazonaws.ecrpublic#ImageIdentifier", + "traits": { + "smithy.api#documentation": "

          An object containing the image tag and image digest associated with an image.

          " + } + }, + "imageManifest": { + "target": "com.amazonaws.ecrpublic#ImageManifest", + "traits": { + "smithy.api#documentation": "

          The image manifest associated with the image.

          " + } + }, + "imageManifestMediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The manifest media type of the image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing an Amazon ECR image.

          " + } + }, + "com.amazonaws.ecrpublic#ImageAlreadyExistsException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified image has already been pushed, and there were no changes to the manifest\n or image tag after the last push.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#ImageDetail": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry to which this image\n belongs.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to which this image belongs.

          " + } + }, + "imageDigest": { + "target": "com.amazonaws.ecrpublic#ImageDigest", + "traits": { + "smithy.api#documentation": "

          The sha256 digest of the image manifest.

          " + } + }, + "imageTags": { + "target": "com.amazonaws.ecrpublic#ImageTagList", + "traits": { + "smithy.api#documentation": "

          The list of tags associated with this image.

          " + } + }, + "imageSizeInBytes": { + "target": "com.amazonaws.ecrpublic#ImageSizeInBytes", + "traits": { + "smithy.api#documentation": "

          The size, in bytes, of the image in the repository.

          \n

          If the image is a manifest list, this will be the max size of all manifests in the\n list.

          \n \n

          Beginning with Docker version 1.9, the Docker client compresses image layers before\n pushing them to a V2 Docker registry. The output of the docker images\n command shows the uncompressed image size, so it may return a larger image size than the\n image sizes returned by DescribeImages.

          \n
          " + } + }, + "imagePushedAt": { + "target": "com.amazonaws.ecrpublic#PushTimestamp", + "traits": { + "smithy.api#documentation": "

          The date and time, expressed in standard JavaScript date format, at which the current\n image was pushed to the repository.

          " + } + }, + "imageManifestMediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The media type of the image manifest.

          " + } + }, + "artifactMediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The artifact media type of the image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that describes an image returned by a DescribeImages\n operation.

          " + } + }, + "com.amazonaws.ecrpublic#ImageDetailList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#ImageDetail" + } + }, + "com.amazonaws.ecrpublic#ImageDigest": { + "type": "string" + }, + "com.amazonaws.ecrpublic#ImageDigestDoesNotMatchException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified image digest does not match the digest that Amazon ECR calculated for the\n image.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#ImageFailure": { + "type": "structure", + "members": { + "imageId": { + "target": "com.amazonaws.ecrpublic#ImageIdentifier", + "traits": { + "smithy.api#documentation": "

          The image ID associated with the failure.

          " + } + }, + "failureCode": { + "target": "com.amazonaws.ecrpublic#ImageFailureCode", + "traits": { + "smithy.api#documentation": "

          The code associated with the failure.

          " + } + }, + "failureReason": { + "target": "com.amazonaws.ecrpublic#ImageFailureReason", + "traits": { + "smithy.api#documentation": "

          The reason for the failure.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing an Amazon ECR image failure.

          " + } + }, + "com.amazonaws.ecrpublic#ImageFailureCode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "InvalidImageDigest", + "name": "InvalidImageDigest" + }, + { + "value": "InvalidImageTag", + "name": "InvalidImageTag" + }, + { + "value": "ImageTagDoesNotMatchDigest", + "name": "ImageTagDoesNotMatchDigest" + }, + { + "value": "ImageNotFound", + "name": "ImageNotFound" + }, + { + "value": "MissingDigestAndTag", + "name": "MissingDigestAndTag" + }, + { + "value": "ImageReferencedByManifestList", + "name": "ImageReferencedByManifestList" + }, + { + "value": "KmsError", + "name": "KmsError" + } + ] + } + }, + "com.amazonaws.ecrpublic#ImageFailureList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#ImageFailure" + } + }, + "com.amazonaws.ecrpublic#ImageFailureReason": { + "type": "string" + }, + "com.amazonaws.ecrpublic#ImageIdentifier": { + "type": "structure", + "members": { + "imageDigest": { + "target": "com.amazonaws.ecrpublic#ImageDigest", + "traits": { + "smithy.api#documentation": "

          The sha256 digest of the image manifest.

          " + } + }, + "imageTag": { + "target": "com.amazonaws.ecrpublic#ImageTag", + "traits": { + "smithy.api#documentation": "

          The tag used for the image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object with identifying information for an Amazon ECR image.

          " + } + }, + "com.amazonaws.ecrpublic#ImageIdentifierList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#ImageIdentifier" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.ecrpublic#ImageManifest": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 4194304 + } + } + }, + "com.amazonaws.ecrpublic#ImageNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The image requested does not exist in the specified repository.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#ImageSizeInBytes": { + "type": "long", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.ecrpublic#ImageTag": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 300 + } + } + }, + "com.amazonaws.ecrpublic#ImageTagAlreadyExistsException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified image is tagged with a tag that already exists. The repository is\n configured for tag immutability.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#ImageTagDetail": { + "type": "structure", + "members": { + "imageTag": { + "target": "com.amazonaws.ecrpublic#ImageTag", + "traits": { + "smithy.api#documentation": "

          The tag associated with the image.

          " + } + }, + "createdAt": { + "target": "com.amazonaws.ecrpublic#CreationTimestamp", + "traits": { + "smithy.api#documentation": "

          The time stamp indicating when the image tag was created.

          " + } + }, + "imageDetail": { + "target": "com.amazonaws.ecrpublic#ReferencedImageDetail", + "traits": { + "smithy.api#documentation": "

          An object that describes the details of an image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing the image tag details for an image.

          " + } + }, + "com.amazonaws.ecrpublic#ImageTagDetailList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#ImageTagDetail" + } + }, + "com.amazonaws.ecrpublic#ImageTagList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#ImageTag" + } + }, + "com.amazonaws.ecrpublic#InitiateLayerUpload": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#InitiateLayerUploadRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#InitiateLayerUploadResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RegistryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + } + ], + "traits": { + "smithy.api#documentation": "

          Notifies Amazon ECR that you intend to upload an image layer.

          \n

          When an image is pushed, the InitiateLayerUpload API is called once per image layer that\n has not already been uploaded. Whether or not an image layer has been uploaded is\n determined by the BatchCheckLayerAvailability API action.

          \n \n

          This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

          \n
          " + } + }, + "com.amazonaws.ecrpublic#InitiateLayerUploadRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryIdOrAlias", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry to which you intend to upload layers.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to which you intend to upload layers.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#InitiateLayerUploadResponse": { + "type": "structure", + "members": { + "uploadId": { + "target": "com.amazonaws.ecrpublic#UploadId", + "traits": { + "smithy.api#documentation": "

          The upload ID for the layer upload. This parameter is passed to further UploadLayerPart and CompleteLayerUpload operations.

          " + } + }, + "partSize": { + "target": "com.amazonaws.ecrpublic#PartSize", + "traits": { + "smithy.api#documentation": "

          The size, in bytes, that Amazon ECR expects future layer part uploads to be.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#InvalidLayerException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The layer digest calculation performed by Amazon ECR upon receipt of the image layer does not\n match the digest specified.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#InvalidLayerPartException": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the layer part.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository.

          " + } + }, + "uploadId": { + "target": "com.amazonaws.ecrpublic#UploadId", + "traits": { + "smithy.api#documentation": "

          The upload ID associated with the layer part.

          " + } + }, + "lastValidByteReceived": { + "target": "com.amazonaws.ecrpublic#PartSize", + "traits": { + "smithy.api#documentation": "

          The position of the last byte of the layer part.

          " + } + }, + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The layer part size is not valid, or the first byte specified is not consecutive to the\n last byte of a previous layer part upload.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#InvalidParameterException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified parameter is invalid. Review the available parameters for the API\n request.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#Layer": { + "type": "structure", + "members": { + "layerDigest": { + "target": "com.amazonaws.ecrpublic#LayerDigest", + "traits": { + "smithy.api#documentation": "

          The sha256 digest of the image layer.

          " + } + }, + "layerAvailability": { + "target": "com.amazonaws.ecrpublic#LayerAvailability", + "traits": { + "smithy.api#documentation": "

          The availability status of the image layer.

          " + } + }, + "layerSize": { + "target": "com.amazonaws.ecrpublic#LayerSizeInBytes", + "traits": { + "smithy.api#documentation": "

          The size, in bytes, of the image layer.

          " + } + }, + "mediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The media type of the layer, such as\n application/vnd.docker.image.rootfs.diff.tar.gzip or\n application/vnd.oci.image.layer.v1.tar+gzip.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing an Amazon ECR image layer.

          " + } + }, + "com.amazonaws.ecrpublic#LayerAlreadyExistsException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The image layer already exists in the associated repository.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#LayerAvailability": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "AVAILABLE", + "name": "AVAILABLE" + }, + { + "value": "UNAVAILABLE", + "name": "UNAVAILABLE" + } + ] + } + }, + "com.amazonaws.ecrpublic#LayerDigest": { + "type": "string", + "traits": { + "smithy.api#pattern": "[a-zA-Z0-9-_+.]+:[a-fA-F0-9]+" + } + }, + "com.amazonaws.ecrpublic#LayerDigestList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#LayerDigest" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.ecrpublic#LayerFailure": { + "type": "structure", + "members": { + "layerDigest": { + "target": "com.amazonaws.ecrpublic#BatchedOperationLayerDigest", + "traits": { + "smithy.api#documentation": "

          The layer digest associated with the failure.

          " + } + }, + "failureCode": { + "target": "com.amazonaws.ecrpublic#LayerFailureCode", + "traits": { + "smithy.api#documentation": "

          The failure code associated with the failure.

          " + } + }, + "failureReason": { + "target": "com.amazonaws.ecrpublic#LayerFailureReason", + "traits": { + "smithy.api#documentation": "

          The reason for the failure.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing an Amazon ECR image layer failure.

          " + } + }, + "com.amazonaws.ecrpublic#LayerFailureCode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "InvalidLayerDigest", + "name": "InvalidLayerDigest" + }, + { + "value": "MissingLayerDigest", + "name": "MissingLayerDigest" + } + ] + } + }, + "com.amazonaws.ecrpublic#LayerFailureList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#LayerFailure" + } + }, + "com.amazonaws.ecrpublic#LayerFailureReason": { + "type": "string" + }, + "com.amazonaws.ecrpublic#LayerList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#Layer" + } + }, + "com.amazonaws.ecrpublic#LayerPartBlob": { + "type": "blob", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20971520 + } + } + }, + "com.amazonaws.ecrpublic#LayerPartTooSmallException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          Layer parts must be at least 5 MiB in size.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#LayerSizeInBytes": { + "type": "long", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.ecrpublic#LayersNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified layers could not be found, or the specified layer is not valid for this\n repository.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#LimitExceededException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The operation did not succeed because it would have exceeded a service limit for your\n account. For more information, see Amazon ECR Service Quotas in the\n Amazon Elastic Container Registry User Guide.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#LogoImageBlob": { + "type": "blob", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 512000 + } + } + }, + "com.amazonaws.ecrpublic#MarketplaceCertified": { + "type": "boolean", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.ecrpublic#MaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 1000 + } + } + }, + "com.amazonaws.ecrpublic#MediaType": { + "type": "string" + }, + "com.amazonaws.ecrpublic#NextToken": { + "type": "string" + }, + "com.amazonaws.ecrpublic#OperatingSystem": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 50 + } + } + }, + "com.amazonaws.ecrpublic#OperatingSystemList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#OperatingSystem" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 50 + } + } + }, + "com.amazonaws.ecrpublic#PartSize": { + "type": "long", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.ecrpublic#PrimaryRegistryAliasFlag": { + "type": "boolean" + }, + "com.amazonaws.ecrpublic#PushTimestamp": { + "type": "timestamp" + }, + "com.amazonaws.ecrpublic#PutImage": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#PutImageRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#PutImageResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#ImageAlreadyExistsException" + }, + { + "target": "com.amazonaws.ecrpublic#ImageDigestDoesNotMatchException" + }, + { + "target": "com.amazonaws.ecrpublic#ImageTagAlreadyExistsException" + }, + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#LayersNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#LimitExceededException" + }, + { + "target": 
"com.amazonaws.ecrpublic#ReferencedImagesNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RegistryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates or updates the image manifest and tags associated with an image.

          \n

          When an image is pushed and all new image layers have been uploaded, the PutImage API is\n called once to create or update the image manifest and the tags associated with the\n image.

          \n \n \n

          This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

          \n
          " + } + }, + "com.amazonaws.ecrpublic#PutImageRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryIdOrAlias", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the repository in\n which to put the image. If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository in which to put the image.

          ", + "smithy.api#required": {} + } + }, + "imageManifest": { + "target": "com.amazonaws.ecrpublic#ImageManifest", + "traits": { + "smithy.api#documentation": "

          The image manifest corresponding to the image to be uploaded.

          ", + "smithy.api#required": {} + } + }, + "imageManifestMediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The media type of the image manifest. If you push an image manifest that does not\n contain the mediaType field, you must specify the\n imageManifestMediaType in the request.

          " + } + }, + "imageTag": { + "target": "com.amazonaws.ecrpublic#ImageTag", + "traits": { + "smithy.api#documentation": "

          The tag to associate with the image. This parameter is required for images that use the\n Docker Image Manifest V2 Schema 2 or Open Container Initiative (OCI) formats.

          " + } + }, + "imageDigest": { + "target": "com.amazonaws.ecrpublic#ImageDigest", + "traits": { + "smithy.api#documentation": "

          The image digest of the image manifest corresponding to the image.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#PutImageResponse": { + "type": "structure", + "members": { + "image": { + "target": "com.amazonaws.ecrpublic#Image", + "traits": { + "smithy.api#documentation": "

          Details of the image uploaded.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#PutRegistryCatalogData": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#PutRegistryCatalogDataRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#PutRegistryCatalogDataResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates or updates the catalog data for a public registry.

          " + } + }, + "com.amazonaws.ecrpublic#PutRegistryCatalogDataRequest": { + "type": "structure", + "members": { + "displayName": { + "target": "com.amazonaws.ecrpublic#RegistryDisplayName", + "traits": { + "smithy.api#documentation": "

          The display name for a public registry. The display name is shown as the repository\n author in the Amazon ECR Public Gallery.

          \n \n

          The registry display name is only publicly visible in the Amazon ECR Public Gallery for\n verified accounts.

          \n
          " + } + } + } + }, + "com.amazonaws.ecrpublic#PutRegistryCatalogDataResponse": { + "type": "structure", + "members": { + "registryCatalogData": { + "target": "com.amazonaws.ecrpublic#RegistryCatalogData", + "traits": { + "smithy.api#documentation": "

          The catalog data for the public registry.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#PutRepositoryCatalogData": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#PutRepositoryCatalogDataRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#PutRepositoryCatalogDataResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates or updates the catalog data for a repository in a public registry.

          " + } + }, + "com.amazonaws.ecrpublic#PutRepositoryCatalogDataRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry the repository is in.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to create or update the catalog data for.

          ", + "smithy.api#required": {} + } + }, + "catalogData": { + "target": "com.amazonaws.ecrpublic#RepositoryCatalogDataInput", + "traits": { + "smithy.api#documentation": "

          An object containing the catalog data for a repository. This data is publicly visible in\n the Amazon ECR Public Gallery.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#PutRepositoryCatalogDataResponse": { + "type": "structure", + "members": { + "catalogData": { + "target": "com.amazonaws.ecrpublic#RepositoryCatalogData", + "traits": { + "smithy.api#documentation": "

          The catalog data for the repository.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#ReferencedImageDetail": { + "type": "structure", + "members": { + "imageDigest": { + "target": "com.amazonaws.ecrpublic#ImageDigest", + "traits": { + "smithy.api#documentation": "

          The sha256 digest of the image manifest.

          " + } + }, + "imageSizeInBytes": { + "target": "com.amazonaws.ecrpublic#ImageSizeInBytes", + "traits": { + "smithy.api#documentation": "

          The size, in bytes, of the image in the repository.

          \n

          If the image is a manifest list, this will be the max size of all manifests in the\n list.

          \n \n

          Beginning with Docker version 1.9, the Docker client compresses image layers before\n pushing them to a V2 Docker registry. The output of the docker images\n command shows the uncompressed image size, so it may return a larger image size than the\n image sizes returned by DescribeImages.

          \n
          " + } + }, + "imagePushedAt": { + "target": "com.amazonaws.ecrpublic#PushTimestamp", + "traits": { + "smithy.api#documentation": "

          The date and time, expressed in standard JavaScript date format, at which the current\n image tag was pushed to the repository.

          " + } + }, + "imageManifestMediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The media type of the image manifest.

          " + } + }, + "artifactMediaType": { + "target": "com.amazonaws.ecrpublic#MediaType", + "traits": { + "smithy.api#documentation": "

          The artifact media type of the image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that describes the image tag details returned by a DescribeImageTags action.

          " + } + }, + "com.amazonaws.ecrpublic#ReferencedImagesNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The manifest list is referencing an image that does not exist.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#Registry": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry. If you do not specify a registry, the default public registry is assumed.

          ", + "smithy.api#required": {} + } + }, + "registryArn": { + "target": "com.amazonaws.ecrpublic#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the public registry.

          ", + "smithy.api#required": {} + } + }, + "registryUri": { + "target": "com.amazonaws.ecrpublic#Url", + "traits": { + "smithy.api#documentation": "

          The URI of a public registry. The URI contains a universal prefix and the registry\n alias.

          ", + "smithy.api#required": {} + } + }, + "verified": { + "target": "com.amazonaws.ecrpublic#RegistryVerified", + "traits": { + "smithy.api#documentation": "

          Whether the account is verified. This indicates whether the account is an AWS\n Marketplace vendor. If an account is verified, each public repository will receive a\n verified account badge on the Amazon ECR Public Gallery.

          ", + "smithy.api#required": {} + } + }, + "aliases": { + "target": "com.amazonaws.ecrpublic#RegistryAliasList", + "traits": { + "smithy.api#documentation": "

          An array of objects representing the aliases for a public registry.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The details of a public registry.

          " + } + }, + "com.amazonaws.ecrpublic#RegistryAlias": { + "type": "structure", + "members": { + "name": { + "target": "com.amazonaws.ecrpublic#RegistryAliasName", + "traits": { + "smithy.api#documentation": "

          The name of the registry alias.

          ", + "smithy.api#required": {} + } + }, + "status": { + "target": "com.amazonaws.ecrpublic#RegistryAliasStatus", + "traits": { + "smithy.api#documentation": "

          The status of the registry alias.

          ", + "smithy.api#required": {} + } + }, + "primaryRegistryAlias": { + "target": "com.amazonaws.ecrpublic#PrimaryRegistryAliasFlag", + "traits": { + "smithy.api#documentation": "

          Whether or not the registry alias is the primary alias for the registry. If true, the\n alias is the primary registry alias and is displayed in both the repository URL and the\n image URI used in the docker pull commands on the Amazon ECR Public Gallery.

          \n \n

          A registry alias that is not the primary registry alias can be used in the repository\n URI in a docker pull command.

          \n
          ", + "smithy.api#required": {} + } + }, + "defaultRegistryAlias": { + "target": "com.amazonaws.ecrpublic#DefaultRegistryAliasFlag", + "traits": { + "smithy.api#documentation": "

          Whether or not the registry alias is the default alias for the registry. When the first\n public repository is created, your public registry is assigned a default registry\n alias.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing the aliases for a public registry. A public registry is given an\n alias upon creation but a custom alias can be set using the Amazon ECR console. For more\n information, see Registries in the\n Amazon Elastic Container Registry User Guide.

          " + } + }, + "com.amazonaws.ecrpublic#RegistryAliasList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#RegistryAlias" + } + }, + "com.amazonaws.ecrpublic#RegistryAliasName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 2, + "max": 50 + }, + "smithy.api#pattern": "[a-z][a-z0-9]+(?:[._-][a-z0-9]+)*" + } + }, + "com.amazonaws.ecrpublic#RegistryAliasStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ACTIVE", + "name": "ACTIVE" + }, + { + "value": "PENDING", + "name": "PENDING" + }, + { + "value": "REJECTED", + "name": "REJECTED" + } + ] + } + }, + "com.amazonaws.ecrpublic#RegistryCatalogData": { + "type": "structure", + "members": { + "displayName": { + "target": "com.amazonaws.ecrpublic#RegistryDisplayName", + "traits": { + "smithy.api#documentation": "

          The display name for a public registry. This appears on the Amazon ECR Public Gallery.

          \n \n

          Only accounts that have the verified account badge can have a registry display\n name.

          \n
          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The metadata for a public registry.

          " + } + }, + "com.amazonaws.ecrpublic#RegistryDisplayName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.ecrpublic#RegistryId": { + "type": "string", + "traits": { + "smithy.api#pattern": "[0-9]{12}" + } + }, + "com.amazonaws.ecrpublic#RegistryIdOrAlias": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + } + } + }, + "com.amazonaws.ecrpublic#RegistryList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#Registry" + } + }, + "com.amazonaws.ecrpublic#RegistryNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The registry does not exist.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#RegistryVerified": { + "type": "boolean", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.ecrpublic#Repository": { + "type": "structure", + "members": { + "repositoryArn": { + "target": "com.amazonaws.ecrpublic#Arn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) that identifies the repository. The ARN contains the arn:aws:ecr namespace, followed by the region of the repository, AWS account ID of the repository owner, repository namespace, and repository name. For example, arn:aws:ecr:region:012345678910:repository/test.

          " + } + }, + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the public registry that contains the\n repository.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository.

          " + } + }, + "repositoryUri": { + "target": "com.amazonaws.ecrpublic#Url", + "traits": { + "smithy.api#documentation": "

          The URI for the repository. You can use this URI for container image push\n and pull operations.

          " + } + }, + "createdAt": { + "target": "com.amazonaws.ecrpublic#CreationTimestamp", + "traits": { + "smithy.api#documentation": "

          The date and time, in JavaScript date format, when the repository was created.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing a repository.

          " + } + }, + "com.amazonaws.ecrpublic#RepositoryAlreadyExistsException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified repository already exists in the specified registry.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#RepositoryCatalogData": { + "type": "structure", + "members": { + "description": { + "target": "com.amazonaws.ecrpublic#RepositoryDescription", + "traits": { + "smithy.api#documentation": "

          The short description of the repository.

          " + } + }, + "architectures": { + "target": "com.amazonaws.ecrpublic#ArchitectureList", + "traits": { + "smithy.api#documentation": "

          The architecture tags that are associated with the repository.

          \n \n

          Only supported operating system tags appear publicly in the Amazon ECR Public Gallery. For\n more information, see RepositoryCatalogDataInput.

          \n
          " + } + }, + "operatingSystems": { + "target": "com.amazonaws.ecrpublic#OperatingSystemList", + "traits": { + "smithy.api#documentation": "

          The operating system tags that are associated with the repository.

          \n \n

          Only supported operating system tags appear publicly in the Amazon ECR Public Gallery. For\n more information, see RepositoryCatalogDataInput.

          \n
          " + } + }, + "logoUrl": { + "target": "com.amazonaws.ecrpublic#ResourceUrl", + "traits": { + "smithy.api#documentation": "

          The URL containing the logo associated with the repository.

          " + } + }, + "aboutText": { + "target": "com.amazonaws.ecrpublic#AboutText", + "traits": { + "smithy.api#documentation": "

          The longform description of the contents of the repository. This text appears in the\n repository details on the Amazon ECR Public Gallery.

          " + } + }, + "usageText": { + "target": "com.amazonaws.ecrpublic#UsageText", + "traits": { + "smithy.api#documentation": "

          The longform usage details of the contents of the repository. The usage text provides\n context for users of the repository.

          " + } + }, + "marketplaceCertified": { + "target": "com.amazonaws.ecrpublic#MarketplaceCertified", + "traits": { + "smithy.api#documentation": "

          Whether or not the repository is certified by AWS Marketplace.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The catalog data for a repository. This data is publicly visible in the\n Amazon ECR Public Gallery.

          " + } + }, + "com.amazonaws.ecrpublic#RepositoryCatalogDataInput": { + "type": "structure", + "members": { + "description": { + "target": "com.amazonaws.ecrpublic#RepositoryDescription", + "traits": { + "smithy.api#documentation": "

          A short description of the contents of the repository. This text appears in both the\n image details and also when searching for repositories on the Amazon ECR Public Gallery.

          " + } + }, + "architectures": { + "target": "com.amazonaws.ecrpublic#ArchitectureList", + "traits": { + "smithy.api#documentation": "

          The system architecture that the images in the repository are compatible with. On the\n Amazon ECR Public Gallery, the following supported architectures will appear as badges on the\n repository and are used as search filters.

          \n
            \n
          • \n

            \n Linux\n

            \n
          • \n
          • \n

            \n Windows\n

            \n
          • \n
          \n \n

          If an unsupported tag is added to your repository catalog data, it will be associated\n with the repository and can be retrieved using the API but will not be discoverable in\n the Amazon ECR Public Gallery.

          \n
          " + } + }, + "operatingSystems": { + "target": "com.amazonaws.ecrpublic#OperatingSystemList", + "traits": { + "smithy.api#documentation": "

          The operating systems that the images in the repository are compatible with. On the\n Amazon ECR Public Gallery, the following supported operating systems will appear as badges on\n the repository and are used as search filters.

          \n
            \n
          • \n

            \n Linux\n

            \n
          • \n
          • \n

            \n Windows\n

            \n
          • \n
          \n \n

          If an unsupported tag is added to your repository catalog data, it will be associated\n with the repository and can be retrieved using the API but will not be discoverable in\n the Amazon ECR Public Gallery.

          \n
          " + } + }, + "logoImageBlob": { + "target": "com.amazonaws.ecrpublic#LogoImageBlob", + "traits": { + "smithy.api#documentation": "

          The base64-encoded repository logo payload.

          \n \n

          The repository logo is only publicly visible in the Amazon ECR Public Gallery for verified\n accounts.

          \n
          " + } + }, + "aboutText": { + "target": "com.amazonaws.ecrpublic#AboutText", + "traits": { + "smithy.api#documentation": "

          A detailed description of the contents of the repository. It is publicly visible in the\n Amazon ECR Public Gallery. The text must be in markdown format.

          " + } + }, + "usageText": { + "target": "com.amazonaws.ecrpublic#UsageText", + "traits": { + "smithy.api#documentation": "

          Detailed information on how to use the contents of the repository. It is publicly\n visible in the Amazon ECR Public Gallery. The usage text provides context, support information,\n and additional usage details for users of the repository. The text must be in markdown\n format.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object containing the catalog data for a repository. This data is publicly visible in\n the Amazon ECR Public Gallery.

          " + } + }, + "com.amazonaws.ecrpublic#RepositoryDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + } + } + }, + "com.amazonaws.ecrpublic#RepositoryList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#Repository" + } + }, + "com.amazonaws.ecrpublic#RepositoryName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 2, + "max": 205 + }, + "smithy.api#pattern": "(?:[a-z0-9]+(?:[._-][a-z0-9]+)*/)*[a-z0-9]+(?:[._-][a-z0-9]+)*" + } + }, + "com.amazonaws.ecrpublic#RepositoryNameList": { + "type": "list", + "member": { + "target": "com.amazonaws.ecrpublic#RepositoryName" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.ecrpublic#RepositoryNotEmptyException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified repository contains images. To delete a repository that contains images,\n you must force the deletion with the force parameter.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#RepositoryNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified repository could not be found. Check the spelling of the specified\n repository and ensure that you are performing operations on the correct registry.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#RepositoryPolicyNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The specified repository and registry combination does not have an associated repository\n policy.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#RepositoryPolicyText": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 10240 + } + } + }, + "com.amazonaws.ecrpublic#ResourceUrl": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + } + } + }, + "com.amazonaws.ecrpublic#ServerException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          These errors are usually caused by a server-side issue.

          ", + "smithy.api#error": "server" + } + }, + "com.amazonaws.ecrpublic#SetRepositoryPolicy": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#SetRepositoryPolicyRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#SetRepositoryPolicyResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Applies a repository policy to the specified public repository to control access\n permissions. For more information, see Amazon ECR Repository\n Policies in the Amazon Elastic Container Registry User Guide.

          " + } + }, + "com.amazonaws.ecrpublic#SetRepositoryPolicyRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry that contains the repository.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to receive the policy.

          ", + "smithy.api#required": {} + } + }, + "policyText": { + "target": "com.amazonaws.ecrpublic#RepositoryPolicyText", + "traits": { + "smithy.api#documentation": "

          The JSON repository policy text to apply to the repository. For more information, see\n Amazon ECR Repository\n Policies in the Amazon Elastic Container Registry User Guide.

          ", + "smithy.api#required": {} + } + }, + "force": { + "target": "com.amazonaws.ecrpublic#ForceFlag", + "traits": { + "smithy.api#documentation": "

          If the policy you are attempting to set on a repository policy would prevent you from\n setting another policy in the future, you must force the SetRepositoryPolicy operation. This is intended to prevent accidental\n repository lock outs.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#SetRepositoryPolicyResponse": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The registry ID associated with the request.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository name associated with the request.

          " + } + }, + "policyText": { + "target": "com.amazonaws.ecrpublic#RepositoryPolicyText", + "traits": { + "smithy.api#documentation": "

          The JSON repository policy text applied to the repository.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#SpencerFrontendService": { + "type": "service", + "version": "2020-10-30", + "operations": [ + { + "target": "com.amazonaws.ecrpublic#BatchCheckLayerAvailability" + }, + { + "target": "com.amazonaws.ecrpublic#BatchDeleteImage" + }, + { + "target": "com.amazonaws.ecrpublic#CompleteLayerUpload" + }, + { + "target": "com.amazonaws.ecrpublic#CreateRepository" + }, + { + "target": "com.amazonaws.ecrpublic#DeleteRepository" + }, + { + "target": "com.amazonaws.ecrpublic#DeleteRepositoryPolicy" + }, + { + "target": "com.amazonaws.ecrpublic#DescribeImages" + }, + { + "target": "com.amazonaws.ecrpublic#DescribeImageTags" + }, + { + "target": "com.amazonaws.ecrpublic#DescribeRegistries" + }, + { + "target": "com.amazonaws.ecrpublic#DescribeRepositories" + }, + { + "target": "com.amazonaws.ecrpublic#GetAuthorizationToken" + }, + { + "target": "com.amazonaws.ecrpublic#GetRegistryCatalogData" + }, + { + "target": "com.amazonaws.ecrpublic#GetRepositoryCatalogData" + }, + { + "target": "com.amazonaws.ecrpublic#GetRepositoryPolicy" + }, + { + "target": "com.amazonaws.ecrpublic#InitiateLayerUpload" + }, + { + "target": "com.amazonaws.ecrpublic#PutImage" + }, + { + "target": "com.amazonaws.ecrpublic#PutRegistryCatalogData" + }, + { + "target": "com.amazonaws.ecrpublic#PutRepositoryCatalogData" + }, + { + "target": "com.amazonaws.ecrpublic#SetRepositoryPolicy" + }, + { + "target": "com.amazonaws.ecrpublic#UploadLayerPart" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "ECR PUBLIC", + "arnNamespace": "ecr-public", + "cloudFormationName": "ECRPUBLIC", + "cloudTrailEventSource": "ecrpublic.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "ecr-public" + }, + "aws.protocols#awsJson1_1": {}, + "smithy.api#documentation": "Amazon Elastic Container Registry Public\n

          Amazon Elastic Container Registry (Amazon ECR) is a managed container image registry service. Amazon ECR provides both\n public and private registries to host your container images. You can use the familiar\n Docker CLI, or their preferred client, to push, pull, and manage images. Amazon ECR provides a\n secure, scalable, and reliable registry for your Docker or Open Container Initiative (OCI)\n images. Amazon ECR supports public repositories with this API. For information about the Amazon ECR\n API for private repositories, see Amazon Elastic Container Registry API Reference.

          ", + "smithy.api#title": "Amazon Elastic Container Registry Public", + "smithy.api#xmlNamespace": { + "uri": "http://ecr-public.amazonaws.com/doc/2020-12-02/" + } + } + }, + "com.amazonaws.ecrpublic#UnsupportedCommandException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The action is not supported in this Region.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#UploadId": { + "type": "string", + "traits": { + "smithy.api#pattern": "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}" + } + }, + "com.amazonaws.ecrpublic#UploadLayerPart": { + "type": "operation", + "input": { + "target": "com.amazonaws.ecrpublic#UploadLayerPartRequest" + }, + "output": { + "target": "com.amazonaws.ecrpublic#UploadLayerPartResponse" + }, + "errors": [ + { + "target": "com.amazonaws.ecrpublic#InvalidLayerPartException" + }, + { + "target": "com.amazonaws.ecrpublic#InvalidParameterException" + }, + { + "target": "com.amazonaws.ecrpublic#LimitExceededException" + }, + { + "target": "com.amazonaws.ecrpublic#RegistryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#RepositoryNotFoundException" + }, + { + "target": "com.amazonaws.ecrpublic#ServerException" + }, + { + "target": "com.amazonaws.ecrpublic#UnsupportedCommandException" + }, + { + "target": "com.amazonaws.ecrpublic#UploadNotFoundException" + } + ], + "traits": { + "smithy.api#documentation": "

          Uploads an image layer part to Amazon ECR.

          \n

          When an image is pushed, each new image layer is uploaded in parts. The maximum size of\n each image layer part can be 20971520 bytes (or about 20MB). The UploadLayerPart API is\n called once per each new image layer part.

          \n \n

          This operation is used by the Amazon ECR proxy and is not generally used by customers for pulling and pushing images. In most cases, you should use the docker CLI to pull, tag, and push images.

          \n
          " + } + }, + "com.amazonaws.ecrpublic#UploadLayerPartRequest": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryIdOrAlias", + "traits": { + "smithy.api#documentation": "

          The AWS account ID associated with the registry to which you are uploading layer parts.\n If you do not specify a registry, the default public registry is assumed.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The name of the repository to which you are uploading layer parts.

          ", + "smithy.api#required": {} + } + }, + "uploadId": { + "target": "com.amazonaws.ecrpublic#UploadId", + "traits": { + "smithy.api#documentation": "

          The upload ID from a previous InitiateLayerUpload operation to\n associate with the layer part upload.

          ", + "smithy.api#required": {} + } + }, + "partFirstByte": { + "target": "com.amazonaws.ecrpublic#PartSize", + "traits": { + "smithy.api#documentation": "

          The position of the first byte of the layer part within the overall image layer.

          ", + "smithy.api#required": {} + } + }, + "partLastByte": { + "target": "com.amazonaws.ecrpublic#PartSize", + "traits": { + "smithy.api#documentation": "

          The position of the last byte of the layer part within the overall image layer.

          ", + "smithy.api#required": {} + } + }, + "layerPartBlob": { + "target": "com.amazonaws.ecrpublic#LayerPartBlob", + "traits": { + "smithy.api#documentation": "

          The base64-encoded layer part payload.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.ecrpublic#UploadLayerPartResponse": { + "type": "structure", + "members": { + "registryId": { + "target": "com.amazonaws.ecrpublic#RegistryId", + "traits": { + "smithy.api#documentation": "

          The registry ID associated with the request.

          " + } + }, + "repositoryName": { + "target": "com.amazonaws.ecrpublic#RepositoryName", + "traits": { + "smithy.api#documentation": "

          The repository name associated with the request.

          " + } + }, + "uploadId": { + "target": "com.amazonaws.ecrpublic#UploadId", + "traits": { + "smithy.api#documentation": "

          The upload ID associated with the request.

          " + } + }, + "lastByteReceived": { + "target": "com.amazonaws.ecrpublic#PartSize", + "traits": { + "smithy.api#documentation": "

          The integer value of the last byte received in the request.

          " + } + } + } + }, + "com.amazonaws.ecrpublic#UploadNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.ecrpublic#ExceptionMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The upload could not be found, or the specified upload ID is not valid for this\n repository.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.ecrpublic#Url": { + "type": "string" + }, + "com.amazonaws.ecrpublic#UsageText": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 10240 + } + } + } + } +} diff --git a/codegen/sdk-codegen/aws-models/eks.2017-11-01.json b/codegen/sdk-codegen/aws-models/eks.2017-11-01.json index 4618c426e9b0..11d59ad8ddd2 100644 --- a/codegen/sdk-codegen/aws-models/eks.2017-11-01.json +++ b/codegen/sdk-codegen/aws-models/eks.2017-11-01.json @@ -49,6 +49,9 @@ "type": "service", "version": "2017-11-01", "operations": [ + { + "target": "com.amazonaws.eks#CreateAddon" + }, { "target": "com.amazonaws.eks#CreateCluster" }, @@ -58,6 +61,9 @@ { "target": "com.amazonaws.eks#CreateNodegroup" }, + { + "target": "com.amazonaws.eks#DeleteAddon" + }, { "target": "com.amazonaws.eks#DeleteCluster" }, @@ -67,6 +73,12 @@ { "target": "com.amazonaws.eks#DeleteNodegroup" }, + { + "target": "com.amazonaws.eks#DescribeAddon" + }, + { + "target": "com.amazonaws.eks#DescribeAddonVersions" + }, { "target": "com.amazonaws.eks#DescribeCluster" }, @@ -79,6 +91,9 @@ { "target": "com.amazonaws.eks#DescribeUpdate" }, + { + "target": "com.amazonaws.eks#ListAddons" + }, { "target": "com.amazonaws.eks#ListClusters" }, @@ -100,6 +115,9 @@ { "target": "com.amazonaws.eks#UntagResource" }, + { + "target": "com.amazonaws.eks#UpdateAddon" + }, { "target": "com.amazonaws.eks#UpdateClusterConfig" }, @@ -128,6 +146,239 @@ "smithy.api#title": "Amazon Elastic Kubernetes Service" } }, + "com.amazonaws.eks#Addon": { + "type": "structure", + "members": { + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on.

          " + } + }, + "clusterName": { + "target": "com.amazonaws.eks#ClusterName", + "traits": { + "smithy.api#documentation": "

          The name of the cluster.

          " + } + }, + "status": { + "target": "com.amazonaws.eks#AddonStatus", + "traits": { + "smithy.api#documentation": "

          The status of the add-on.

          " + } + }, + "addonVersion": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The version of the add-on.

          " + } + }, + "health": { + "target": "com.amazonaws.eks#AddonHealth", + "traits": { + "smithy.api#documentation": "

          An object that represents the health of the add-on.

          " + } + }, + "addonArn": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the add-on.

          " + } + }, + "createdAt": { + "target": "com.amazonaws.eks#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the add-on was created.

          " + } + }, + "modifiedAt": { + "target": "com.amazonaws.eks#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the add-on was last modified.

          " + } + }, + "serviceAccountRoleArn": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that is bound to the Kubernetes service account used\n by the add-on.

          " + } + }, + "tags": { + "target": "com.amazonaws.eks#TagMap", + "traits": { + "smithy.api#documentation": "

          The metadata that you apply to the cluster to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Cluster tags do not propagate to any other resources associated with the\n cluster.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An Amazon EKS add-on.

          " + } + }, + "com.amazonaws.eks#AddonHealth": { + "type": "structure", + "members": { + "issues": { + "target": "com.amazonaws.eks#AddonIssueList", + "traits": { + "smithy.api#documentation": "

          An object that represents the add-on's health issues.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The health of the add-on.

          " + } + }, + "com.amazonaws.eks#AddonInfo": { + "type": "structure", + "members": { + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on.

          " + } + }, + "type": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The type of the add-on.

          " + } + }, + "addonVersions": { + "target": "com.amazonaws.eks#AddonVersionInfoList", + "traits": { + "smithy.api#documentation": "

          An object that represents information about available add-on versions and compatible\n Kubernetes versions.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about an add-on.

          " + } + }, + "com.amazonaws.eks#AddonIssue": { + "type": "structure", + "members": { + "code": { + "target": "com.amazonaws.eks#AddonIssueCode", + "traits": { + "smithy.api#documentation": "

          A code that describes the type of issue.

          " + } + }, + "message": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          A message that provides details about the issue and what might cause it.

          " + } + }, + "resourceIds": { + "target": "com.amazonaws.eks#StringList", + "traits": { + "smithy.api#documentation": "

          The resource IDs of the issue.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An issue related to an add-on.

          " + } + }, + "com.amazonaws.eks#AddonIssueCode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "AccessDenied", + "name": "ACCESS_DENIED" + }, + { + "value": "InternalFailure", + "name": "INTERNAL_FAILURE" + }, + { + "value": "ClusterUnreachable", + "name": "CLUSTER_UNREACHABLE" + }, + { + "value": "InsufficientNumberOfReplicas", + "name": "INSUFFICIENT_NUMBER_OF_REPLICAS" + }, + { + "value": "ConfigurationConflict", + "name": "CONFIGURATION_CONFLICT" + } + ] + } + }, + "com.amazonaws.eks#AddonIssueList": { + "type": "list", + "member": { + "target": "com.amazonaws.eks#AddonIssue" + } + }, + "com.amazonaws.eks#AddonStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CREATING" + }, + { + "value": "ACTIVE" + }, + { + "value": "CREATE_FAILED" + }, + { + "value": "UPDATING" + }, + { + "value": "DELETING" + }, + { + "value": "DELETE_FAILED" + }, + { + "value": "DEGRADED" + } + ] + } + }, + "com.amazonaws.eks#AddonVersionInfo": { + "type": "structure", + "members": { + "addonVersion": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The version of the add-on.

          " + } + }, + "architecture": { + "target": "com.amazonaws.eks#StringList", + "traits": { + "smithy.api#documentation": "

          The architectures that the version supports.

          " + } + }, + "compatibilities": { + "target": "com.amazonaws.eks#Compatibilities", + "traits": { + "smithy.api#documentation": "

          An object that represents the compatibilities of a version.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about an add-on version.

          " + } + }, + "com.amazonaws.eks#AddonVersionInfoList": { + "type": "list", + "member": { + "target": "com.amazonaws.eks#AddonVersionInfo" + } + }, + "com.amazonaws.eks#Addons": { + "type": "list", + "member": { + "target": "com.amazonaws.eks#AddonInfo" + } + }, "com.amazonaws.eks#AutoScalingGroup": { "type": "structure", "members": { @@ -185,6 +436,19 @@ } } }, + "com.amazonaws.eks#CapacityTypes": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ON_DEMAND" + }, + { + "value": "SPOT" + } + ] + } + }, "com.amazonaws.eks#Certificate": { "type": "structure", "members": { @@ -202,20 +466,23 @@ "com.amazonaws.eks#ClientException": { "type": "structure", "members": { + "clusterName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + } + }, "nodegroupName": { "target": "com.amazonaws.eks#String", "traits": { "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " } }, - "message": { + "addonName": { "target": "com.amazonaws.eks#String" }, - "clusterName": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " - } + "message": { + "target": "com.amazonaws.eks#String" } }, "traits": { @@ -227,16 +494,16 @@ "com.amazonaws.eks#Cluster": { "type": "structure", "members": { - "endpoint": { + "name": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The endpoint for your Kubernetes API server.

          " + "smithy.api#documentation": "

          The name of the cluster.

          " } }, - "resourcesVpcConfig": { - "target": "com.amazonaws.eks#VpcConfigResponse", + "arn": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have\n specific requirements to work properly with Kubernetes. For more information, see Cluster VPC\n Considerations and Cluster Security Group Considerations in the\n Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the cluster.

          " } }, "createdAt": { @@ -245,40 +512,40 @@ "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the cluster was created.

          " } }, - "status": { - "target": "com.amazonaws.eks#ClusterStatus", + "version": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The current status of the cluster.

          " + "smithy.api#documentation": "

          The Kubernetes server version for the cluster.

          " } }, - "kubernetesNetworkConfig": { - "target": "com.amazonaws.eks#KubernetesNetworkConfigResponse", + "endpoint": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          Network configuration settings for your cluster.

          " + "smithy.api#documentation": "

          The endpoint for your Kubernetes API server.

          " } }, - "certificateAuthority": { - "target": "com.amazonaws.eks#Certificate", + "roleArn": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The certificate-authority-data for your cluster.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that provides permissions for the Kubernetes control\n plane to make calls to AWS API operations on your behalf.

          " } }, - "logging": { - "target": "com.amazonaws.eks#Logging", + "resourcesVpcConfig": { + "target": "com.amazonaws.eks#VpcConfigResponse", "traits": { - "smithy.api#documentation": "

          The logging configuration for your cluster.

          " + "smithy.api#documentation": "

          The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have\n specific requirements to work properly with Kubernetes. For more information, see Cluster VPC\n Considerations and Cluster Security Group Considerations in the\n Amazon EKS User Guide.

          " } }, - "name": { - "target": "com.amazonaws.eks#String", + "kubernetesNetworkConfig": { + "target": "com.amazonaws.eks#KubernetesNetworkConfigResponse", "traits": { - "smithy.api#documentation": "

          The name of the cluster.

          " + "smithy.api#documentation": "

          The Kubernetes network configuration for the cluster.

          " } }, - "arn": { - "target": "com.amazonaws.eks#String", + "logging": { + "target": "com.amazonaws.eks#Logging", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the cluster.

          " + "smithy.api#documentation": "

          The logging configuration for your cluster.

          " } }, "identity": { @@ -287,22 +554,22 @@ "smithy.api#documentation": "

          The identity provider information for the cluster.

          " } }, - "roleArn": { - "target": "com.amazonaws.eks#String", + "status": { + "target": "com.amazonaws.eks#ClusterStatus", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that provides permissions for the Kubernetes control\n plane to make calls to AWS API operations on your behalf.

          " + "smithy.api#documentation": "

          The current status of the cluster.

          " } }, - "encryptionConfig": { - "target": "com.amazonaws.eks#EncryptionConfigList", + "certificateAuthority": { + "target": "com.amazonaws.eks#Certificate", "traits": { - "smithy.api#documentation": "

          The encryption configuration for the cluster.

          " + "smithy.api#documentation": "

          The certificate-authority-data for your cluster.

          " } }, - "tags": { - "target": "com.amazonaws.eks#TagMap", + "clientRequestToken": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The metadata that you apply to the cluster to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Cluster tags do not propagate to any other resources associated with the\n cluster.

          " + "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          " } }, "platformVersion": { @@ -311,16 +578,16 @@ "smithy.api#documentation": "

          The platform version of your Amazon EKS cluster. For more information, see Platform\n Versions in the \n Amazon EKS User Guide\n .

          " } }, - "clientRequestToken": { - "target": "com.amazonaws.eks#String", + "tags": { + "target": "com.amazonaws.eks#TagMap", "traits": { - "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          " + "smithy.api#documentation": "

          The metadata that you apply to the cluster to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Cluster tags do not propagate to any other resources associated with the\n cluster.

          " } }, - "version": { - "target": "com.amazonaws.eks#String", + "encryptionConfig": { + "target": "com.amazonaws.eks#EncryptionConfigList", "traits": { - "smithy.api#documentation": "

          The Kubernetes server version for the cluster.

          " + "smithy.api#documentation": "

          The encryption configuration for the cluster.

          " } } }, @@ -360,6 +627,134 @@ ] } }, + "com.amazonaws.eks#Compatibilities": { + "type": "list", + "member": { + "target": "com.amazonaws.eks#Compatibility" + } + }, + "com.amazonaws.eks#Compatibility": { + "type": "structure", + "members": { + "clusterVersion": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The supported Kubernetes version of the cluster.

          " + } + }, + "platformVersions": { + "target": "com.amazonaws.eks#StringList", + "traits": { + "smithy.api#documentation": "

          The supported compute platform.

          " + } + }, + "defaultVersion": { + "target": "com.amazonaws.eks#Boolean", + "traits": { + "smithy.api#documentation": "

          The supported default version.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Compatibility information.

          " + } + }, + "com.amazonaws.eks#CreateAddon": { + "type": "operation", + "input": { + "target": "com.amazonaws.eks#CreateAddonRequest" + }, + "output": { + "target": "com.amazonaws.eks#CreateAddonResponse" + }, + "errors": [ + { + "target": "com.amazonaws.eks#ClientException" + }, + { + "target": "com.amazonaws.eks#InvalidParameterException" + }, + { + "target": "com.amazonaws.eks#InvalidRequestException" + }, + { + "target": "com.amazonaws.eks#ResourceInUseException" + }, + { + "target": "com.amazonaws.eks#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.eks#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates an Amazon EKS add-on.

          \n

          Amazon EKS add-ons help to automate the provisioning and lifecycle management of common\n operational software for Amazon EKS clusters. Amazon EKS add-ons can only be used with Amazon EKS\n clusters running version 1.18 with platform version eks.3 or later because\n add-ons rely on the Server-side Apply Kubernetes feature, which is only available in\n Kubernetes 1.18 and later.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/clusters/{clusterName}/addons", + "code": 200 + } + } + }, + "com.amazonaws.eks#CreateAddonRequest": { + "type": "structure", + "members": { + "clusterName": { + "target": "com.amazonaws.eks#ClusterName", + "traits": { + "smithy.api#documentation": "

          The name of the cluster to create the add-on for.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on. The name must match one of the names returned by \n ListAddons\n .

          ", + "smithy.api#required": {} + } + }, + "addonVersion": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The version of the add-on. The version must match one of the versions returned by \n DescribeAddonVersions\n .

          " + } + }, + "serviceAccountRoleArn": { + "target": "com.amazonaws.eks#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an existing IAM role to bind to the add-on's service account. The role must be assigned the IAM permissions required by the add-on. If you don't specify an existing IAM role, then the add-on uses the\n permissions assigned to the node IAM role. For more information, see Amazon EKS node IAM role in the Amazon EKS User Guide.

          \n \n

          To specify an existing IAM role, you must have an IAM OpenID Connect (OIDC) provider created for\n your cluster. For more information, see Enabling\n IAM roles for service accounts on your cluster in the\n Amazon EKS User Guide.

          \n
          " + } + }, + "resolveConflicts": { + "target": "com.amazonaws.eks#ResolveConflicts", + "traits": { + "smithy.api#documentation": "

          How to resolve parameter value conflicts when migrating an existing add-on to an\n Amazon EKS add-on.

          " + } + }, + "clientRequestToken": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} + } + }, + "tags": { + "target": "com.amazonaws.eks#TagMap", + "traits": { + "smithy.api#documentation": "

          The metadata to apply to the cluster to assist with categorization and organization.\n Each tag consists of a key and an optional value, both of which you define.

          " + } + } + } + }, + "com.amazonaws.eks#CreateAddonResponse": { + "type": "structure", + "members": { + "addon": { + "target": "com.amazonaws.eks#Addon" + } + } + }, "com.amazonaws.eks#CreateCluster": { "type": "operation", "input": { @@ -403,30 +798,17 @@ "com.amazonaws.eks#CreateClusterRequest": { "type": "structure", "members": { - "encryptionConfig": { - "target": "com.amazonaws.eks#EncryptionConfigList", - "traits": { - "smithy.api#documentation": "

          The encryption configuration for the cluster.

          " - } - }, - "resourcesVpcConfig": { - "target": "com.amazonaws.eks#VpcConfigRequest", + "name": { + "target": "com.amazonaws.eks#ClusterName", "traits": { - "smithy.api#documentation": "

          The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have\n specific requirements to work properly with Kubernetes. For more information, see Cluster VPC\n Considerations and Cluster Security Group Considerations in the\n Amazon EKS User Guide. You must specify at least two subnets. You can specify up to five\n security groups, but we recommend that you use a dedicated security group for your\n cluster control plane.

          ", + "smithy.api#documentation": "

          The unique name to give to your cluster.

          ", "smithy.api#required": {} } }, - "logging": { - "target": "com.amazonaws.eks#Logging", - "traits": { - "smithy.api#documentation": "

          Enable or disable exporting the Kubernetes control plane logs for your cluster to\n CloudWatch Logs. By default, cluster control plane logs aren't exported to CloudWatch Logs. For more\n information, see Amazon EKS Cluster Control Plane Logs in the\n \n Amazon EKS User Guide\n .

          \n \n

          CloudWatch Logs ingestion, archive storage, and data scanning rates apply to exported\n control plane logs. For more information, see Amazon CloudWatch Pricing.

          \n
          " - } - }, - "clientRequestToken": { + "version": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", - "smithy.api#idempotencyToken": {} + "smithy.api#documentation": "

          The desired Kubernetes version for your cluster. If you don't specify a value here,\n the latest version available in Amazon EKS is used.

          " } }, "roleArn": { @@ -436,16 +818,30 @@ "smithy.api#required": {} } }, + "resourcesVpcConfig": { + "target": "com.amazonaws.eks#VpcConfigRequest", + "traits": { + "smithy.api#documentation": "

          The VPC configuration used by the cluster control plane. Amazon EKS VPC resources have\n specific requirements to work properly with Kubernetes. For more information, see Cluster VPC\n Considerations and Cluster Security Group Considerations in the\n Amazon EKS User Guide. You must specify at least two subnets. You can specify up to five\n security groups, but we recommend that you use a dedicated security group for your\n cluster control plane.

          ", + "smithy.api#required": {} + } + }, "kubernetesNetworkConfig": { "target": "com.amazonaws.eks#KubernetesNetworkConfigRequest", "traits": { "smithy.api#documentation": "

          The Kubernetes network configuration for the cluster.

          " } }, - "version": { + "logging": { + "target": "com.amazonaws.eks#Logging", + "traits": { + "smithy.api#documentation": "

          Enable or disable exporting the Kubernetes control plane logs for your cluster to\n CloudWatch Logs. By default, cluster control plane logs aren't exported to CloudWatch Logs. For more\n information, see Amazon EKS Cluster Control Plane Logs in the\n \n Amazon EKS User Guide\n .

          \n \n

          CloudWatch Logs ingestion, archive storage, and data scanning rates apply to exported\n control plane logs. For more information, see Amazon CloudWatch Pricing.

          \n
          " + } + }, + "clientRequestToken": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The desired Kubernetes version for your cluster. If you don't specify a value here,\n the latest version available in Amazon EKS is used.

          " + "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} } }, "tags": { @@ -454,11 +850,10 @@ "smithy.api#documentation": "

          The metadata to apply to the cluster to assist with categorization and organization.\n Each tag consists of a key and an optional value, both of which you define.

          " } }, - "name": { - "target": "com.amazonaws.eks#ClusterName", + "encryptionConfig": { + "target": "com.amazonaws.eks#EncryptionConfigList", "traits": { - "smithy.api#documentation": "

          The unique name to give to your cluster.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The encryption configuration for the cluster.

          " } } } @@ -521,30 +916,31 @@ "smithy.api#required": {} } }, - "selectors": { - "target": "com.amazonaws.eks#FargateProfileSelectors", + "clusterName": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The selectors to match for pods to use this Fargate profile. Each selector must have an\n associated namespace. Optionally, you can also specify labels for a namespace. You may\n specify up to five selectors in a Fargate profile.

          " + "smithy.api#documentation": "

          The name of the Amazon EKS cluster to apply the Fargate profile to.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} } }, - "subnets": { - "target": "com.amazonaws.eks#StringList", + "podExecutionRoleArn": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The IDs of subnets to launch your pods into. At this time, pods running on Fargate are\n not assigned public IP addresses, so only private subnets (with no direct route to an\n Internet Gateway) are accepted for this parameter.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pod execution role to use for pods that match the selectors in\n the Fargate profile. The pod execution role allows Fargate infrastructure to register with\n your cluster as a node, and it provides read access to Amazon ECR image repositories. For\n more information, see Pod Execution Role in the\n Amazon EKS User Guide.

          ", + "smithy.api#required": {} } }, - "tags": { - "target": "com.amazonaws.eks#TagMap", + "subnets": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The metadata to apply to the Fargate profile to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Fargate profile tags do not propagate to any other resources associated with the\n Fargate profile, such as the pods that are scheduled with it.

          " + "smithy.api#documentation": "

          The IDs of subnets to launch your pods into. At this time, pods running on Fargate are\n not assigned public IP addresses, so only private subnets (with no direct route to an\n Internet Gateway) are accepted for this parameter.

          " } }, - "clusterName": { - "target": "com.amazonaws.eks#String", + "selectors": { + "target": "com.amazonaws.eks#FargateProfileSelectors", "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS cluster to apply the Fargate profile to.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} + "smithy.api#documentation": "

          The selectors to match for pods to use this Fargate profile. Each selector must have an\n associated namespace. Optionally, you can also specify labels for a namespace. You may\n specify up to five selectors in a Fargate profile.

          " } }, "clientRequestToken": { @@ -554,11 +950,10 @@ "smithy.api#idempotencyToken": {} } }, - "podExecutionRoleArn": { - "target": "com.amazonaws.eks#String", + "tags": { + "target": "com.amazonaws.eks#TagMap", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pod execution role to use for pods that match the selectors in\n the Fargate profile. The pod execution role allows Fargate infrastructure to register with\n your cluster as a node, and it provides read access to Amazon ECR image repositories. For\n more information, see Pod Execution Role in the\n Amazon EKS User Guide.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The metadata to apply to the Fargate profile to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Fargate profile tags do not propagate to any other resources associated with the\n Fargate profile, such as the pods that are scheduled with it.

          " } } } @@ -606,7 +1001,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a managed worker node group for an Amazon EKS cluster. You can only create a node\n group for your cluster that is equal to the current Kubernetes version for the cluster.\n All node groups are created with the latest AMI release version for the respective minor\n Kubernetes version of the cluster, unless you deploy a custom AMI using a launch\n template. For more information about using launch templates, see Launch\n template support.

          \n

          An Amazon EKS managed node group is an Amazon EC2 Auto Scaling group and associated Amazon EC2 instances that\n are managed by AWS for an Amazon EKS cluster. Each node group uses a version of the\n Amazon EKS-optimized Amazon Linux 2 AMI. For more information, see Managed\n Node Groups in the Amazon EKS User Guide.

          ", + "smithy.api#documentation": "

          Creates a managed worker node group for an Amazon EKS cluster. You can only create a node\n group for your cluster that is equal to the current Kubernetes version for the cluster.\n All node groups are created with the latest AMI release version for the respective minor\n Kubernetes version of the cluster, unless you deploy a custom AMI using a launch\n template. For more information about using launch templates, see Launch\n template support.

          \n

          An Amazon EKS managed node group is an Amazon EC2 Auto Scaling group and associated Amazon EC2 instances that\n are managed by AWS for an Amazon EKS cluster. Each node group uses a version of the Amazon EKS\n optimized Amazon Linux 2 AMI. For more information, see Managed\n Node Groups in the Amazon EKS User Guide.

          ", "smithy.api#http": { "method": "POST", "uri": "/clusters/{clusterName}/node-groups", @@ -617,100 +1012,106 @@ "com.amazonaws.eks#CreateNodegroupRequest": { "type": "structure", "members": { - "tags": { - "target": "com.amazonaws.eks#TagMap", - "traits": { - "smithy.api#documentation": "

          The metadata to apply to the node group to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Node group tags do not propagate to any other resources associated with the node\n group, such as the Amazon EC2 instances or subnets.

          " - } - }, - "version": { + "clusterName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Kubernetes version to use for your managed nodes. By default, the Kubernetes\n version of the cluster is used, and this is the only accepted specified value.\n If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify version,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The name of the cluster to create the node group in.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} } }, - "remoteAccess": { - "target": "com.amazonaws.eks#RemoteAccessConfig", + "nodegroupName": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The remote access (SSH) configuration to use with your node group. If you specify launchTemplate,\n then don't specify remoteAccess, or the node group deployment\n will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The unique name to give your node group.

          ", + "smithy.api#required": {} } }, - "launchTemplate": { - "target": "com.amazonaws.eks#LaunchTemplateSpecification", + "scalingConfig": { + "target": "com.amazonaws.eks#NodegroupScalingConfig", "traits": { - "smithy.api#documentation": "

          An object representing a node group's launch template specification. If specified,\n then do not specify instanceTypes, diskSize, or\n remoteAccess and make sure that the launch template meets the\n requirements in launchTemplateSpecification.

          " + "smithy.api#documentation": "

          The scaling configuration details for the Auto Scaling group that is created for your\n node group.

          " } }, - "releaseVersion": { - "target": "com.amazonaws.eks#String", + "diskSize": { + "target": "com.amazonaws.eks#BoxedInteger", "traits": { - "smithy.api#documentation": "

          The AMI version of the Amazon EKS-optimized AMI to use with your node group. By default,\n the latest available AMI version for the node group's current Kubernetes version is\n used. For more information, see Amazon EKS-Optimized Linux AMI Versions in the Amazon EKS User Guide.\n If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify releaseVersion,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The root device disk size (in GiB) for your node group instances. The default disk\n size is 20 GiB. If you specify launchTemplate, then don't specify diskSize,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " } }, - "nodeRole": { - "target": "com.amazonaws.eks#String", + "subnets": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role to associate with your node group. The Amazon EKS worker\n node kubelet daemon makes calls to AWS APIs on your behalf. Worker nodes\n receive permissions for these API calls through an IAM instance profile and associated\n policies. Before you can launch worker nodes and register them into a cluster, you must\n create an IAM role for those worker nodes to use when they are launched. For more\n information, see Amazon EKS Worker Node IAM Role in the\n \n Amazon EKS User Guide\n . If you specify launchTemplate, then don't specify \n \n IamInstanceProfile\n in your launch template, or the node group \n deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          ", + "smithy.api#documentation": "

          The subnets to use for the Auto Scaling group that is created for your node group.\n These subnets must have the tag key kubernetes.io/cluster/CLUSTER_NAME with\n a value of shared, where CLUSTER_NAME is replaced with the\n name of your cluster. If you specify launchTemplate, then don't specify \n SubnetId\n in your launch template, or the node group \n deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          ", "smithy.api#required": {} } }, "instanceTypes": { "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The instance type to use for your node group. You can specify a single instance type\n for a node group. The default value for instanceTypes is\n t3.medium. If you choose a GPU instance type, be sure to specify\n AL2_x86_64_GPU with the amiType parameter.\n If you specify launchTemplate, then don't specify instanceTypes, or the node group \n deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          Specify the instance types for a node group. If you specify a GPU instance type, be\n sure to specify AL2_x86_64_GPU with the amiType parameter. If\n you specify launchTemplate, then you can specify zero or one instance type\n in your launch template or you can specify 0-20 instance types for\n instanceTypes. If however, you specify an instance type in your launch\n template and specify any instanceTypes, the node group\n deployment will fail. If you don't specify an instance type in a launch template or for\n instanceTypes, then t3.medium is used, by default. If you\n specify Spot for capacityType, then we recommend specifying\n multiple values for instanceTypes. For more information, see Managed node group\n capacity types and Launch template support in\n the Amazon EKS User Guide.

          " } }, - "subnets": { - "target": "com.amazonaws.eks#StringList", + "amiType": { + "target": "com.amazonaws.eks#AMITypes", "traits": { - "smithy.api#documentation": "

          The subnets to use for the Auto Scaling group that is created for your node group.\n These subnets must have the tag key kubernetes.io/cluster/CLUSTER_NAME with\n a value of shared, where CLUSTER_NAME is replaced with the\n name of your cluster. If you specify launchTemplate, then don't specify \n SubnetId\n in your launch template, or the node group \n deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The AMI type for your node group. GPU instance types should use the\n AL2_x86_64_GPU AMI type. Non-GPU instances should use the\n AL2_x86_64 AMI type. Arm instances should use the\n AL2_ARM_64 AMI type. All types use the Amazon EKS optimized Amazon Linux 2 AMI.\n If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify amiType,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " } }, - "nodegroupName": { + "remoteAccess": { + "target": "com.amazonaws.eks#RemoteAccessConfig", + "traits": { + "smithy.api#documentation": "

          The remote access (SSH) configuration to use with your node group. If you specify launchTemplate,\n then don't specify remoteAccess, or the node group deployment\n will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + } + }, + "nodeRole": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The unique name to give your node group.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role to associate with your node group. The Amazon EKS worker\n node kubelet daemon makes calls to AWS APIs on your behalf. Worker nodes\n receive permissions for these API calls through an IAM instance profile and associated\n policies. Before you can launch worker nodes and register them into a cluster, you must\n create an IAM role for those worker nodes to use when they are launched. For more\n information, see Amazon EKS Worker Node IAM Role in the\n \n Amazon EKS User Guide\n . If you specify launchTemplate, then don't specify \n \n IamInstanceProfile\n in your launch template,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          ", "smithy.api#required": {} } }, - "amiType": { - "target": "com.amazonaws.eks#AMITypes", + "labels": { + "target": "com.amazonaws.eks#labelsMap", "traits": { - "smithy.api#documentation": "

          The AMI type for your node group. GPU instance types should use the\n AL2_x86_64_GPU AMI type. Non-GPU instances should use the\n AL2_x86_64 AMI type. Arm instances should use the\n AL2_ARM_64 AMI type. All types use the Amazon EKS-optimized Amazon Linux 2 AMI.\n If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify amiType,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The Kubernetes labels to be applied to the nodes in the node group when they are\n created.

          " } }, - "clusterName": { + "tags": { + "target": "com.amazonaws.eks#TagMap", + "traits": { + "smithy.api#documentation": "

          The metadata to apply to the node group to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Node group tags do not propagate to any other resources associated with the node\n group, such as the Amazon EC2 instances or subnets.

          " + } + }, + "clientRequestToken": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The name of the cluster to create the node group in.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} + "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} } }, - "labels": { - "target": "com.amazonaws.eks#labelsMap", + "launchTemplate": { + "target": "com.amazonaws.eks#LaunchTemplateSpecification", "traits": { - "smithy.api#documentation": "

          The Kubernetes labels to be applied to the nodes in the node group when they are\n created.

          " + "smithy.api#documentation": "

          An object representing a node group's launch template specification. If specified,\n then do not specify instanceTypes, diskSize, or\n remoteAccess and make sure that the launch template meets the\n requirements in launchTemplateSpecification.

          " } }, - "diskSize": { - "target": "com.amazonaws.eks#BoxedInteger", + "capacityType": { + "target": "com.amazonaws.eks#CapacityTypes", "traits": { - "smithy.api#documentation": "

          The root device disk size (in GiB) for your node group instances. The default disk\n size is 20 GiB. If you specify launchTemplate, then don't specify diskSize,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The capacity type for your node group.

          " } }, - "clientRequestToken": { + "version": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", - "smithy.api#idempotencyToken": {} + "smithy.api#documentation": "

          The Kubernetes version to use for your managed nodes. By default, the Kubernetes\n version of the cluster is used, and this is the only accepted specified value.\n If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify version,\n or the node group deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " } }, - "scalingConfig": { - "target": "com.amazonaws.eks#NodegroupScalingConfig", + "releaseVersion": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The scaling configuration details for the Auto Scaling group that is created for your\n node group.

          " + "smithy.api#documentation": "

          The AMI version of the Amazon EKS optimized AMI to use with your node group. By default,\n the latest available AMI version for the node group's current Kubernetes version is\n used. For more information, see Amazon EKS\n optimized Amazon Linux 2 AMI versions in the Amazon EKS User Guide. If you specify launchTemplate,\n and your launch template uses a custom AMI, then don't specify releaseVersion, or the node group \n deployment will fail. For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " } } } @@ -726,6 +1127,69 @@ } } }, + "com.amazonaws.eks#DeleteAddon": { + "type": "operation", + "input": { + "target": "com.amazonaws.eks#DeleteAddonRequest" + }, + "output": { + "target": "com.amazonaws.eks#DeleteAddonResponse" + }, + "errors": [ + { + "target": "com.amazonaws.eks#ClientException" + }, + { + "target": "com.amazonaws.eks#InvalidParameterException" + }, + { + "target": "com.amazonaws.eks#InvalidRequestException" + }, + { + "target": "com.amazonaws.eks#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.eks#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Delete an Amazon EKS add-on.

          \n

          When you remove the add-on, it will also be deleted from the cluster. You can always\n manually start an add-on on the cluster using the Kubernetes API.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/clusters/{clusterName}/addons/{addonName}", + "code": 200 + } + } + }, + "com.amazonaws.eks#DeleteAddonRequest": { + "type": "structure", + "members": { + "clusterName": { + "target": "com.amazonaws.eks#ClusterName", + "traits": { + "smithy.api#documentation": "

          The name of the cluster to delete the add-on from.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on. The name must match one of the names returned by \n ListAddons\n .

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.eks#DeleteAddonResponse": { + "type": "structure", + "members": { + "addon": { + "target": "com.amazonaws.eks#Addon" + } + } + }, "com.amazonaws.eks#DeleteCluster": { "type": "operation", "input": { @@ -916,6 +1380,163 @@ } } }, + "com.amazonaws.eks#DescribeAddon": { + "type": "operation", + "input": { + "target": "com.amazonaws.eks#DescribeAddonRequest" + }, + "output": { + "target": "com.amazonaws.eks#DescribeAddonResponse" + }, + "errors": [ + { + "target": "com.amazonaws.eks#ClientException" + }, + { + "target": "com.amazonaws.eks#InvalidParameterException" + }, + { + "target": "com.amazonaws.eks#InvalidRequestException" + }, + { + "target": "com.amazonaws.eks#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.eks#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes an Amazon EKS add-on.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/clusters/{clusterName}/addons/{addonName}", + "code": 200 + } + } + }, + "com.amazonaws.eks#DescribeAddonRequest": { + "type": "structure", + "members": { + "clusterName": { + "target": "com.amazonaws.eks#ClusterName", + "traits": { + "smithy.api#documentation": "

          The name of the cluster.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on. The name must match one of the names returned by \n ListAddons\n .

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.eks#DescribeAddonResponse": { + "type": "structure", + "members": { + "addon": { + "target": "com.amazonaws.eks#Addon" + } + } + }, + "com.amazonaws.eks#DescribeAddonVersions": { + "type": "operation", + "input": { + "target": "com.amazonaws.eks#DescribeAddonVersionsRequest" + }, + "output": { + "target": "com.amazonaws.eks#DescribeAddonVersionsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.eks#InvalidParameterException" + }, + { + "target": "com.amazonaws.eks#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.eks#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes the Kubernetes versions that the add-on can be used with.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/addons/supported-versions", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "items": "addons", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.eks#DescribeAddonVersionsRequest": { + "type": "structure", + "members": { + "kubernetesVersion": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Kubernetes versions that the add-on can be used with.

          ", + "smithy.api#httpQuery": "kubernetesVersion" + } + }, + "maxResults": { + "target": "com.amazonaws.eks#DescribeAddonVersionsRequestMaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return.

          ", + "smithy.api#httpQuery": "maxResults" + } + }, + "nextToken": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n DescribeAddonVersionsRequest where maxResults was used and\n the results exceeded the value of that parameter. Pagination continues from the end of\n the previous results that returned the nextToken value.

          \n \n

          This token should be treated as an opaque identifier that is used only to\n retrieve the next items in a list and not for other programmatic purposes.

          \n
          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on. The name must match one of the names returned by \n ListAddons\n .

          ", + "smithy.api#httpQuery": "addonName" + } + } + } + }, + "com.amazonaws.eks#DescribeAddonVersionsRequestMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.eks#DescribeAddonVersionsResponse": { + "type": "structure", + "members": { + "addons": { + "target": "com.amazonaws.eks#Addons", + "traits": { + "smithy.api#documentation": "

          The list of available versions with Kubernetes version compatibility.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n DescribeAddonVersionsResponse where maxResults was used\n and the results exceeded the value of that parameter. Pagination continues from the end\n of the previous results that returned the nextToken value.

          \n \n

          This token should be treated as an opaque identifier that is used only to\n retrieve the next items in a list and not for other programmatic purposes.

          \n
          " + } + } + } + }, "com.amazonaws.eks#DescribeCluster": { "type": "operation", "input": { @@ -1005,18 +1626,18 @@ "com.amazonaws.eks#DescribeFargateProfileRequest": { "type": "structure", "members": { - "fargateProfileName": { + "clusterName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The name of the Fargate profile to describe.

          ", + "smithy.api#documentation": "

          The name of the Amazon EKS cluster associated with the Fargate profile.

          ", "smithy.api#httpLabel": {}, "smithy.api#required": {} } }, - "clusterName": { + "fargateProfileName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS cluster associated with the Fargate profile.

          ", + "smithy.api#documentation": "

          The name of the Fargate profile to describe.

          ", "smithy.api#httpLabel": {}, "smithy.api#required": {} } @@ -1156,6 +1777,13 @@ "smithy.api#documentation": "

          The name of the Amazon EKS node group associated with the update.

          ", "smithy.api#httpQuery": "nodegroupName" } + }, + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the add-on. The name must match one of the names returned by \n ListAddons\n .

          ", + "smithy.api#httpQuery": "addonName" + } } } }, @@ -1253,6 +1881,14 @@ { "value": "ClusterUnreachable", "name": "CLUSTER_UNREACHABLE" + }, + { + "value": "InsufficientNumberOfReplicas", + "name": "INSUFFICIENT_NUMBER_OF_REPLICAS" + }, + { + "value": "ConfigurationConflict", + "name": "CONFIGURATION_CONFLICT" } ] } @@ -1260,12 +1896,6 @@ "com.amazonaws.eks#ErrorDetail": { "type": "structure", "members": { - "resourceIds": { - "target": "com.amazonaws.eks#StringList", - "traits": { - "smithy.api#documentation": "

          An optional field that contains the resource IDs associated with the error.

          " - } - }, "errorCode": { "target": "com.amazonaws.eks#ErrorCode", "traits": { @@ -1277,6 +1907,12 @@ "traits": { "smithy.api#documentation": "

          A more complete description of the error.

          " } + }, + "resourceIds": { + "target": "com.amazonaws.eks#StringList", + "traits": { + "smithy.api#documentation": "

          An optional field that contains the resource IDs associated with the error.

          " + } } }, "traits": { @@ -1292,22 +1928,16 @@ "com.amazonaws.eks#FargateProfile": { "type": "structure", "members": { - "createdAt": { - "target": "com.amazonaws.eks#Timestamp", - "traits": { - "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the Fargate profile was created.

          " - } - }, "fargateProfileName": { "target": "com.amazonaws.eks#String", "traits": { "smithy.api#documentation": "

          The name of the Fargate profile.

          " } }, - "selectors": { - "target": "com.amazonaws.eks#FargateProfileSelectors", + "fargateProfileArn": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The selectors to match for pods to use this Fargate profile.

          " + "smithy.api#documentation": "

          The full Amazon Resource Name (ARN) of the Fargate profile.

          " } }, "clusterName": { @@ -1316,34 +1946,40 @@ "smithy.api#documentation": "

          The name of the Amazon EKS cluster that the Fargate profile belongs to.

          " } }, + "createdAt": { + "target": "com.amazonaws.eks#Timestamp", + "traits": { + "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the Fargate profile was created.

          " + } + }, "podExecutionRoleArn": { "target": "com.amazonaws.eks#String", "traits": { "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pod execution role to use for pods that match the selectors in\n the Fargate profile. For more information, see Pod\n Execution Role in the Amazon EKS User Guide.

          " } }, - "tags": { - "target": "com.amazonaws.eks#TagMap", + "subnets": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The metadata applied to the Fargate profile to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Fargate profile tags do not propagate to any other resources associated with the\n Fargate profile, such as the pods that are scheduled with it.

          " + "smithy.api#documentation": "

          The IDs of subnets to launch pods into.

          " } }, - "status": { - "target": "com.amazonaws.eks#FargateProfileStatus", + "selectors": { + "target": "com.amazonaws.eks#FargateProfileSelectors", "traits": { - "smithy.api#documentation": "

          The current status of the Fargate profile.

          " + "smithy.api#documentation": "

          The selectors to match for pods to use this Fargate profile.

          " } }, - "fargateProfileArn": { - "target": "com.amazonaws.eks#String", + "status": { + "target": "com.amazonaws.eks#FargateProfileStatus", "traits": { - "smithy.api#documentation": "

          The full Amazon Resource Name (ARN) of the Fargate profile.

          " + "smithy.api#documentation": "

          The current status of the Fargate profile.

          " } }, - "subnets": { - "target": "com.amazonaws.eks#StringList", + "tags": { + "target": "com.amazonaws.eks#TagMap", "traits": { - "smithy.api#documentation": "

          The IDs of subnets to launch pods into.

          " + "smithy.api#documentation": "

          The metadata applied to the Fargate profile to assist with categorization and\n organization. Each tag consists of a key and an optional value, both of which you\n define. Fargate profile tags do not propagate to any other resources associated with the\n Fargate profile, such as the pods that are scheduled with it.

          " } } }, @@ -1363,17 +1999,17 @@ "com.amazonaws.eks#FargateProfileSelector": { "type": "structure", "members": { - "labels": { - "target": "com.amazonaws.eks#FargateProfileLabel", - "traits": { - "smithy.api#documentation": "

          The Kubernetes labels that the selector should match. A pod must contain all of the\n labels that are specified in the selector for it to be considered a match.

          " - } - }, "namespace": { "target": "com.amazonaws.eks#String", "traits": { "smithy.api#documentation": "

          The Kubernetes namespace that the selector should match.

          " } + }, + "labels": { + "target": "com.amazonaws.eks#FargateProfileLabel", + "traits": { + "smithy.api#documentation": "

          The Kubernetes labels that the selector should match. A pod must contain all of the\n labels that are specified in the selector for it to be considered a match.

          " + } } }, "traits": { @@ -1435,13 +2071,10 @@ "com.amazonaws.eks#InvalidParameterException": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.eks#String" - }, - "fargateProfileName": { + "clusterName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Fargate profile associated with the exception.

          " + "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " } }, "nodegroupName": { @@ -1450,11 +2083,17 @@ "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " } }, - "clusterName": { + "fargateProfileName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + "smithy.api#documentation": "

          The Fargate profile associated with the exception.

          " } + }, + "addonName": { + "target": "com.amazonaws.eks#String" + }, + "message": { + "target": "com.amazonaws.eks#String" } }, "traits": { @@ -1466,20 +2105,23 @@ "com.amazonaws.eks#InvalidRequestException": { "type": "structure", "members": { + "clusterName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + } + }, "nodegroupName": { "target": "com.amazonaws.eks#String", "traits": { "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " } }, - "message": { + "addonName": { "target": "com.amazonaws.eks#String" }, - "clusterName": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " - } + "message": { + "target": "com.amazonaws.eks#String" } }, "traits": { @@ -1491,10 +2133,10 @@ "com.amazonaws.eks#Issue": { "type": "structure", "members": { - "resourceIds": { - "target": "com.amazonaws.eks#StringList", + "code": { + "target": "com.amazonaws.eks#NodegroupIssueCode", "traits": { - "smithy.api#documentation": "

          The AWS resources that are afflicted by this issue.

          " + "smithy.api#documentation": "

          A brief description of the error.

          \n
            \n
          • \n

            \n AccessDenied: Amazon EKS or one or more of your\n managed nodes is failing to authenticate or authorize with your Kubernetes\n cluster API server.

            \n
          • \n
          • \n

            \n AsgInstanceLaunchFailures: Your Auto Scaling group is\n experiencing failures while attempting to launch instances.

            \n
          • \n
          • \n

            \n AutoScalingGroupNotFound: We couldn't find\n the Auto Scaling group associated with the managed node group. You may be able to\n recreate an Auto Scaling group with the same settings to recover.

            \n
          • \n
          • \n

            \n ClusterUnreachable: Amazon EKS or one or more of\n your managed nodes is unable to communicate with your Kubernetes cluster API\n server. This can happen if there are network disruptions or if API servers are\n timing out processing requests.

            \n
          • \n
          • \n

            \n Ec2LaunchTemplateNotFound: We couldn't find\n the Amazon EC2 launch template for your managed node group. You may be able to\n recreate a launch template with the same settings to recover.

            \n
          • \n
          • \n

            \n Ec2LaunchTemplateVersionMismatch: The Amazon EC2\n launch template version for your managed node group does not match the version\n that Amazon EKS created. You may be able to revert to the version that Amazon EKS created\n to recover.

            \n
          • \n
          • \n

            \n Ec2SecurityGroupDeletionFailure: We could not\n delete the remote access security group for your managed node group. Remove any\n dependencies from the security group.

            \n
          • \n
          • \n

            \n Ec2SecurityGroupNotFound: We couldn't find\n the cluster security group for the cluster. You must recreate your\n cluster.

            \n
          • \n
          • \n

            \n Ec2SubnetInvalidConfiguration: One or more\n Amazon EC2 subnets specified for a node group do not automatically assign public IP\n addresses to instances launched into it. If you want your instances to be\n assigned a public IP address, then you need to enable the auto-assign\n public IP address setting for the subnet. See Modifying\n the public IPv4 addressing attribute for your subnet in the Amazon\n VPC User Guide.

            \n
          • \n
          • \n

            \n IamInstanceProfileNotFound: We couldn't find\n the IAM instance profile for your managed node group. You may be able to\n recreate an instance profile with the same settings to recover.

            \n
          • \n
          • \n

            \n IamNodeRoleNotFound: We couldn't find the\n IAM role for your managed node group. You may be able to recreate an IAM role\n with the same settings to recover.

            \n
          • \n
          • \n

            \n InstanceLimitExceeded: Your AWS account is\n unable to launch any more instances of the specified instance type. You may be\n able to request an Amazon EC2 instance limit increase to recover.

            \n
          • \n
          • \n

            \n InsufficientFreeAddresses: One or more of the\n subnets associated with your managed node group does not have enough available\n IP addresses for new nodes.

            \n
          • \n
          • \n

            \n InternalFailure: These errors are usually\n caused by an Amazon EKS server-side issue.

            \n
          • \n
          • \n

            \n NodeCreationFailure: Your launched instances\n are unable to register with your Amazon EKS cluster. Common causes of this failure\n are insufficient worker node IAM\n role permissions or lack of outbound internet access for the nodes.\n

            \n
          • \n
          " } }, "message": { @@ -1503,10 +2145,10 @@ "smithy.api#documentation": "

          The error message associated with the issue.

          " } }, - "code": { - "target": "com.amazonaws.eks#NodegroupIssueCode", + "resourceIds": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          A brief description of the error.

          \n
            \n
          • \n

            \n AutoScalingGroupNotFound: We couldn't find\n the Auto Scaling group associated with the managed node group. You may be able to\n recreate an Auto Scaling group with the same settings to recover.

            \n
          • \n
          • \n

            \n Ec2SecurityGroupNotFound: We couldn't find\n the cluster security group for the cluster. You must recreate your\n cluster.

            \n
          • \n
          • \n

            \n Ec2SecurityGroupDeletionFailure: We could not\n delete the remote access security group for your managed node group. Remove any\n dependencies from the security group.

            \n
          • \n
          • \n

            \n Ec2LaunchTemplateNotFound: We couldn't find\n the Amazon EC2 launch template for your managed node group. You may be able to\n recreate a launch template with the same settings to recover.

            \n
          • \n
          • \n

            \n Ec2LaunchTemplateVersionMismatch: The Amazon EC2\n launch template version for your managed node group does not match the version\n that Amazon EKS created. You may be able to revert to the version that Amazon EKS created\n to recover.

            \n
          • \n
          • \n

            \n Ec2SubnetInvalidConfiguration: One or more\n Amazon EC2 subnets specified for a node group do not automatically assign public IP\n addresses to instances launched into it. If you want your instances to be\n assigned a public IP address, then you need to enable the auto-assign\n public IP address setting for the subnet. See Modifying\n the public IPv4 addressing attribute for your subnet in the Amazon\n VPC User Guide.

            \n
          • \n
          • \n

            \n IamInstanceProfileNotFound: We couldn't find\n the IAM instance profile for your managed node group. You may be able to\n recreate an instance profile with the same settings to recover.

            \n
          • \n
          • \n

            \n IamNodeRoleNotFound: We couldn't find the\n IAM role for your managed node group. You may be able to recreate an IAM role\n with the same settings to recover.

            \n
          • \n
          • \n

            \n AsgInstanceLaunchFailures: Your Auto Scaling group is\n experiencing failures while attempting to launch instances.

            \n
          • \n
          • \n

            \n NodeCreationFailure: Your launched instances\n are unable to register with your Amazon EKS cluster. Common causes of this failure\n are insufficient worker node IAM\n role permissions or lack of outbound internet access for the nodes.\n

            \n
          • \n
          • \n

            \n InstanceLimitExceeded: Your AWS account is\n unable to launch any more instances of the specified instance type. You may be\n able to request an Amazon EC2 instance limit increase to recover.

            \n
          • \n
          • \n

            \n InsufficientFreeAddresses: One or more of the\n subnets associated with your managed node group does not have enough available\n IP addresses for new nodes.

            \n
          • \n
          • \n

            \n AccessDenied: Amazon EKS or one or more of your\n managed nodes is unable to communicate with your cluster API server.

            \n
          • \n
          • \n

            \n InternalFailure: These errors are usually\n caused by an Amazon EKS server-side issue.

            \n
          • \n
          " + "smithy.api#documentation": "

          The AWS resources that are afflicted by this issue.

          " } } }, @@ -1526,7 +2168,7 @@ "serviceIpv4Cidr": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The CIDR block to assign Kubernetes service IP addresses from. If you don't specify a block, Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. We recommend that you specify a block that does not overlap with resources in other networks\n that are peered or connected to your VPC. The block must meet the following requirements:

          \n
            \n
          • \n

            Within one of the following private IP address blocks: 10.0.0.0/8, 172.16.0.0/12, or 192.168.0.0/16.

            \n
          • \n
          • \n

            Doesn't overlap with any CIDR block assigned to the VPC that you selected for VPC.

            \n
          • \n
          • \n

            Between /24 and /12.

            \n
          • \n
          \n \n

          You can only specify a custom CIDR block when you create a cluster and can't change this value once the cluster is created.

          \n
          " + "smithy.api#documentation": "

          The CIDR block to assign Kubernetes service IP addresses from. If you don't specify a\n block, Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR\n blocks. We recommend that you specify a block that does not overlap with resources in\n other networks that are peered or connected to your VPC. The block must meet the\n following requirements:

          \n
            \n
          • \n

            Within one of the following private IP address blocks: 10.0.0.0/8,\n 172.16.0.0/12, or 192.168.0.0/16.

            \n
          • \n
          • \n

            Doesn't overlap with any CIDR block assigned to the VPC that you selected for\n VPC.

            \n
          • \n
          • \n

            Between /24 and /12.

            \n
          • \n
          \n \n

          You can only specify a custom CIDR block when you create a cluster and can't\n change this value once the cluster is created.

          \n
          " } } }, @@ -1540,7 +2182,7 @@ "serviceIpv4Cidr": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The CIDR block that Kubernetes service IP addresses are assigned from. If you didn't specify a CIDR block, then Kubernetes assigns addresses from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. If this was specified, then it was specified when the cluster was created and it\n cannot be changed.

          " + "smithy.api#documentation": "

          The CIDR block that Kubernetes service IP addresses are assigned from. If you didn't\n specify a CIDR block when you created the cluster, then Kubernetes assigns addresses\n from either the 10.100.0.0/16 or 172.20.0.0/16 CIDR blocks. If this was specified, then\n it was specified when the cluster was created and it cannot be changed.

          " } } }, @@ -1551,6 +2193,12 @@ "com.amazonaws.eks#LaunchTemplateSpecification": { "type": "structure", "members": { + "name": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the launch template.

          " + } + }, "version": { "target": "com.amazonaws.eks#String", "traits": { @@ -1562,16 +2210,104 @@ "traits": { "smithy.api#documentation": "

          The ID of the launch template.

          " } + } + }, + "traits": { + "smithy.api#documentation": "

          An object representing a node group launch template specification. The launch template\n cannot include \n SubnetId\n , \n IamInstanceProfile\n , \n RequestSpotInstances\n , \n HibernationOptions\n , or \n TerminateInstances\n , or the node group deployment or\n update will fail. For more information about launch templates, see \n CreateLaunchTemplate\n in the Amazon EC2 API Reference.\n For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          \n

          Specify either name or id, but not both.

          " + } + }, + "com.amazonaws.eks#ListAddons": { + "type": "operation", + "input": { + "target": "com.amazonaws.eks#ListAddonsRequest" + }, + "output": { + "target": "com.amazonaws.eks#ListAddonsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.eks#ClientException" }, - "name": { + { + "target": "com.amazonaws.eks#InvalidParameterException" + }, + { + "target": "com.amazonaws.eks#InvalidRequestException" + }, + { + "target": "com.amazonaws.eks#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.eks#ServerException" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the available add-ons.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/clusters/{clusterName}/addons", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "items": "addons", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.eks#ListAddonsRequest": { + "type": "structure", + "members": { + "clusterName": { + "target": "com.amazonaws.eks#ClusterName", + "traits": { + "smithy.api#documentation": "

          The name of the cluster.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "maxResults": { + "target": "com.amazonaws.eks#ListAddonsRequestMaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of add-on results returned by ListAddonsRequest in\n paginated output. When you use this parameter, ListAddonsRequest returns\n only maxResults results in a single page along with a\n nextToken response element. You can see the remaining results of the\n initial request by sending another ListAddonsRequest request with the\n returned nextToken value. This value can be between 1 and\n 100. If you don't use this parameter, ListAddonsRequest\n returns up to 100 results and a nextToken value, if\n applicable.

          ", + "smithy.api#httpQuery": "maxResults" + } + }, + "nextToken": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The name of the launch template.

          " + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n ListAddonsRequest where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value.

          \n \n

          This token should be treated as an opaque identifier that is used only to\n retrieve the next items in a list and not for other programmatic purposes.

          \n
          ", + "smithy.api#httpQuery": "nextToken" } } - }, + } + }, + "com.amazonaws.eks#ListAddonsRequestMaxResults": { + "type": "integer", "traits": { - "smithy.api#documentation": "

          An object representing a node group launch template specification. The launch template\n cannot include \n SubnetId\n , \n IamInstanceProfile\n , \n RequestSpotInstances\n , \n HibernationOptions\n , or \n TerminateInstances\n , or the node group deployment or update\n will fail. For more information about launch templates, see \n CreateLaunchTemplate\n in the Amazon EC2 API Reference.\n For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          \n

          Specify either name or id, but not both.

          " + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.eks#ListAddonsResponse": { + "type": "structure", + "members": { + "addons": { + "target": "com.amazonaws.eks#StringList", + "traits": { + "smithy.api#documentation": "

          A list of available add-ons.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n ListAddonsResponse where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value.

          \n \n

          This token should be treated as an opaque identifier that is used only to\n retrieve the next items in a list and not for other programmatic purposes.

          \n
          " + } + } } }, "com.amazonaws.eks#ListClusters": { @@ -1705,19 +2441,19 @@ "smithy.api#required": {} } }, - "nextToken": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n ListFargateProfiles request where maxResults was used and\n the results exceeded the value of that parameter. Pagination continues from the end of\n the previous results that returned the nextToken value.

          ", - "smithy.api#httpQuery": "nextToken" - } - }, "maxResults": { "target": "com.amazonaws.eks#FargateProfilesRequestMaxResults", "traits": { "smithy.api#documentation": "

          The maximum number of Fargate profile results returned by\n ListFargateProfiles in paginated output. When you use this parameter,\n ListFargateProfiles returns only maxResults results in a\n single page along with a nextToken response element. You can see the\n remaining results of the initial request by sending another\n ListFargateProfiles request with the returned nextToken\n value. This value can be between 1 and 100. If you don't\n use this parameter, ListFargateProfiles returns up to 100\n results and a nextToken value if applicable.

          ", "smithy.api#httpQuery": "maxResults" } + }, + "nextToken": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n ListFargateProfiles request where maxResults was used and\n the results exceeded the value of that parameter. Pagination continues from the end of\n the previous results that returned the nextToken value.

          ", + "smithy.api#httpQuery": "nextToken" + } } } }, @@ -1781,6 +2517,14 @@ "com.amazonaws.eks#ListNodegroupsRequest": { "type": "structure", "members": { + "clusterName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the Amazon EKS cluster that you would like to list node groups in.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, "maxResults": { "target": "com.amazonaws.eks#ListNodegroupsRequestMaxResults", "traits": { @@ -1794,14 +2538,6 @@ "smithy.api#documentation": "

          The nextToken value returned from a previous paginated\n ListNodegroups request where maxResults was used and the\n results exceeded the value of that parameter. Pagination continues from the end of the\n previous results that returned the nextToken value.

          ", "smithy.api#httpQuery": "nextToken" } - }, - "clusterName": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS cluster that you would like to list node groups in.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} - } } } }, @@ -1818,17 +2554,17 @@ "com.amazonaws.eks#ListNodegroupsResponse": { "type": "structure", "members": { - "nextToken": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The nextToken value to include in a future ListNodegroups\n request. When the results of a ListNodegroups request exceed\n maxResults, you can use this value to retrieve the next page of\n results. This value is null when there are no more results to\n return.

          " - } - }, "nodegroups": { "target": "com.amazonaws.eks#StringList", "traits": { "smithy.api#documentation": "

          A list of all of the node groups associated with the specified cluster.

          " } + }, + "nextToken": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The nextToken value to include in a future ListNodegroups\n request. When the results of a ListNodegroups request exceed\n maxResults, you can use this value to retrieve the next page of\n results. This value is null when there are no more results to\n return.

          " + } } } }, @@ -1929,6 +2665,20 @@ "smithy.api#required": {} } }, + "nodegroupName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The name of the Amazon EKS managed node group to list updates for.

          ", + "smithy.api#httpQuery": "nodegroupName" + } + }, + "addonName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The names of the installed add-ons that have available updates.

          ", + "smithy.api#httpQuery": "addonName" + } + }, "nextToken": { "target": "com.amazonaws.eks#String", "traits": { @@ -1942,13 +2692,6 @@ "smithy.api#documentation": "

          The maximum number of update results returned by ListUpdates in paginated\n output. When you use this parameter, ListUpdates returns only\n maxResults results in a single page along with a nextToken\n response element. You can see the remaining results of the initial request by sending\n another ListUpdates request with the returned nextToken value.\n This value can be between 1 and 100. If you don't use this\n parameter, ListUpdates returns up to 100 results and a\n nextToken value if applicable.

          ", "smithy.api#httpQuery": "maxResults" } - }, - "nodegroupName": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS managed node group to list updates for.

          ", - "smithy.api#httpQuery": "nodegroupName" - } } } }, @@ -1982,17 +2725,17 @@ "com.amazonaws.eks#LogSetup": { "type": "structure", "members": { - "enabled": { - "target": "com.amazonaws.eks#BoxedBoolean", - "traits": { - "smithy.api#documentation": "

          If a log type is enabled, that log type exports its control plane logs to CloudWatch Logs. If a\n log type isn't enabled, that log type doesn't export its control plane logs. Each\n individual log type can be enabled or disabled independently.

          " - } - }, "types": { "target": "com.amazonaws.eks#LogTypes", "traits": { "smithy.api#documentation": "

          The available cluster control plane log types.

          " } + }, + "enabled": { + "target": "com.amazonaws.eks#BoxedBoolean", + "traits": { + "smithy.api#documentation": "

          If a log type is enabled, that log type exports its control plane logs to CloudWatch Logs. If a\n log type isn't enabled, that log type doesn't export its control plane logs. Each\n individual log type can be enabled or disabled independently.

          " + } } }, "traits": { @@ -2061,52 +2804,34 @@ "smithy.api#documentation": "

          The name associated with an Amazon EKS managed node group.

          " } }, - "amiType": { - "target": "com.amazonaws.eks#AMITypes", - "traits": { - "smithy.api#documentation": "

          If the node group was deployed using a launch template with a custom AMI, then this is\n CUSTOM. For node groups that weren't deployed using a launch template,\n this is the AMI type that was specified in the node group configuration.

          " - } - }, - "labels": { - "target": "com.amazonaws.eks#labelsMap", - "traits": { - "smithy.api#documentation": "

          The Kubernetes labels applied to the nodes in the node group.

          \n \n

          Only labels that are applied with the Amazon EKS API are shown here. There may be other\n Kubernetes labels applied to the nodes in this group.

          \n
          " - } - }, - "subnets": { - "target": "com.amazonaws.eks#StringList", - "traits": { - "smithy.api#documentation": "

          The subnets that were specified for the Auto Scaling group that is associated with\n your node group.

          " - } - }, - "health": { - "target": "com.amazonaws.eks#NodegroupHealth", + "nodegroupArn": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The health status of the node group. If there are issues with your node group's\n health, they are listed here.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) associated with the managed node group.

          " } }, - "status": { - "target": "com.amazonaws.eks#NodegroupStatus", + "clusterName": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The current status of the managed node group.

          " + "smithy.api#documentation": "

          The name of the cluster that the managed node group resides in.

          " } }, - "clusterName": { + "version": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The name of the cluster that the managed node group resides in.

          " + "smithy.api#documentation": "

          The Kubernetes version of the managed node group.

          " } }, - "nodegroupArn": { + "releaseVersion": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) associated with the managed node group.

          " + "smithy.api#documentation": "

          If the node group was deployed using a launch template with a custom AMI, then this is\n the AMI ID that was specified in the launch template. For node groups that weren't\n deployed using a launch template, this is the version of the Amazon EKS optimized AMI that\n the node group was deployed with.

          " } }, - "remoteAccess": { - "target": "com.amazonaws.eks#RemoteAccessConfig", + "createdAt": { + "target": "com.amazonaws.eks#Timestamp", "traits": { - "smithy.api#documentation": "

          If the node group wasn't deployed with a launch template, then this is the remote\n access configuration that is associated with the node group. If the node group was\n deployed with a launch template, then this is\n null.

          " + "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the managed node group was\n created.

          " } }, "modifiedAt": { @@ -2115,22 +2840,22 @@ "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the managed node group was last\n modified.

          " } }, - "scalingConfig": { - "target": "com.amazonaws.eks#NodegroupScalingConfig", + "status": { + "target": "com.amazonaws.eks#NodegroupStatus", "traits": { - "smithy.api#documentation": "

          The scaling configuration details for the Auto Scaling group that is associated with\n your node group.

          " + "smithy.api#documentation": "

          The current status of the managed node group.

          " } }, - "resources": { - "target": "com.amazonaws.eks#NodegroupResources", + "capacityType": { + "target": "com.amazonaws.eks#CapacityTypes", "traits": { - "smithy.api#documentation": "

          The resources associated with the node group, such as Auto Scaling groups and security\n groups for remote access.

          " + "smithy.api#documentation": "

          The capacity type of your managed node group.

          " } }, - "launchTemplate": { - "target": "com.amazonaws.eks#LaunchTemplateSpecification", + "scalingConfig": { + "target": "com.amazonaws.eks#NodegroupScalingConfig", "traits": { - "smithy.api#documentation": "

          If a launch template was used to create the node group, then this is the launch\n template that was used.

          " + "smithy.api#documentation": "

          The scaling configuration details for the Auto Scaling group that is associated with\n your node group.

          " } }, "instanceTypes": { @@ -2139,22 +2864,22 @@ "smithy.api#documentation": "

          If the node group wasn't deployed with a launch template, then this is the instance\n type that is associated with the node group. If the node group was deployed with a\n launch template, then this is null.

          " } }, - "tags": { - "target": "com.amazonaws.eks#TagMap", + "subnets": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The metadata applied to the node group to assist with categorization and organization.\n Each tag consists of a key and an optional value, both of which you define. Node group\n tags do not propagate to any other resources associated with the node group, such as the\n Amazon EC2 instances or subnets.

          " + "smithy.api#documentation": "

          The subnets that were specified for the Auto Scaling group that is associated with\n your node group.

          " } }, - "version": { - "target": "com.amazonaws.eks#String", + "remoteAccess": { + "target": "com.amazonaws.eks#RemoteAccessConfig", "traits": { - "smithy.api#documentation": "

          The Kubernetes version of the managed node group.

          " + "smithy.api#documentation": "

          If the node group wasn't deployed with a launch template, then this is the remote\n access configuration that is associated with the node group. If the node group was\n deployed with a launch template, then this is null.

          " } }, - "createdAt": { - "target": "com.amazonaws.eks#Timestamp", + "amiType": { + "target": "com.amazonaws.eks#AMITypes", "traits": { - "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the managed node group was\n created.

          " + "smithy.api#documentation": "

          If the node group was deployed using a launch template with a custom AMI, then this is\n CUSTOM. For node groups that weren't deployed using a launch template,\n this is the AMI type that was specified in the node group configuration.

          " } }, "nodeRole": { @@ -2163,16 +2888,40 @@ "smithy.api#documentation": "

          The IAM role associated with your node group. The Amazon EKS worker node\n kubelet daemon makes calls to AWS APIs on your behalf. Worker nodes\n receive permissions for these API calls through an IAM instance profile and associated\n policies.

          " } }, + "labels": { + "target": "com.amazonaws.eks#labelsMap", + "traits": { + "smithy.api#documentation": "

          The Kubernetes labels applied to the nodes in the node group.

          \n \n

          Only labels that are applied with the Amazon EKS API are shown here. There may be other\n Kubernetes labels applied to the nodes in this group.

          \n
          " + } + }, + "resources": { + "target": "com.amazonaws.eks#NodegroupResources", + "traits": { + "smithy.api#documentation": "

          The resources associated with the node group, such as Auto Scaling groups and security\n groups for remote access.

          " + } + }, "diskSize": { "target": "com.amazonaws.eks#BoxedInteger", "traits": { "smithy.api#documentation": "

          If the node group wasn't deployed with a launch template, then this is the disk size\n in the node group configuration. If the node group was deployed with a launch template,\n then this is null.

          " } }, - "releaseVersion": { - "target": "com.amazonaws.eks#String", + "health": { + "target": "com.amazonaws.eks#NodegroupHealth", + "traits": { + "smithy.api#documentation": "

          The health status of the node group. If there are issues with your node group's\n health, they are listed here.

          " + } + }, + "launchTemplate": { + "target": "com.amazonaws.eks#LaunchTemplateSpecification", + "traits": { + "smithy.api#documentation": "

          If a launch template was used to create the node group, then this is the launch\n template that was used.

          " + } + }, + "tags": { + "target": "com.amazonaws.eks#TagMap", "traits": { - "smithy.api#documentation": "

          If the node group was deployed using a launch template with a custom AMI, then this is\n the AMI ID that was specified in the launch template. For node groups that weren't\n deployed using a launch template, this is the version of the Amazon EKS-optimized AMI that\n the node group was deployed with.

          " + "smithy.api#documentation": "

          The metadata applied to the node group to assist with categorization and organization.\n Each tag consists of a key and an optional value, both of which you define. Node group\n tags do not propagate to any other resources associated with the node group, such as the\n Amazon EC2 instances or subnets.

          " } } }, @@ -2276,17 +3025,17 @@ "com.amazonaws.eks#NodegroupResources": { "type": "structure", "members": { - "remoteAccessSecurityGroup": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The remote access security group associated with the node group. This security group\n controls SSH access to the worker nodes.

          " - } - }, "autoScalingGroups": { "target": "com.amazonaws.eks#AutoScalingGroupList", "traits": { "smithy.api#documentation": "

          The Auto Scaling groups associated with the node group.

          " } + }, + "remoteAccessSecurityGroup": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The remote access security group associated with the node group. This security group\n controls SSH access to the worker nodes.

          " + } } }, "traits": { @@ -2296,16 +3045,16 @@ "com.amazonaws.eks#NodegroupScalingConfig": { "type": "structure", "members": { - "maxSize": { + "minSize": { "target": "com.amazonaws.eks#Capacity", "traits": { - "smithy.api#documentation": "

          The maximum number of worker nodes that the managed node group can scale out to.\n Managed node groups can support up to 100 nodes by default.

          " + "smithy.api#documentation": "

          The minimum number of worker nodes that the managed node group can scale in to. This\n number must be greater than zero.

          " } }, - "minSize": { + "maxSize": { "target": "com.amazonaws.eks#Capacity", "traits": { - "smithy.api#documentation": "

          The minimum number of worker nodes that the managed node group can scale in to. This\n number must be greater than zero.

          " + "smithy.api#documentation": "

          The maximum number of worker nodes that the managed node group can scale out to.\n Managed node groups can support up to 100 nodes by default.

          " } }, "desiredSize": { @@ -2408,23 +3157,39 @@ "smithy.api#documentation": "

          An object representing the remote access configuration for the managed node\n group.

          " } }, + "com.amazonaws.eks#ResolveConflicts": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "OVERWRITE" + }, + { + "value": "NONE" + } + ] + } + }, "com.amazonaws.eks#ResourceInUseException": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.eks#String" - }, - "nodegroupName": { + "clusterName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " + "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " } }, - "clusterName": { + "nodegroupName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " } + }, + "addonName": { + "target": "com.amazonaws.eks#String" + }, + "message": { + "target": "com.amazonaws.eks#String" } }, "traits": { @@ -2473,14 +3238,17 @@ "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " } }, - "message": { - "target": "com.amazonaws.eks#String" - }, "fargateProfileName": { "target": "com.amazonaws.eks#String", "traits": { "smithy.api#documentation": "

          The Fargate profile associated with the exception.

          " } + }, + "addonName": { + "target": "com.amazonaws.eks#String" + }, + "message": { + "target": "com.amazonaws.eks#String" } }, "traits": { @@ -2489,23 +3257,35 @@ "smithy.api#httpError": 404 } }, + "com.amazonaws.eks#RoleArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + } + } + }, "com.amazonaws.eks#ServerException": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.eks#String" - }, - "nodegroupName": { + "clusterName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " + "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " } }, - "clusterName": { + "nodegroupName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " } + }, + "addonName": { + "target": "com.amazonaws.eks#String" + }, + "message": { + "target": "com.amazonaws.eks#String" } }, "traits": { @@ -2600,13 +3380,6 @@ "com.amazonaws.eks#TagResourceRequest": { "type": "structure", "members": { - "tags": { - "target": "com.amazonaws.eks#TagMap", - "traits": { - "smithy.api#documentation": "

          The tags to add to the resource. A tag is an array of key-value pairs.

          ", - "smithy.api#required": {} - } - }, "resourceArn": { "target": "com.amazonaws.eks#String", "traits": { @@ -2614,6 +3387,13 @@ "smithy.api#httpLabel": {}, "smithy.api#required": {} } + }, + "tags": { + "target": "com.amazonaws.eks#TagMap", + "traits": { + "smithy.api#documentation": "

          The tags to add to the resource. A tag is an array of key-value pairs.

          ", + "smithy.api#required": {} + } } } }, @@ -2636,126 +3416,217 @@ "com.amazonaws.eks#UnsupportedAvailabilityZoneException": { "type": "structure", "members": { + "message": { + "target": "com.amazonaws.eks#String" + }, + "clusterName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + } + }, + "nodegroupName": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " + } + }, "validZones": { "target": "com.amazonaws.eks#StringList", "traits": { "smithy.api#documentation": "

          The supported Availability Zones for your account. Choose subnets in these\n Availability Zones for your cluster.

          " } + } + }, + "traits": { + "smithy.api#documentation": "

          At least one of your specified cluster subnets is in an Availability Zone that does\n not support Amazon EKS. The exception output specifies the supported Availability Zones for\n your account, from which you can choose subnets for your cluster.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.eks#UntagResource": { + "type": "operation", + "input": { + "target": "com.amazonaws.eks#UntagResourceRequest" + }, + "output": { + "target": "com.amazonaws.eks#UntagResourceResponse" + }, + "errors": [ + { + "target": "com.amazonaws.eks#BadRequestException" }, - "clusterName": { + { + "target": "com.amazonaws.eks#NotFoundException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes specified tags from a resource.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/tags/{resourceArn}", + "code": 200 + } + } + }, + "com.amazonaws.eks#UntagResourceRequest": { + "type": "structure", + "members": { + "resourceArn": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource from which to delete tags. Currently, the supported\n resources are Amazon EKS clusters and managed node groups.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tagKeys": { + "target": "com.amazonaws.eks#TagKeyList", + "traits": { + "smithy.api#documentation": "

          The keys of the tags to be removed.

          ", + "smithy.api#httpQuery": "tagKeys", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.eks#UntagResourceResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.eks#Update": { + "type": "structure", + "members": { + "id": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon EKS cluster associated with the exception.

          " + "smithy.api#documentation": "

          A UUID that is used to track the update.

          " + } + }, + "status": { + "target": "com.amazonaws.eks#UpdateStatus", + "traits": { + "smithy.api#documentation": "

          The current status of the update.

          " + } + }, + "type": { + "target": "com.amazonaws.eks#UpdateType", + "traits": { + "smithy.api#documentation": "

          The type of the update.

          " + } + }, + "params": { + "target": "com.amazonaws.eks#UpdateParams", + "traits": { + "smithy.api#documentation": "

          A key-value map that contains the parameters associated with the update.

          " + } + }, + "createdAt": { + "target": "com.amazonaws.eks#Timestamp", + "traits": { + "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the update was created.

          " } }, - "nodegroupName": { - "target": "com.amazonaws.eks#String", + "errors": { + "target": "com.amazonaws.eks#ErrorDetails", "traits": { - "smithy.api#documentation": "

          The Amazon EKS managed node group associated with the exception.

          " + "smithy.api#documentation": "

          Any errors associated with a Failed update.

          " } - }, - "message": { - "target": "com.amazonaws.eks#String" } }, "traits": { - "smithy.api#documentation": "

          At least one of your specified cluster subnets is in an Availability Zone that does\n not support Amazon EKS. The exception output specifies the supported Availability Zones for\n your account, from which you can choose subnets for your cluster.

          ", - "smithy.api#error": "client", - "smithy.api#httpError": 400 + "smithy.api#documentation": "

          An object representing an asynchronous update.

          " } }, - "com.amazonaws.eks#UntagResource": { + "com.amazonaws.eks#UpdateAddon": { "type": "operation", "input": { - "target": "com.amazonaws.eks#UntagResourceRequest" + "target": "com.amazonaws.eks#UpdateAddonRequest" }, "output": { - "target": "com.amazonaws.eks#UntagResourceResponse" + "target": "com.amazonaws.eks#UpdateAddonResponse" }, "errors": [ { - "target": "com.amazonaws.eks#BadRequestException" + "target": "com.amazonaws.eks#ClientException" }, { - "target": "com.amazonaws.eks#NotFoundException" + "target": "com.amazonaws.eks#InvalidParameterException" + }, + { + "target": "com.amazonaws.eks#InvalidRequestException" + }, + { + "target": "com.amazonaws.eks#ResourceInUseException" + }, + { + "target": "com.amazonaws.eks#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.eks#ServerException" } ], "traits": { - "smithy.api#documentation": "

          Deletes specified tags from a resource.

          ", + "smithy.api#documentation": "

          Updates an Amazon EKS add-on.

          ", "smithy.api#http": { - "method": "DELETE", - "uri": "/tags/{resourceArn}", + "method": "POST", + "uri": "/clusters/{clusterName}/addons/{addonName}/update", "code": 200 } } }, - "com.amazonaws.eks#UntagResourceRequest": { + "com.amazonaws.eks#UpdateAddonRequest": { "type": "structure", "members": { - "tagKeys": { - "target": "com.amazonaws.eks#TagKeyList", + "clusterName": { + "target": "com.amazonaws.eks#ClusterName", "traits": { - "smithy.api#documentation": "

          The keys of the tags to be removed.

          ", - "smithy.api#httpQuery": "tagKeys", + "smithy.api#documentation": "

          The name of the cluster.

          ", + "smithy.api#httpLabel": {}, "smithy.api#required": {} } }, - "resourceArn": { + "addonName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource from which to delete tags. Currently, the supported\n resources are Amazon EKS clusters and managed node groups.

          ", + "smithy.api#documentation": "

          The name of the add-on. The name must match one of the names returned by \n ListAddons\n .

          ", "smithy.api#httpLabel": {}, "smithy.api#required": {} } - } - } - }, - "com.amazonaws.eks#UntagResourceResponse": { - "type": "structure", - "members": {} - }, - "com.amazonaws.eks#Update": { - "type": "structure", - "members": { - "status": { - "target": "com.amazonaws.eks#UpdateStatus", - "traits": { - "smithy.api#documentation": "

          The current status of the update.

          " - } - }, - "createdAt": { - "target": "com.amazonaws.eks#Timestamp", - "traits": { - "smithy.api#documentation": "

          The Unix epoch timestamp in seconds for when the update was created.

          " - } }, - "errors": { - "target": "com.amazonaws.eks#ErrorDetails", + "addonVersion": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          Any errors associated with a Failed update.

          " + "smithy.api#documentation": "

          The version of the add-on. The version must match one of the versions returned by \n DescribeAddonVersions\n .

          " } }, - "type": { - "target": "com.amazonaws.eks#UpdateType", + "serviceAccountRoleArn": { + "target": "com.amazonaws.eks#RoleArn", "traits": { - "smithy.api#documentation": "

          The type of the update.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an existing IAM role to bind to the add-on's service account. The role must be assigned the IAM permissions required by the add-on. If you don't specify an existing IAM role, then the add-on uses the\n permissions assigned to the node IAM role. For more information, see Amazon EKS node IAM role in the Amazon EKS User Guide.

          \n \n

          To specify an existing IAM role, you must have an IAM OpenID Connect (OIDC) provider created for\n your cluster. For more information, see Enabling\n IAM roles for service accounts on your cluster in the\n Amazon EKS User Guide.

          \n
          " } }, - "params": { - "target": "com.amazonaws.eks#UpdateParams", + "resolveConflicts": { + "target": "com.amazonaws.eks#ResolveConflicts", "traits": { - "smithy.api#documentation": "

          A key-value map that contains the parameters associated with the update.

          " + "smithy.api#documentation": "

          How to resolve parameter value conflicts when applying the new version of the add-on\n to the cluster.

          " } }, - "id": { + "clientRequestToken": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          A UUID that is used to track the update.

          " + "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} } } - }, - "traits": { - "smithy.api#documentation": "

          An object representing an asynchronous update.

          " + } + }, + "com.amazonaws.eks#UpdateAddonResponse": { + "type": "structure", + "members": { + "update": { + "target": "com.amazonaws.eks#Update" + } } }, "com.amazonaws.eks#UpdateClusterConfig": { @@ -2798,12 +3669,6 @@ "com.amazonaws.eks#UpdateClusterConfigRequest": { "type": "structure", "members": { - "logging": { - "target": "com.amazonaws.eks#Logging", - "traits": { - "smithy.api#documentation": "

          Enable or disable exporting the Kubernetes control plane logs for your cluster to\n CloudWatch Logs. By default, cluster control plane logs aren't exported to CloudWatch Logs. For more\n information, see Amazon EKS Cluster Control Plane Logs in the\n \n Amazon EKS User Guide\n .

          \n \n

          CloudWatch Logs ingestion, archive storage, and data scanning rates apply to exported\n control plane logs. For more information, see Amazon CloudWatch Pricing.

          \n
          " - } - }, "name": { "target": "com.amazonaws.eks#String", "traits": { @@ -2815,6 +3680,12 @@ "resourcesVpcConfig": { "target": "com.amazonaws.eks#VpcConfigRequest" }, + "logging": { + "target": "com.amazonaws.eks#Logging", + "traits": { + "smithy.api#documentation": "

          Enable or disable exporting the Kubernetes control plane logs for your cluster to\n CloudWatch Logs. By default, cluster control plane logs aren't exported to CloudWatch Logs. For more\n information, see Amazon EKS Cluster Control Plane Logs in the\n \n Amazon EKS User Guide\n .

          \n \n

          CloudWatch Logs ingestion, archive storage, and data scanning rates apply to exported\n control plane logs. For more information, see Amazon CloudWatch Pricing.

          \n
          " + } + }, "clientRequestToken": { "target": "com.amazonaws.eks#String", "traits": { @@ -2872,11 +3743,12 @@ "com.amazonaws.eks#UpdateClusterVersionRequest": { "type": "structure", "members": { - "clientRequestToken": { + "name": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", - "smithy.api#idempotencyToken": {} + "smithy.api#documentation": "

          The name of the Amazon EKS cluster to update.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} } }, "version": { @@ -2886,12 +3758,11 @@ "smithy.api#required": {} } }, - "name": { + "clientRequestToken": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS cluster to update.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} + "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", + "smithy.api#idempotencyToken": {} } } } @@ -2967,10 +3838,12 @@ "com.amazonaws.eks#UpdateNodegroupConfigRequest": { "type": "structure", "members": { - "scalingConfig": { - "target": "com.amazonaws.eks#NodegroupScalingConfig", + "clusterName": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The scaling configuration details for the Auto Scaling group after the update.

          " + "smithy.api#documentation": "

          The name of the Amazon EKS cluster that the managed node group resides in.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} } }, "nodegroupName": { @@ -2981,12 +3854,16 @@ "smithy.api#required": {} } }, - "clusterName": { - "target": "com.amazonaws.eks#String", + "labels": { + "target": "com.amazonaws.eks#UpdateLabelsPayload", "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS cluster that the managed node group resides in.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} + "smithy.api#documentation": "

          The Kubernetes labels to be applied to the nodes in the node group after the\n update.

          " + } + }, + "scalingConfig": { + "target": "com.amazonaws.eks#NodegroupScalingConfig", + "traits": { + "smithy.api#documentation": "

          The scaling configuration details for the Auto Scaling group after the update.

          " } }, "clientRequestToken": { @@ -2995,12 +3872,6 @@ "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", "smithy.api#idempotencyToken": {} } - }, - "labels": { - "target": "com.amazonaws.eks#UpdateLabelsPayload", - "traits": { - "smithy.api#documentation": "

          The Kubernetes labels to be applied to the nodes in the node group after the\n update.

          " - } } } }, @@ -3041,7 +3912,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Updates the Kubernetes version or AMI version of an Amazon EKS managed node group.

          \n

          You can update a node group using a launch template only if the node group was\n originally deployed with a launch template. If you need to update a custom AMI in a node\n group that was deployed with a launch template, then update your custom AMI, specify the\n new ID in a new version of the launch template, and then update the node group to the\n new version of the launch template.

          \n

          If you update without a launch template, then you can update to the latest available\n AMI version of a node group's current Kubernetes version by not specifying a Kubernetes\n version in the request. You can update to the latest AMI version of your cluster's\n current Kubernetes version by specifying your cluster's Kubernetes version in the\n request. For more information, see Amazon EKS-Optimized Linux AMI Versions in the Amazon EKS User Guide.

          \n

          You cannot roll back a node group to an earlier Kubernetes version or AMI\n version.

          \n

          When a node in a managed node group is terminated due to a scaling action or update,\n the pods in that node are drained first. Amazon EKS attempts to drain the nodes gracefully\n and will fail if it is unable to do so. You can force the update if Amazon EKS\n is unable to drain the nodes as a result of a pod disruption budget issue.

          ", + "smithy.api#documentation": "

          Updates the Kubernetes version or AMI version of an Amazon EKS managed node group.

          \n

          You can update a node group using a launch template only if the node group was\n originally deployed with a launch template. If you need to update a custom AMI in a node\n group that was deployed with a launch template, then update your custom AMI, specify the\n new ID in a new version of the launch template, and then update the node group to the\n new version of the launch template.

          \n

          If you update without a launch template, then you can update to the latest available\n AMI version of a node group's current Kubernetes version by not specifying a Kubernetes\n version in the request. You can update to the latest AMI version of your cluster's\n current Kubernetes version by specifying your cluster's Kubernetes version in the\n request. For more information, see Amazon EKS\n optimized Amazon Linux 2 AMI versions in the Amazon EKS User Guide.

          \n

          You cannot roll back a node group to an earlier Kubernetes version or AMI\n version.

          \n

          When a node in a managed node group is terminated due to a scaling action or update,\n the pods in that node are drained first. Amazon EKS attempts to drain the nodes gracefully\n and will fail if it is unable to do so. You can force the update if Amazon EKS\n is unable to drain the nodes as a result of a pod disruption budget issue.

          ", "smithy.api#http": { "method": "POST", "uri": "/clusters/{clusterName}/node-groups/{nodegroupName}/update-version", @@ -3052,22 +3923,32 @@ "com.amazonaws.eks#UpdateNodegroupVersionRequest": { "type": "structure", "members": { - "version": { + "clusterName": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The Kubernetes version to update to. If no version is specified, then the Kubernetes\n version of the node group does not change. You can specify the Kubernetes version of the\n cluster to update the node group to the latest AMI version of the cluster's Kubernetes\n version. If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify \n version, or the node group update will fail.\n For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The name of the Amazon EKS cluster that is associated with the managed node group to\n update.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} } }, - "force": { - "target": "com.amazonaws.eks#Boolean", + "nodegroupName": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          Force the update if the existing node group's pods are unable to be drained due to a\n pod disruption budget issue. If an update fails because pods could not be drained, you\n can force the update after it fails to terminate the old node whether or not any pods\n are running on the node.

          " + "smithy.api#documentation": "

          The name of the managed node group to update.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "version": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The Kubernetes version to update to. If no version is specified, then the Kubernetes\n version of the node group does not change. You can specify the Kubernetes version of the\n cluster to update the node group to the latest AMI version of the cluster's Kubernetes\n version. If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify \n version, or the node group update will fail.\n For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " } }, "releaseVersion": { "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The AMI version of the Amazon EKS-optimized AMI to use for the update. By default, the\n latest available AMI version for the node group's Kubernetes version is used. For more\n information, see Amazon EKS-Optimized Linux AMI Versions in the\n Amazon EKS User Guide. If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify \n releaseVersion, or the node group update will fail.\n For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " + "smithy.api#documentation": "

          The AMI version of the Amazon EKS optimized AMI to use for the update. By default, the\n latest available AMI version for the node group's Kubernetes version is used. For more\n information, see Amazon EKS optimized Amazon Linux 2 AMI versions in the\n Amazon EKS User Guide. If you specify launchTemplate, and your launch template uses a custom AMI, then don't specify \n releaseVersion, or the node group update will fail.\n For more information about using launch templates with Amazon EKS, see Launch template support in the Amazon EKS User Guide.

          " } }, "launchTemplate": { @@ -3076,12 +3957,10 @@ "smithy.api#documentation": "

          An object representing a node group's launch template specification. You can only\n update a node group using a launch template if the node group was originally deployed\n with a launch template.

          " } }, - "nodegroupName": { - "target": "com.amazonaws.eks#String", + "force": { + "target": "com.amazonaws.eks#Boolean", "traits": { - "smithy.api#documentation": "

          The name of the managed node group to update.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} + "smithy.api#documentation": "

          Force the update if the existing node group's pods are unable to be drained due to a\n pod disruption budget issue. If an update fails because pods could not be drained, you\n can force the update after it fails to terminate the old node whether or not any pods\n are running on the node.

          " } }, "clientRequestToken": { @@ -3090,14 +3969,6 @@ "smithy.api#documentation": "

          Unique, case-sensitive identifier that you provide to ensure the idempotency of the\n request.

          ", "smithy.api#idempotencyToken": {} } - }, - "clusterName": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The name of the Amazon EKS cluster that is associated with the managed node group to\n update.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} - } } } }, @@ -3112,17 +3983,17 @@ "com.amazonaws.eks#UpdateParam": { "type": "structure", "members": { - "value": { - "target": "com.amazonaws.eks#String", - "traits": { - "smithy.api#documentation": "

          The value of the keys submitted as part of an update request.

          " - } - }, "type": { "target": "com.amazonaws.eks#UpdateParamType", "traits": { "smithy.api#documentation": "

          The keys associated with an update request.

          " } + }, + "value": { + "target": "com.amazonaws.eks#String", + "traits": { + "smithy.api#documentation": "

          The value of the keys submitted as part of an update request.

          " + } } }, "traits": { @@ -3180,6 +4051,18 @@ { "value": "PublicAccessCidrs", "name": "PUBLIC_ACCESS_CIDRS" + }, + { + "value": "AddonVersion", + "name": "ADDON_VERSION" + }, + { + "value": "ServiceAccountRoleArn", + "name": "SERVICE_ACCOUNT_ROLE_ARN" + }, + { + "value": "ResolveConflicts", + "name": "RESOLVE_CONFLICTS" } ] } @@ -3232,6 +4115,10 @@ { "value": "ConfigUpdate", "name": "CONFIG_UPDATE" + }, + { + "value": "AddonUpdate", + "name": "ADDON_UPDATE" } ] } @@ -3239,16 +4126,16 @@ "com.amazonaws.eks#VpcConfigRequest": { "type": "structure", "members": { - "securityGroupIds": { + "subnetIds": { "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          Specify one or more security groups for the cross-account elastic network interfaces\n that Amazon EKS creates to use to allow communication between your worker nodes and the\n Kubernetes control plane. If you don't specify a security group, the default security\n group for your VPC is used.

          " + "smithy.api#documentation": "

          Specify subnets for your Amazon EKS worker nodes. Amazon EKS creates cross-account elastic\n network interfaces in these subnets to allow communication between your worker nodes and\n the Kubernetes control plane.

          " } }, - "endpointPrivateAccess": { - "target": "com.amazonaws.eks#BoxedBoolean", + "securityGroupIds": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          Set this value to true to enable private access for your cluster's\n Kubernetes API server endpoint. If you enable private access, Kubernetes API requests\n from within your cluster's VPC use the private VPC endpoint. The default value for this\n parameter is false, which disables private access for your Kubernetes API\n server. If you disable private access and you have worker nodes or AWS Fargate pods in the\n cluster, then ensure that publicAccessCidrs includes the necessary CIDR\n blocks for communication with the worker nodes or Fargate pods. For more information, see\n Amazon EKS\n Cluster Endpoint Access Control in the\n \n Amazon EKS User Guide\n .

          " + "smithy.api#documentation": "

          Specify one or more security groups for the cross-account elastic network interfaces\n that Amazon EKS creates to use to allow communication between your worker nodes and the\n Kubernetes control plane. If you don't specify any security groups, then familiarize\n yourself with the difference between Amazon EKS defaults for clusters deployed with\n Kubernetes:

          \n
            \n
          • \n

            1.14 Amazon EKS platform version eks.2 and earlier

            \n
          • \n
          • \n

            1.14 Amazon EKS platform version eks.3 and later

            \n
          • \n
          \n

          For more information, see Amazon EKS security group\n considerations in the \n Amazon EKS User Guide\n .

          " } }, "endpointPublicAccess": { @@ -3257,16 +4144,16 @@ "smithy.api#documentation": "

          Set this value to false to disable public access to your cluster's\n Kubernetes API server endpoint. If you disable public access, your cluster's Kubernetes\n API server can only receive requests from within the cluster VPC. The default value for\n this parameter is true, which enables public access for your Kubernetes API\n server. For more information, see Amazon EKS Cluster\n Endpoint Access Control in the \n Amazon EKS User Guide\n .

          " } }, - "publicAccessCidrs": { - "target": "com.amazonaws.eks#StringList", + "endpointPrivateAccess": { + "target": "com.amazonaws.eks#BoxedBoolean", "traits": { - "smithy.api#documentation": "

          The CIDR blocks that are allowed access to your cluster's public Kubernetes API server\n endpoint. Communication to the endpoint from addresses outside of the CIDR blocks that\n you specify is denied. The default value is 0.0.0.0/0. If you've disabled\n private endpoint access and you have worker nodes or AWS Fargate pods in the cluster, then\n ensure that you specify the necessary CIDR blocks. For more information, see Amazon EKS Cluster\n Endpoint Access Control in the \n Amazon EKS User Guide\n .

          " + "smithy.api#documentation": "

          Set this value to true to enable private access for your cluster's\n Kubernetes API server endpoint. If you enable private access, Kubernetes API requests\n from within your cluster's VPC use the private VPC endpoint. The default value for this\n parameter is false, which disables private access for your Kubernetes API\n server. If you disable private access and you have worker nodes or AWS Fargate pods in the\n cluster, then ensure that publicAccessCidrs includes the necessary CIDR\n blocks for communication with the worker nodes or Fargate pods. For more information, see\n Amazon EKS\n Cluster Endpoint Access Control in the\n \n Amazon EKS User Guide\n .

          " } }, - "subnetIds": { + "publicAccessCidrs": { "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          Specify subnets for your Amazon EKS worker nodes. Amazon EKS creates cross-account elastic\n network interfaces in these subnets to allow communication between your worker nodes and\n the Kubernetes control plane.

          " + "smithy.api#documentation": "

          The CIDR blocks that are allowed access to your cluster's public Kubernetes API server\n endpoint. Communication to the endpoint from addresses outside of the CIDR blocks that\n you specify is denied. The default value is 0.0.0.0/0. If you've disabled\n private endpoint access and you have worker nodes or AWS Fargate pods in the cluster, then\n ensure that you specify the necessary CIDR blocks. For more information, see Amazon EKS Cluster\n Endpoint Access Control in the \n Amazon EKS User Guide\n .

          " } } }, @@ -3277,10 +4164,16 @@ "com.amazonaws.eks#VpcConfigResponse": { "type": "structure", "members": { - "vpcId": { - "target": "com.amazonaws.eks#String", + "subnetIds": { + "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The VPC associated with your cluster.

          " + "smithy.api#documentation": "

          The subnets associated with your cluster.

          " + } + }, + "securityGroupIds": { + "target": "com.amazonaws.eks#StringList", + "traits": { + "smithy.api#documentation": "

          The security groups associated with the cross-account elastic network interfaces that\n are used to allow communication between your worker nodes and the Kubernetes control\n plane.

          " } }, "clusterSecurityGroupId": { @@ -3289,10 +4182,10 @@ "smithy.api#documentation": "

          The cluster security group that was created by Amazon EKS for the cluster. Managed node\n groups use this security group for control-plane-to-data-plane communication.

          " } }, - "publicAccessCidrs": { - "target": "com.amazonaws.eks#StringList", + "vpcId": { + "target": "com.amazonaws.eks#String", "traits": { - "smithy.api#documentation": "

          The CIDR blocks that are allowed access to your cluster's public Kubernetes API server\n endpoint. Communication to the endpoint from addresses outside of the listed CIDR blocks\n is denied. The default value is 0.0.0.0/0. If you've disabled private\n endpoint access and you have worker nodes or AWS Fargate pods in the cluster, then ensure\n that the necessary CIDR blocks are listed. For more information, see Amazon EKS Cluster\n Endpoint Access Control in the \n Amazon EKS User Guide\n .

          " + "smithy.api#documentation": "

          The VPC associated with your cluster.

          " } }, "endpointPublicAccess": { @@ -3301,22 +4194,16 @@ "smithy.api#documentation": "

          This parameter indicates whether the Amazon EKS public API server endpoint is enabled. If\n the Amazon EKS public API server endpoint is disabled, your cluster's Kubernetes API server\n can only receive requests that originate from within the cluster VPC.

          " } }, - "subnetIds": { - "target": "com.amazonaws.eks#StringList", + "endpointPrivateAccess": { + "target": "com.amazonaws.eks#Boolean", "traits": { - "smithy.api#documentation": "

          The subnets associated with your cluster.

          " + "smithy.api#documentation": "

          This parameter indicates whether the Amazon EKS private API server endpoint is enabled. If\n the Amazon EKS private API server endpoint is enabled, Kubernetes API requests that originate\n from within your cluster's VPC use the private VPC endpoint instead of traversing the\n internet. If this value is disabled and you have worker nodes or AWS Fargate pods in the\n cluster, then ensure that publicAccessCidrs includes the necessary CIDR\n blocks for communication with the worker nodes or Fargate pods. For more information, see\n Amazon EKS\n Cluster Endpoint Access Control in the\n \n Amazon EKS User Guide\n .

          " } }, - "securityGroupIds": { + "publicAccessCidrs": { "target": "com.amazonaws.eks#StringList", "traits": { - "smithy.api#documentation": "

          The security groups associated with the cross-account elastic network interfaces that\n are used to allow communication between your worker nodes and the Kubernetes control\n plane.

          " - } - }, - "endpointPrivateAccess": { - "target": "com.amazonaws.eks#Boolean", - "traits": { - "smithy.api#documentation": "

          This parameter indicates whether the Amazon EKS private API server endpoint is enabled. If\n the Amazon EKS private API server endpoint is enabled, Kubernetes API requests that originate\n from within your cluster's VPC use the private VPC endpoint instead of traversing the\n internet. If this value is disabled and you have worker nodes or AWS Fargate pods in the\n cluster, then ensure that publicAccessCidrs includes the necessary CIDR\n blocks for communication with the worker nodes or Fargate pods. For more information, see\n Amazon EKS\n Cluster Endpoint Access Control in the\n \n Amazon EKS User Guide\n .

          " + "smithy.api#documentation": "

          The CIDR blocks that are allowed access to your cluster's public Kubernetes API server\n endpoint. Communication to the endpoint from addresses outside of the listed CIDR blocks\n is denied. The default value is 0.0.0.0/0. If you've disabled private\n endpoint access and you have worker nodes or AWS Fargate pods in the cluster, then ensure\n that the necessary CIDR blocks are listed. For more information, see Amazon EKS Cluster\n Endpoint Access Control in the \n Amazon EKS User Guide\n .

          " } } }, diff --git a/codegen/sdk-codegen/aws-models/honeycode.2020-03-01.json b/codegen/sdk-codegen/aws-models/honeycode.2020-03-01.json index e4dfbeb390c3..3a8980f1f439 100644 --- a/codegen/sdk-codegen/aws-models/honeycode.2020-03-01.json +++ b/codegen/sdk-codegen/aws-models/honeycode.2020-03-01.json @@ -37,7 +37,7 @@ } }, "traits": { - "smithy.api#documentation": "

          \n You do not have sufficient access to perform this action. Check that the workbook is owned by you and your\n IAM policy allows access to the screen/automation in the request.\n

          ", + "smithy.api#documentation": "

          \n You do not have sufficient access to perform this action. Check that the workbook is owned by you and your\n IAM policy allows access to the resource in the request.\n

          ", "smithy.api#error": "client", "smithy.api#httpError": 403 } @@ -68,134 +68,124 @@ "smithy.api#httpError": 504 } }, - "com.amazonaws.honeycode#ClientRequestToken": { + "com.amazonaws.honeycode#AwsUserArn": { "type": "string", "traits": { "smithy.api#length": { - "min": 32, - "max": 64 + "min": 20, + "max": 2048 } } }, - "com.amazonaws.honeycode#ColumnMetadata": { + "com.amazonaws.honeycode#BatchCreateTableRows": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#BatchCreateTableRowsRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#BatchCreateTableRowsResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The BatchCreateTableRows API allows you to create one or more rows at the end of a table in a workbook.\n The API allows you to specify the values to set in some or all of the columns in the new rows.\n

          \n

          \n If a column is not explicitly set in a specific row, then the column level formula specified in the table\n will be applied to the new row. If there is no column level formula but the last row of the table has a\n formula, then that formula will be copied down to the new row. If there is no column level formula and\n no formula in the last row of the table, then that column will be left blank for the new rows.\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/workbooks/{workbookId}/tables/{tableId}/rows/batchcreate", + "code": 200 + } + } + }, + "com.amazonaws.honeycode#BatchCreateTableRowsRequest": { "type": "structure", "members": { - "name": { - "target": "com.amazonaws.honeycode#Name", + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          The name of the column.

          ", + "smithy.api#documentation": "

          The ID of the workbook where the new rows are being added.

          \n

          \n If a workbook with the specified ID could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, "smithy.api#required": {} } }, - "format": { - "target": "com.amazonaws.honeycode#Format", + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          The format of the column.

          ", + "smithy.api#documentation": "

          The ID of the table where the new rows are being added.

          \n

          \n If a table with the specified ID could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "rowsToCreate": { + "target": "com.amazonaws.honeycode#CreateRowDataList", + "traits": { + "smithy.api#documentation": "

          \n The list of rows to create at the end of the table. Each item in this list needs to have a batch item id\n to uniquely identify the element in the request and the cells to create for that row.\n You need to specify at least one item in this list.\n

          \n

          \n Note that if one of the column ids in any of the rows in the request does not exist in the table, then the\n request fails and no updates are made to the table.\n

          ", "smithy.api#required": {} } + }, + "clientRequestToken": { + "target": "com.amazonaws.honeycode#ClientRequestToken", + "traits": { + "smithy.api#documentation": "

          \n The request token for performing the batch create operation.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will not perform\n the operation again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          " + } } - }, - "traits": { - "smithy.api#documentation": "

          Metadata for column in the table.

          " } }, - "com.amazonaws.honeycode#DataItem": { + "com.amazonaws.honeycode#BatchCreateTableRowsResult": { "type": "structure", "members": { - "formattedValue": { - "target": "com.amazonaws.honeycode#FormattedValue", + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", "traits": { - "smithy.api#documentation": "

          The formatted value of the data. e.g. John Smith.

          " + "smithy.api#documentation": "

          The updated workbook cursor after adding the new rows at the end of the table.

          ", + "smithy.api#required": {} } }, - "overrideFormat": { - "target": "com.amazonaws.honeycode#Format", + "createdRows": { + "target": "com.amazonaws.honeycode#CreatedRowsMap", "traits": { - "smithy.api#documentation": "

          \n The overrideFormat is optional and is specified only if a particular row of data has a different format for\n the data than the default format defined on the screen or the table.\n

          " + "smithy.api#documentation": "

          The map of batch item id to the row id that was created for that item.

          ", + "smithy.api#required": {} } }, - "rawValue": { - "target": "com.amazonaws.honeycode#RawValue", + "failedBatchItems": { + "target": "com.amazonaws.honeycode#FailedBatchItems", "traits": { - "smithy.api#documentation": "

          The raw value of the data. e.g. jsmith@example.com

          " + "smithy.api#documentation": "

          \n The list of batch items in the request that could not be added to the table. Each element in this list\n contains one item from the request that could not be added to the table along with the reason why\n that item could not be added.\n

          " } } - }, - "traits": { - "smithy.api#documentation": "

          The data in a particular data cell defined on the screen.

          ", - "smithy.api#sensitive": {} - } - }, - "com.amazonaws.honeycode#DataItems": { - "type": "list", - "member": { - "target": "com.amazonaws.honeycode#DataItem" - } - }, - "com.amazonaws.honeycode#ErrorMessage": { - "type": "string" - }, - "com.amazonaws.honeycode#Format": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "AUTO", - "name": "Auto" - }, - { - "value": "NUMBER", - "name": "Number" - }, - { - "value": "CURRENCY", - "name": "Currency" - }, - { - "value": "DATE", - "name": "Date" - }, - { - "value": "TIME", - "name": "Time" - }, - { - "value": "DATE_TIME", - "name": "DateTime" - }, - { - "value": "PERCENTAGE", - "name": "Percentage" - }, - { - "value": "TEXT", - "name": "Text" - }, - { - "value": "ACCOUNTING", - "name": "Accounting" - }, - { - "value": "CONTACT", - "name": "Contact" - }, - { - "value": "ROWLINK", - "name": "Rowlink" - } - ] } }, - "com.amazonaws.honeycode#FormattedValue": { - "type": "string" - }, - "com.amazonaws.honeycode#GetScreenData": { + "com.amazonaws.honeycode#BatchDeleteTableRows": { "type": "operation", "input": { - "target": "com.amazonaws.honeycode#GetScreenDataRequest" + "target": "com.amazonaws.honeycode#BatchDeleteTableRowsRequest" }, "output": { - "target": "com.amazonaws.honeycode#GetScreenDataResult" + "target": "com.amazonaws.honeycode#BatchDeleteTableRowsResult" }, "errors": [ { @@ -221,114 +211,94 @@ } ], "traits": { - "smithy.api#documentation": "

          \n The GetScreenData API allows retrieval of data from a screen in a Honeycode app.\n The API allows setting local variables in the screen to filter, sort or otherwise affect what will be\n displayed on the screen.\n

          ", + "smithy.api#documentation": "

          \n The BatchDeleteTableRows API allows you to delete one or more rows from a table in a workbook.\n You need to specify the ids of the rows that you want to delete from the table.\n

          ", "smithy.api#http": { "method": "POST", - "uri": "/screendata", + "uri": "/workbooks/{workbookId}/tables/{tableId}/rows/batchdelete", "code": 200 } } }, - "com.amazonaws.honeycode#GetScreenDataRequest": { + "com.amazonaws.honeycode#BatchDeleteTableRowsRequest": { "type": "structure", "members": { - "nextToken": { - "target": "com.amazonaws.honeycode#PaginationToken", - "traits": { - "smithy.api#documentation": "

          \n This parameter is optional. If a nextToken is not specified, the API returns the first page of data.\n

          \n

          \n Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API\n will throw ValidationException.\n

          " - } - }, - "variables": { - "target": "com.amazonaws.honeycode#VariableValueMap", - "traits": { - "smithy.api#documentation": "

          \n Variables are optional and are needed only if the screen requires them to render correctly. Variables are\n specified as a map where the key is the name of the variable as defined on the screen. The value is an\n object which currently has only one property, rawValue, which holds the value of the variable to be passed\n to the screen.\n

          " - } - }, - "appId": { + "workbookId": { "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          The ID of the app that contains the screem.

          ", + "smithy.api#documentation": "

          The ID of the workbook where the rows are being deleted.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, "smithy.api#required": {} } }, - "maxResults": { - "target": "com.amazonaws.honeycode#MaxResults", + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          \n The number of results to be returned on a single page.\n Specify a number between 1 and 100. The maximum value is 100.\n

          \n

          \n This parameter is optional. If you don't specify this parameter, the default page size is 100.\n

          " + "smithy.api#documentation": "

          The ID of the table where the rows are being deleted.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} } }, - "screenId": { - "target": "com.amazonaws.honeycode#ResourceId", + "rowIds": { + "target": "com.amazonaws.honeycode#RowIdList", "traits": { - "smithy.api#documentation": "

          The ID of the screen.

          ", + "smithy.api#documentation": "

          \n The list of row ids to delete from the table. You need to specify at least one row id in this list.\n

          \n

          \n Note that if one of the row ids provided in the request does not exist in the table, then the request fails\n and no rows are deleted from the table.\n

          ", "smithy.api#required": {} } }, - "workbookId": { - "target": "com.amazonaws.honeycode#ResourceId", + "clientRequestToken": { + "target": "com.amazonaws.honeycode#ClientRequestToken", "traits": { - "smithy.api#documentation": "

          The ID of the workbook that contains the screen.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          \n The request token for performing the delete action.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will not perform\n the action again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          " } } } }, - "com.amazonaws.honeycode#GetScreenDataResult": { + "com.amazonaws.honeycode#BatchDeleteTableRowsResult": { "type": "structure", "members": { - "nextToken": { - "target": "com.amazonaws.honeycode#PaginationToken", - "traits": { - "smithy.api#documentation": "

          \n Provides the pagination token to load the next page if there are more results matching the request. If a\n pagination token is not present in the response, it means that all data matching the query has been loaded.\n

          " - } - }, "workbookCursor": { "target": "com.amazonaws.honeycode#WorkbookCursor", "traits": { - "smithy.api#documentation": "

          \n Indicates the cursor of the workbook at which the data returned by this workbook is read. Workbook cursor\n keeps increasing with every update and the increments are not sequential.\n

          ", + "smithy.api#documentation": "

          The updated workbook cursor after deleting the rows from the table.

          ", "smithy.api#required": {} } }, - "results": { - "target": "com.amazonaws.honeycode#ResultSetMap", + "failedBatchItems": { + "target": "com.amazonaws.honeycode#FailedBatchItems", "traits": { - "smithy.api#documentation": "

          A map of all the rows on the screen keyed by block name.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          \n The list of row ids in the request that could not be deleted from the table. Each element in this list\n contains one row id from the request that could not be deleted along with the reason why that item could not\n be deleted.\n

          " } } } }, - "com.amazonaws.honeycode#InternalServerException": { - "type": "structure", - "members": { - "message": { - "target": "com.amazonaws.honeycode#ErrorMessage" - } - }, + "com.amazonaws.honeycode#BatchErrorMessage": { + "type": "string", "traits": { - "smithy.api#documentation": "

          There were unexpected errors from the server.

          ", - "smithy.api#error": "server", - "smithy.api#httpError": 500 + "smithy.api#pattern": "^(?!\\s*$).+" } }, - "com.amazonaws.honeycode#InvokeScreenAutomation": { + "com.amazonaws.honeycode#BatchItemId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^(?!\\s*$).+" + } + }, + "com.amazonaws.honeycode#BatchUpdateTableRows": { "type": "operation", "input": { - "target": "com.amazonaws.honeycode#InvokeScreenAutomationRequest" + "target": "com.amazonaws.honeycode#BatchUpdateTableRowsRequest" }, "output": { - "target": "com.amazonaws.honeycode#InvokeScreenAutomationResult" + "target": "com.amazonaws.honeycode#BatchUpdateTableRowsResult" }, "errors": [ { "target": "com.amazonaws.honeycode#AccessDeniedException" }, - { - "target": "com.amazonaws.honeycode#AutomationExecutionException" - }, - { - "target": "com.amazonaws.honeycode#AutomationExecutionTimeoutException" - }, { "target": "com.amazonaws.honeycode#InternalServerException" }, @@ -349,261 +319,2128 @@ } ], "traits": { - "smithy.api#documentation": "

          \n The InvokeScreenAutomation API allows invoking an action defined in a screen in a Honeycode app.\n The API allows setting local variables, which can then be used in the automation being invoked.\n This allows automating the Honeycode app interactions to write, update or delete data in the workbook.\n

          ", + "smithy.api#documentation": "

          \n The BatchUpdateTableRows API allows you to update one or more rows in a table in a workbook.\n

          \n

          \n You can specify the values to set in some or all of the columns in the table for the specified\n rows.\n If a column is not explicitly specified in a particular row, then that column will not be updated\n for that row. To clear out the data in a specific cell, you need to set the value as an empty string\n (\"\").\n

          ", "smithy.api#http": { "method": "POST", - "uri": "/workbooks/{workbookId}/apps/{appId}/screens/{screenId}/automations/{screenAutomationId}", + "uri": "/workbooks/{workbookId}/tables/{tableId}/rows/batchupdate", "code": 200 } } }, - "com.amazonaws.honeycode#InvokeScreenAutomationRequest": { + "com.amazonaws.honeycode#BatchUpdateTableRowsRequest": { "type": "structure", "members": { "workbookId": { "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          The ID of the workbook that contains the screen automation.

          ", + "smithy.api#documentation": "

          The ID of the workbook where the rows are being updated.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", "smithy.api#httpLabel": {}, "smithy.api#required": {} } }, - "rowId": { - "target": "com.amazonaws.honeycode#RowId", + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          \n The row ID for the automation if the automation is defined inside a block with source or list.\n

          " + "smithy.api#documentation": "

          The ID of the table where the rows are being updated.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "rowsToUpdate": { + "target": "com.amazonaws.honeycode#UpdateRowDataList", + "traits": { + "smithy.api#documentation": "

          \n The list of rows to update in the table. Each item in this list needs to contain the row id to update\n along with the map of column id to cell values for each column in that row that needs to be updated.\n You need to specify at least one row in this list, and for each row, you need to specify at least one\n column to update.\n

          \n

          \n Note that if one of the row or column ids in the request does not exist in the table, then the request fails\n and no updates are made to the table.\n

          ", + "smithy.api#required": {} } }, "clientRequestToken": { "target": "com.amazonaws.honeycode#ClientRequestToken", "traits": { - "smithy.api#documentation": "

          \n The request token for performing the automation action.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will return the\n response of the previous call rather than performing the action again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          " + "smithy.api#documentation": "

          \n The request token for performing the update action.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will not perform\n the action again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          " } - }, - "screenId": { - "target": "com.amazonaws.honeycode#ResourceId", + } + } + }, + "com.amazonaws.honeycode#BatchUpdateTableRowsResult": { + "type": "structure", + "members": { + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", "traits": { - "smithy.api#documentation": "

          The ID of the screen that contains the screen automation.

          ", - "smithy.api#httpLabel": {}, + "smithy.api#documentation": "

          The updated workbook cursor after updating the rows in the table.

          ", "smithy.api#required": {} } }, - "appId": { - "target": "com.amazonaws.honeycode#ResourceId", + "failedBatchItems": { + "target": "com.amazonaws.honeycode#FailedBatchItems", "traits": { - "smithy.api#documentation": "

          The ID of the app that contains the screen automation.

          ", - "smithy.api#httpLabel": {}, - "smithy.api#required": {} + "smithy.api#documentation": "

          \n The list of batch items in the request that could not be updated in the table. Each element in this list\n contains one item from the request that could not be updated in the table along with the reason why\n that item could not be updated.\n

          " } - }, - "screenAutomationId": { + } + } + }, + "com.amazonaws.honeycode#BatchUpsertTableRows": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#BatchUpsertTableRowsRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#BatchUpsertTableRowsResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The BatchUpsertTableRows API allows you to upsert one or more rows in a table. The upsert\n operation takes a filter expression as input and evaluates it to find matching rows on the destination\n table. If matching rows are found, it will update the cells in the matching rows to new values specified\n in the request. If no matching rows are found, a new row is added at the end of the table and the cells in\n that row are set to the new values specified in the request.\n

          \n

          \n You can specify the values to set in some or all of the columns in the table for the\n matching or newly appended rows. If a column is not explicitly specified for a particular row, then that\n column will not be updated for that row. To clear out the data in a specific cell, you need to set the value\n as an empty string (\"\").\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/workbooks/{workbookId}/tables/{tableId}/rows/batchupsert", + "code": 200 + } + } + }, + "com.amazonaws.honeycode#BatchUpsertTableRowsRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook where the rows are being upserted.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the table where the rows are being upserted.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "rowsToUpsert": { + "target": "com.amazonaws.honeycode#UpsertRowDataList", + "traits": { + "smithy.api#documentation": "

          \n The list of rows to upsert in the table. Each item in this list needs to have a batch item id to uniquely\n identify the element in the request, a filter expression to find the rows to update for that element\n and the cell values to set for each column in the upserted rows. You need to specify\n at least one item in this list.\n

          \n

          \n Note that if one of the filter formulas in the request fails to evaluate because of an error or one of the\n column ids in any of the rows does not exist in the table, then the request fails\n and no updates are made to the table.\n

          ", + "smithy.api#required": {} + } + }, + "clientRequestToken": { + "target": "com.amazonaws.honeycode#ClientRequestToken", + "traits": { + "smithy.api#documentation": "

          \n The request token for performing the update action.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will not perform\n the action again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#BatchUpsertTableRowsResult": { + "type": "structure", + "members": { + "rows": { + "target": "com.amazonaws.honeycode#UpsertRowsResultMap", + "traits": { + "smithy.api#documentation": "

          \n A map with the batch item id as the key and the result of the upsert operation as the value. The\n result of the upsert operation specifies whether existing rows were updated or a new row was appended, along\n with the list of row ids that were affected.\n

          ", + "smithy.api#required": {} + } + }, + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          The updated workbook cursor after updating or appending rows in the table.

          ", + "smithy.api#required": {} + } + }, + "failedBatchItems": { + "target": "com.amazonaws.honeycode#FailedBatchItems", + "traits": { + "smithy.api#documentation": "

          \n The list of batch items in the request that could not be updated or appended in the table. Each element in\n this list contains one item from the request that could not be updated in the table along with the reason\n why that item could not be updated or appended.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#Cell": { + "type": "structure", + "members": { + "formula": { + "target": "com.amazonaws.honeycode#Formula", + "traits": { + "smithy.api#documentation": "

          \n The formula contained in the cell. This field is empty if a cell does not have a formula.\n

          " + } + }, + "format": { + "target": "com.amazonaws.honeycode#Format", + "traits": { + "smithy.api#documentation": "

          The format of the cell. If this field is empty, then the format is either not specified in the\n workbook or the format is set to AUTO.

          " + } + }, + "rawValue": { + "target": "com.amazonaws.honeycode#RawValue", + "traits": { + "smithy.api#documentation": "

          \n The raw value of the data contained in the cell. The raw value depends on the format of the data in the\n cell. However the attribute in the API return value is always a string containing the raw value.\n

          \n

          \n Cells with format DATE, DATE_TIME or TIME have the raw value as a floating point\n number where the whole number represents the number of days since 1/1/1900 and the fractional part\n represents the fraction of the day since midnight. For example, a cell with date 11/3/2020 has the raw value\n \"44138\". A cell with the time 9:00 AM has the raw value \"0.375\" and a cell with date/time value of\n 11/3/2020 9:00 AM has the raw value \"44138.375\". Notice that even though the raw value is a number in all\n three cases, it is still represented as a string.\n

          \n

          \n Cells with format NUMBER, CURRENCY, PERCENTAGE and ACCOUNTING have the raw value of the data as the number\n representing the data being displayed. For example, the number 1.325 with two decimal places in the format\n will have its raw value as \"1.325\" and formatted value as \"1.33\". A currency value for\n $10 will have the raw value as \"10\" and formatted value as \"$10.00\". A value representing 20% with two\n decimal places in the format will have its raw value as \"0.2\" and the formatted value as \"20.00%\". An\n accounting value of -$25 will have \"-25\" as the raw value and \"$ (25.00)\" as the formatted value.\n

          \n

          \n Cells with format TEXT will have the raw text as the raw value. For example, a cell with text \"John Smith\"\n will have \"John Smith\" as both the raw value and the formatted value.\n

          \n

          \n Cells with format CONTACT will have the name of the contact as a formatted value and the email address of\n the contact as the raw value. For example, a contact for John Smith will have \"John Smith\" as the\n formatted value and \"john.smith@example.com\" as the raw value.\n

          \n

          \n Cells with format ROWLINK (aka picklist) will have the first column of the linked row as the formatted value\n and the row id of the linked row as the raw value. For example, a cell containing a picklist to a table\n that displays task status might have \"Completed\" as the formatted value and\n \"row:dfcefaee-5b37-4355-8f28-40c3e4ff5dd4/ca432b2f-b8eb-431d-9fb5-cbe0342f9f03\" as the raw value.\n

          \n

          \n Cells with format AUTO or cells without any format that are auto-detected as one of the formats above will\n contain the raw and formatted values as mentioned above, based on the auto-detected formats. If there is no\n auto-detected format, the raw and formatted values will be the same as the data in the cell.\n

          " + } + }, + "formattedValue": { + "target": "com.amazonaws.honeycode#FormattedValue", + "traits": { + "smithy.api#documentation": "

          \n The formatted value of the cell. This is the value that you see displayed in the cell in the UI.\n

          \n

          \n Note that the formatted value of a cell is always represented as a string irrespective of the data that is\n stored in the cell. For example, if a cell contains a date, the formatted value of the cell is the string\n representation of the formatted date being shown in the cell in the UI. See details in the rawValue field\n below for how cells of different formats will have different raw and formatted values.\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that represents a single cell in a table.

          ", + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.honeycode#CellInput": { + "type": "structure", + "members": { + "fact": { + "target": "com.amazonaws.honeycode#Fact", + "traits": { + "smithy.api#documentation": "

          \n Fact represents the data that is entered into a cell. This data can be free text or a formula. Formulas need\n to start with the equals (=) sign.\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n CellInput object contains the data needed to create or update cells in a table.\n

          " + } + }, + "com.amazonaws.honeycode#Cells": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#Cell" + } + }, + "com.amazonaws.honeycode#ClientRequestToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 32, + "max": 64 + }, + "smithy.api#pattern": "^(?!\\s*$).+" + } + }, + "com.amazonaws.honeycode#ColumnMetadata": { + "type": "structure", + "members": { + "name": { + "target": "com.amazonaws.honeycode#Name", + "traits": { + "smithy.api#documentation": "

          The name of the column.

          ", + "smithy.api#required": {} + } + }, + "format": { + "target": "com.amazonaws.honeycode#Format", + "traits": { + "smithy.api#documentation": "

          The format of the column.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Metadata for column in the table.

          " + } + }, + "com.amazonaws.honeycode#CreateRowData": { + "type": "structure", + "members": { + "batchItemId": { + "target": "com.amazonaws.honeycode#BatchItemId", + "traits": { + "smithy.api#documentation": "

          \n An external identifier that represents the single row that is being created as part of the\n BatchCreateTableRows request. This can be any string that you can use to identify the row in the request.\n The BatchCreateTableRows API puts the batch item id in the results to allow you to link data in the\n request to data in the results.\n

          ", + "smithy.api#required": {} + } + }, + "cellsToCreate": { + "target": "com.amazonaws.honeycode#RowDataInput", + "traits": { + "smithy.api#documentation": "

          \n A map representing the cells to create in the new row. The key is the column id of the\n cell and the value is the CellInput object that represents the data to set in that cell.\n

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n Data needed to create a single row in a table as part of the BatchCreateTableRows request.\n

          " + } + }, + "com.amazonaws.honeycode#CreateRowDataList": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#CreateRowData" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#CreatedRowsMap": { + "type": "map", + "key": { + "target": "com.amazonaws.honeycode#BatchItemId" + }, + "value": { + "target": "com.amazonaws.honeycode#RowId" + } + }, + "com.amazonaws.honeycode#DataItem": { + "type": "structure", + "members": { + "overrideFormat": { + "target": "com.amazonaws.honeycode#Format", + "traits": { + "smithy.api#documentation": "

          \n The overrideFormat is optional and is specified only if a particular row of data has a different format for\n the data than the default format defined on the screen or the table.\n

          " + } + }, + "rawValue": { + "target": "com.amazonaws.honeycode#RawValue", + "traits": { + "smithy.api#documentation": "

          The raw value of the data. e.g. jsmith@example.com

          " + } + }, + "formattedValue": { + "target": "com.amazonaws.honeycode#FormattedValue", + "traits": { + "smithy.api#documentation": "

          The formatted value of the data. e.g. John Smith.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The data in a particular data cell defined on the screen.

          ", + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.honeycode#DataItems": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#DataItem" + } + }, + "com.amazonaws.honeycode#DelimitedTextDelimiter": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1 + }, + "smithy.api#pattern": "^[^\\n\\r\\x00\\x08\\x0B\\x0C\\x0E\\x1F]?$" + } + }, + "com.amazonaws.honeycode#DelimitedTextImportOptions": { + "type": "structure", + "members": { + "delimiter": { + "target": "com.amazonaws.honeycode#DelimitedTextDelimiter", + "traits": { + "smithy.api#documentation": "

          The delimiter to use for separating columns in a single row of the input.

          ", + "smithy.api#required": {} + } + }, + "hasHeaderRow": { + "target": "com.amazonaws.honeycode#HasHeaderRow", + "traits": { + "smithy.api#documentation": "

          Indicates whether the input file has a header row at the top containing the column names.

          " + } + }, + "ignoreEmptyRows": { + "target": "com.amazonaws.honeycode#IgnoreEmptyRows", + "traits": { + "smithy.api#documentation": "

          A parameter to indicate whether empty rows should be ignored or be included in the import.

          " + } + }, + "dataCharacterEncoding": { + "target": "com.amazonaws.honeycode#ImportDataCharacterEncoding", + "traits": { + "smithy.api#documentation": "

          The encoding of the data in the input file.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n An object that contains the options relating to parsing delimited text as part of an import request.\n

          " + } + }, + "com.amazonaws.honeycode#DescribeTableDataImportJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#DescribeTableDataImportJobRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#DescribeTableDataImportJobResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The DescribeTableDataImportJob API allows you to retrieve the status and details of a table data import job.\n

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/workbooks/{workbookId}/tables/{tableId}/import/{jobId}", + "code": 200 + } + } + }, + "com.amazonaws.honeycode#DescribeTableDataImportJobRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook into which data was imported.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the table into which data was imported.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "jobId": { + "target": "com.amazonaws.honeycode#JobId", + "traits": { + "smithy.api#documentation": "

          The ID of the job that was returned by the StartTableDataImportJob request.

          \n

          \n If a job with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.honeycode#DescribeTableDataImportJobResult": { + "type": "structure", + "members": { + "jobStatus": { + "target": "com.amazonaws.honeycode#TableDataImportJobStatus", + "traits": { + "smithy.api#documentation": "

          \n The current status of the import job.\n

          ", + "smithy.api#required": {} + } + }, + "message": { + "target": "com.amazonaws.honeycode#TableDataImportJobMessage", + "traits": { + "smithy.api#documentation": "

          \n A message providing more details about the current status of the import job.\n

          ", + "smithy.api#required": {} + } + }, + "jobMetadata": { + "target": "com.amazonaws.honeycode#TableDataImportJobMetadata", + "traits": { + "smithy.api#documentation": "

          \n The metadata about the job that was submitted for import.\n

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.honeycode#DestinationOptions": { + "type": "structure", + "members": { + "columnMap": { + "target": "com.amazonaws.honeycode#ImportColumnMap", + "traits": { + "smithy.api#documentation": "

          A map of the column id to the import properties for each column.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that contains the options relating to the destination of the import request.

          " + } + }, + "com.amazonaws.honeycode#Email": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 3, + "max": 254 + }, + "smithy.api#pattern": "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$", + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.honeycode#ErrorMessage": { + "type": "string" + }, + "com.amazonaws.honeycode#Fact": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 8192 + }, + "smithy.api#pattern": "[\\s\\S]*", + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.honeycode#FailedBatchItem": { + "type": "structure", + "members": { + "id": { + "target": "com.amazonaws.honeycode#BatchItemId", + "traits": { + "smithy.api#documentation": "

          \n The id of the batch item that failed. This is the batch item id for the BatchCreateTableRows and\n BatchUpsertTableRows operations and the row id for the BatchUpdateTableRows and BatchDeleteTableRows\n operations.\n

          ", + "smithy.api#required": {} + } + }, + "errorMessage": { + "target": "com.amazonaws.honeycode#BatchErrorMessage", + "traits": { + "smithy.api#documentation": "

          \n The error message that indicates why the batch item failed.\n

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n A single item in a batch that failed to perform the intended action because of an error preventing it from\n succeeding.\n

          " + } + }, + "com.amazonaws.honeycode#FailedBatchItems": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#FailedBatchItem" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#Filter": { + "type": "structure", + "members": { + "formula": { + "target": "com.amazonaws.honeycode#Formula", + "traits": { + "smithy.api#documentation": "

          \n A formula representing a filter function that returns zero or more matching rows from a table. Valid\n formulas in this field return a list of rows from a table. The most common ways of writing a formula to\n return a list of rows are to use the FindRow() or Filter() functions. Any other formula that returns zero or\n more rows is also acceptable. For example, you can use a formula that points to a cell that contains a\n filter function.\n

          ", + "smithy.api#required": {} + } + }, + "contextRowId": { + "target": "com.amazonaws.honeycode#RowId", + "traits": { + "smithy.api#documentation": "

          \n The optional contextRowId attribute can be used to specify the row id of the context row if the filter\n formula contains unqualified references to table columns and needs a context row to evaluate them\n successfully.\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n An object that represents a filter formula along with the id of the context row under which the filter\n function needs to evaluate.\n

          " + } + }, + "com.amazonaws.honeycode#Format": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "AUTO", + "name": "Auto" + }, + { + "value": "NUMBER", + "name": "Number" + }, + { + "value": "CURRENCY", + "name": "Currency" + }, + { + "value": "DATE", + "name": "Date" + }, + { + "value": "TIME", + "name": "Time" + }, + { + "value": "DATE_TIME", + "name": "DateTime" + }, + { + "value": "PERCENTAGE", + "name": "Percentage" + }, + { + "value": "TEXT", + "name": "Text" + }, + { + "value": "ACCOUNTING", + "name": "Accounting" + }, + { + "value": "CONTACT", + "name": "Contact" + }, + { + "value": "ROWLINK", + "name": "Rowlink" + } + ] + } + }, + "com.amazonaws.honeycode#FormattedValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 8192 + }, + "smithy.api#pattern": "[\\s\\S]*" + } + }, + "com.amazonaws.honeycode#Formula": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 8192 + }, + "smithy.api#pattern": "^=.*", + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.honeycode#GetScreenData": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#GetScreenDataRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#GetScreenDataResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The GetScreenData API allows retrieval of data from a screen in a Honeycode app.\n The API allows setting local variables in the screen to filter, sort or otherwise affect what will be\n displayed on the screen.\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/screendata", + "code": 200 + } + } + }, + "com.amazonaws.honeycode#GetScreenDataRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook that contains the screen.

          ", + "smithy.api#required": {} + } + }, + "appId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the app that contains the screen.

          ", + "smithy.api#required": {} + } + }, + "screenId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the screen.

          ", + "smithy.api#required": {} + } + }, + "variables": { + "target": "com.amazonaws.honeycode#VariableValueMap", + "traits": { + "smithy.api#documentation": "

          \n Variables are optional and are needed only if the screen requires them to render correctly. Variables are\n specified as a map where the key is the name of the variable as defined on the screen. The value is an\n object which currently has only one property, rawValue, which holds the value of the variable to be passed\n to the screen.\n

          " + } + }, + "maxResults": { + "target": "com.amazonaws.honeycode#MaxResults", + "traits": { + "smithy.api#documentation": "

          \n The number of results to be returned on a single page.\n Specify a number between 1 and 100. The maximum value is 100.\n

          \n

          \n This parameter is optional. If you don't specify this parameter, the default page size is 100.\n

          " + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n This parameter is optional. If a nextToken is not specified, the API returns the first page of data.\n

          \n

          \n Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API\n will throw ValidationException.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#GetScreenDataResult": { + "type": "structure", + "members": { + "results": { + "target": "com.amazonaws.honeycode#ResultSetMap", + "traits": { + "smithy.api#documentation": "

          A map of all the rows on the screen keyed by block name.

          ", + "smithy.api#required": {} + } + }, + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          \n Indicates the cursor of the workbook at which the data returned by this workbook is read. Workbook cursor\n keeps increasing with every update and the increments are not sequential.\n

          ", + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n Provides the pagination token to load the next page if there are more results matching the request. If a\n pagination token is not present in the response, it means that all data matching the query has been loaded.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#HasHeaderRow": { + "type": "boolean" + }, + "com.amazonaws.honeycode#IgnoreEmptyRows": { + "type": "boolean" + }, + "com.amazonaws.honeycode#ImportColumnMap": { + "type": "map", + "key": { + "target": "com.amazonaws.honeycode#ResourceId" + }, + "value": { + "target": "com.amazonaws.honeycode#SourceDataColumnProperties" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#ImportDataCharacterEncoding": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "UTF-8", + "name": "UTF_8" + }, + { + "value": "US-ASCII", + "name": "US_ASCII" + }, + { + "value": "ISO-8859-1", + "name": "ISO_8859_1" + }, + { + "value": "UTF-16BE", + "name": "UTF_16BE" + }, + { + "value": "UTF-16LE", + "name": "UTF_16LE" + }, + { + "value": "UTF-16", + "name": "UTF_16" + } + ] + } + }, + "com.amazonaws.honeycode#ImportDataSource": { + "type": "structure", + "members": { + "dataSourceConfig": { + "target": "com.amazonaws.honeycode#ImportDataSourceConfig", + "traits": { + "smithy.api#documentation": "

          The configuration parameters for the data source of the import

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that has details about the source of the data that was submitted for import.

          " + } + }, + "com.amazonaws.honeycode#ImportDataSourceConfig": { + "type": "structure", + "members": { + "dataSourceUrl": { + "target": "com.amazonaws.honeycode#SecureURL", + "traits": { + "smithy.api#documentation": "

          \n The URL from which source data will be downloaded for the import request.\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n An object that contains the configuration parameters for the data source of an import request.\n

          " + } + }, + "com.amazonaws.honeycode#ImportJobSubmitter": { + "type": "structure", + "members": { + "email": { + "target": "com.amazonaws.honeycode#Email", + "traits": { + "smithy.api#documentation": "

          The email id of the submitter of the import job, if available.

          " + } + }, + "userArn": { + "target": "com.amazonaws.honeycode#AwsUserArn", + "traits": { + "smithy.api#documentation": "

          The AWS user ARN of the submitter of the import job, if available.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that contains the attributes of the submitter of the import job.

          " + } + }, + "com.amazonaws.honeycode#ImportOptions": { + "type": "structure", + "members": { + "destinationOptions": { + "target": "com.amazonaws.honeycode#DestinationOptions", + "traits": { + "smithy.api#documentation": "

          Options relating to the destination of the import request.

          " + } + }, + "delimitedTextOptions": { + "target": "com.amazonaws.honeycode#DelimitedTextImportOptions", + "traits": { + "smithy.api#documentation": "

          Options relating to parsing delimited text. Required if dataFormat is DELIMITED_TEXT.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that contains the options specified by the submitter of the import request.

          " + } + }, + "com.amazonaws.honeycode#ImportSourceDataFormat": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "DELIMITED_TEXT", + "name": "DelimitedText" + } + ] + } + }, + "com.amazonaws.honeycode#InternalServerException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.honeycode#ErrorMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          There were unexpected errors from the server.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.honeycode#InvokeScreenAutomation": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#InvokeScreenAutomationRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#InvokeScreenAutomationResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#AutomationExecutionException" + }, + { + "target": "com.amazonaws.honeycode#AutomationExecutionTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The InvokeScreenAutomation API allows invoking an action defined in a screen in a Honeycode app.\n The API allows setting local variables, which can then be used in the automation being invoked.\n This allows automating the Honeycode app interactions to write, update or delete data in the workbook.\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/workbooks/{workbookId}/apps/{appId}/screens/{screenId}/automations/{screenAutomationId}", + "code": 200 + } + } + }, + "com.amazonaws.honeycode#InvokeScreenAutomationRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook that contains the screen automation.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "appId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the app that contains the screen automation.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "screenId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the screen that contains the screen automation.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "screenAutomationId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the automation action to be performed.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "variables": { + "target": "com.amazonaws.honeycode#VariableValueMap", + "traits": { + "smithy.api#documentation": "

          \n Variables are specified as a map where the key is the name of the variable as defined on the screen. The value is an\n object which currently has only one property, rawValue, which holds the value of the variable to be passed\n to the screen. Any variables defined in a screen are required to be passed in the call.\n

          " + } + }, + "rowId": { + "target": "com.amazonaws.honeycode#RowId", + "traits": { + "smithy.api#documentation": "

          \n The row ID for the automation if the automation is defined inside a block with source or list.\n

          " + } + }, + "clientRequestToken": { + "target": "com.amazonaws.honeycode#ClientRequestToken", + "traits": { + "smithy.api#documentation": "

          \n The request token for performing the automation action.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will return the\n response of the previous call rather than performing the action again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#InvokeScreenAutomationResult": { + "type": "structure", + "members": { + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          The updated workbook cursor after performing the automation action.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.honeycode#JobId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + }, + "smithy.api#pattern": "^[^\\n\\r\\x00\\x08\\x0B\\x0C\\x0E\\x1F]*$" + } + }, + "com.amazonaws.honeycode#ListTableColumns": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#ListTableColumnsRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#ListTableColumnsResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The ListTableColumns API allows you to retrieve a list of all the columns in a table in a workbook.\n

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/workbooks/{workbookId}/tables/{tableId}/columns", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken" + } + } + }, + "com.amazonaws.honeycode#ListTableColumnsRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook that contains the table whose columns are being retrieved.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the table whose columns are being retrieved.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n This parameter is optional. If a nextToken is not specified, the API returns the first page of data.\n

          \n

          \n Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API\n will throw ValidationException.\n

          ", + "smithy.api#httpQuery": "nextToken" + } + } + } + }, + "com.amazonaws.honeycode#ListTableColumnsResult": { + "type": "structure", + "members": { + "tableColumns": { + "target": "com.amazonaws.honeycode#TableColumns", + "traits": { + "smithy.api#documentation": "

          \n The list of columns in the table.\n

          ", + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n Provides the pagination token to load the next page if there are more results matching the request. If a\n pagination token is not present in the response, it means that all data matching the request has been\n loaded.\n

          " + } + }, + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          \n Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor\n keeps increasing with every update and the increments are not sequential.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#ListTableRows": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#ListTableRowsRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#ListTableRowsResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The ListTableRows API allows you to retrieve a list of all the rows in a table in a workbook.\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/workbooks/{workbookId}/tables/{tableId}/rows/list", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.honeycode#ListTableRowsRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook that contains the table whose rows are being retrieved.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the table whose rows are being retrieved.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "rowIds": { + "target": "com.amazonaws.honeycode#RowIdList", + "traits": { + "smithy.api#documentation": "

          \n This parameter is optional. If one or more row ids are specified in this list, then only the specified\n row ids are returned in the result. If no row ids are specified here, then all the rows in the table are\n returned.\n

          " + } + }, + "maxResults": { + "target": "com.amazonaws.honeycode#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of rows to return in each page of the results.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n This parameter is optional. If a nextToken is not specified, the API returns the first page of data.\n

          \n

          \n Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API\n will throw ValidationException.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#ListTableRowsResult": { + "type": "structure", + "members": { + "columnIds": { + "target": "com.amazonaws.honeycode#ResourceIds", + "traits": { + "smithy.api#documentation": "

          \n The list of columns in the table whose row data is returned in the result.\n

          ", + "smithy.api#required": {} + } + }, + "rows": { + "target": "com.amazonaws.honeycode#TableRows", + "traits": { + "smithy.api#documentation": "

          \n The list of rows in the table. Note that this result is paginated, so this list contains a maximum of 100\n rows.\n

          ", + "smithy.api#required": {} + } + }, + "rowIdsNotFound": { + "target": "com.amazonaws.honeycode#RowIdList", + "traits": { + "smithy.api#documentation": "

          \n The list of row ids included in the request that were not found in the table.\n

          " + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n Provides the pagination token to load the next page if there are more results matching the request. If a\n pagination token is not present in the response, it means that all data matching the request has been\n loaded.\n

          " + } + }, + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          \n Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor\n keeps increasing with every update and the increments are not sequential.\n

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.honeycode#ListTables": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#ListTablesRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#ListTablesResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The ListTables API allows you to retrieve a list of all the tables in a workbook.\n

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/workbooks/{workbookId}/tables", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.honeycode#ListTablesRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook whose tables are being retrieved.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "maxResults": { + "target": "com.amazonaws.honeycode#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of tables to return in each page of the results.

          ", + "smithy.api#httpQuery": "maxResults" + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n This parameter is optional. If a nextToken is not specified, the API returns the first page of data.\n

          \n

          \n Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API\n will throw ValidationException.\n

          ", + "smithy.api#httpQuery": "nextToken" + } + } + } + }, + "com.amazonaws.honeycode#ListTablesResult": { + "type": "structure", + "members": { + "tables": { + "target": "com.amazonaws.honeycode#Tables", + "traits": { + "smithy.api#documentation": "

          \n The list of tables in the workbook.\n

          ", + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n Provides the pagination token to load the next page if there are more results matching the request. If a\n pagination token is not present in the response, it means that all data matching the request has been\n loaded.\n

          " + } + }, + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          \n Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor\n keeps increasing with every update and the increments are not sequential.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#MaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#Name": { + "type": "string", + "traits": { + "smithy.api#sensitive": {} + } + }, + "com.amazonaws.honeycode#PaginationToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1024 + }, + "smithy.api#pattern": "^(?!\\s*$).+" + } + }, + "com.amazonaws.honeycode#QueryTableRows": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#QueryTableRowsRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#QueryTableRowsResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#RequestTimeoutException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The QueryTableRows API allows you to use a filter formula to query for specific rows in a table.\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/workbooks/{workbookId}/tables/{tableId}/rows/query", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "nextToken", + "outputToken": "nextToken", + "pageSize": "maxResults" + } + } + }, + "com.amazonaws.honeycode#QueryTableRowsRequest": { + "type": "structure", + "members": { + "workbookId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the workbook whose table rows are being queried.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the table whose rows are being queried.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "filterFormula": { + "target": "com.amazonaws.honeycode#Filter", + "traits": { + "smithy.api#documentation": "

          An object that represents a filter formula along with the id of the context row under which the filter\n function needs to evaluate.

          ", + "smithy.api#required": {} + } + }, + "maxResults": { + "target": "com.amazonaws.honeycode#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of rows to return in each page of the results.

          " + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n This parameter is optional. If a nextToken is not specified, the API returns the first page of data.\n

          \n

          \n Pagination tokens expire after 1 hour. If you use a token that was returned more than an hour back, the API\n will throw ValidationException.\n

          " + } + } + } + }, + "com.amazonaws.honeycode#QueryTableRowsResult": { + "type": "structure", + "members": { + "columnIds": { + "target": "com.amazonaws.honeycode#ResourceIds", + "traits": { + "smithy.api#documentation": "

          \n The list of columns in the table whose row data is returned in the result.\n

          ", + "smithy.api#required": {} + } + }, + "rows": { + "target": "com.amazonaws.honeycode#TableRows", + "traits": { + "smithy.api#documentation": "

          \n The list of rows in the table that match the query filter.\n

          ", + "smithy.api#required": {} + } + }, + "nextToken": { + "target": "com.amazonaws.honeycode#PaginationToken", + "traits": { + "smithy.api#documentation": "

          \n Provides the pagination token to load the next page if there are more results matching the request. If a\n pagination token is not present in the response, it means that all data matching the request has been\n loaded.\n

          " + } + }, + "workbookCursor": { + "target": "com.amazonaws.honeycode#WorkbookCursor", + "traits": { + "smithy.api#documentation": "

          \n Indicates the cursor of the workbook at which the data returned by this request is read. Workbook cursor\n keeps increasing with every update and the increments are not sequential.\n

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.honeycode#RawValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 32767 + }, + "smithy.api#pattern": "[\\s\\S]*" + } + }, + "com.amazonaws.honeycode#RequestTimeoutException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.honeycode#ErrorMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          The request timed out.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 504 + } + }, + "com.amazonaws.honeycode#ResourceId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 36, + "max": 36 + }, + "smithy.api#pattern": "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}" + } + }, + "com.amazonaws.honeycode#ResourceIds": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#ResourceId" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#ResourceNotFoundException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.honeycode#ErrorMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          A Workbook, Table, App, Screen or Screen Automation was not found with the given ID.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.honeycode#ResultHeader": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#ColumnMetadata" + } + }, + "com.amazonaws.honeycode#ResultRow": { + "type": "structure", + "members": { + "rowId": { + "target": "com.amazonaws.honeycode#RowId", + "traits": { + "smithy.api#documentation": "

          The ID for a particular row.

          " + } + }, + "dataItems": { + "target": "com.amazonaws.honeycode#DataItems", + "traits": { + "smithy.api#documentation": "

          List of all the data cells in a row.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A single row in the ResultSet.

          " + } + }, + "com.amazonaws.honeycode#ResultRows": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#ResultRow" + } + }, + "com.amazonaws.honeycode#ResultSet": { + "type": "structure", + "members": { + "headers": { + "target": "com.amazonaws.honeycode#ResultHeader", + "traits": { + "smithy.api#documentation": "

          \n List of headers for all the data cells in the block. The header identifies the name and default format of\n the data cell. Data cells appear in the same order in all rows as defined in the header. The names and\n formats are not repeated in the rows. If a particular row does not have a value for a data cell, a blank\n value is used.\n

          \n

          \n For example, a task list that displays the task name, due date and assigned person might have headers\n [ { \"name\": \"Task Name\"}, {\"name\": \"Due Date\", \"format\": \"DATE\"}, {\"name\": \"Assigned\", \"format\": \"CONTACT\"} ].\n Every row in the result will have the task name as the first item, due date as the second item and assigned\n person as the third item. If a particular task does not have a due date, that row will still have a blank\n value in the second element and the assigned person will still be in the third element.\n

          ", + "smithy.api#required": {} + } + }, + "rows": { + "target": "com.amazonaws.honeycode#ResultRows", + "traits": { + "smithy.api#documentation": "

          \n List of rows returned by the request. Each row has a row Id and a list of data cells in that row. The data\n cells will be present in the same order as they are defined in the header.\n

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n ResultSet contains the results of the request for a single block or list defined on the screen.\n

          " + } + }, + "com.amazonaws.honeycode#ResultSetMap": { + "type": "map", + "key": { + "target": "com.amazonaws.honeycode#Name" + }, + "value": { + "target": "com.amazonaws.honeycode#ResultSet" + } + }, + "com.amazonaws.honeycode#RowDataInput": { + "type": "map", + "key": { + "target": "com.amazonaws.honeycode#ResourceId" + }, + "value": { + "target": "com.amazonaws.honeycode#CellInput" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#RowId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 77, + "max": 77 + }, + "smithy.api#pattern": "row:[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\\/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}" + } + }, + "com.amazonaws.honeycode#RowIdList": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#RowId" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.honeycode#SecureURL": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 8000 + }, + "smithy.api#pattern": "^https:\\/\\/[^\\n\\r\\x00\\x08\\x0B\\x0C\\x0E\\x1F]*$" + } + }, + "com.amazonaws.honeycode#ServiceQuotaExceededException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.honeycode#ErrorMessage", + "traits": { + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          \n The request caused service quota to be breached.\n

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 402 + } + }, + "com.amazonaws.honeycode#ServiceUnavailableException": { + "type": "structure", + "members": { + "message": { + "target": "com.amazonaws.honeycode#ErrorMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          Remote service is unreachable.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 503 + } + }, + "com.amazonaws.honeycode#SheetsPublicApiService": { + "type": "service", + "version": "2020-03-01", + "operations": [ + { + "target": "com.amazonaws.honeycode#BatchCreateTableRows" + }, + { + "target": "com.amazonaws.honeycode#BatchDeleteTableRows" + }, + { + "target": "com.amazonaws.honeycode#BatchUpdateTableRows" + }, + { + "target": "com.amazonaws.honeycode#BatchUpsertTableRows" + }, + { + "target": "com.amazonaws.honeycode#DescribeTableDataImportJob" + }, + { + "target": "com.amazonaws.honeycode#GetScreenData" + }, + { + "target": "com.amazonaws.honeycode#InvokeScreenAutomation" + }, + { + "target": "com.amazonaws.honeycode#ListTableColumns" + }, + { + "target": "com.amazonaws.honeycode#ListTableRows" + }, + { + "target": "com.amazonaws.honeycode#ListTables" + }, + { + "target": "com.amazonaws.honeycode#QueryTableRows" + }, + { + "target": "com.amazonaws.honeycode#StartTableDataImportJob" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "Honeycode", + "arnNamespace": "honeycode", + "cloudFormationName": "Honeycode", + "cloudTrailEventSource": "honeycode.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "honeycode" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          \n Amazon Honeycode is a fully managed service that allows you to quickly build mobile and web apps for teams—without\n programming. Build Honeycode apps for managing almost anything, like projects, customers, operations, approvals,\n resources, and even your team.\n

          ", + "smithy.api#title": "Amazon Honeycode" + } + }, + "com.amazonaws.honeycode#SourceDataColumnIndex": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.honeycode#SourceDataColumnProperties": { + "type": "structure", + "members": { + "columnIndex": { + "target": "com.amazonaws.honeycode#SourceDataColumnIndex", + "traits": { + "smithy.api#documentation": "

          The index of the column in the input file.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          An object that contains the properties for importing data to a specific column in a table.

          " + } + }, + "com.amazonaws.honeycode#StartTableDataImportJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.honeycode#StartTableDataImportJobRequest" + }, + "output": { + "target": "com.amazonaws.honeycode#StartTableDataImportJobResult" + }, + "errors": [ + { + "target": "com.amazonaws.honeycode#AccessDeniedException" + }, + { + "target": "com.amazonaws.honeycode#InternalServerException" + }, + { + "target": "com.amazonaws.honeycode#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.honeycode#ServiceUnavailableException" + }, + { + "target": "com.amazonaws.honeycode#ThrottlingException" + }, + { + "target": "com.amazonaws.honeycode#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          \n The StartTableDataImportJob API allows you to start an import job on a table. This API will only return\n the id of the job that was started. To find out the status of the import request, you need to call the\n DescribeTableDataImportJob API.\n

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/workbooks/{workbookId}/tables/{destinationTableId}/import", + "code": 200 + } + } + }, + "com.amazonaws.honeycode#StartTableDataImportJobRequest": { + "type": "structure", + "members": { + "workbookId": { "target": "com.amazonaws.honeycode#ResourceId", "traits": { - "smithy.api#documentation": "

          The ID of the automation action to be performed.

          ", + "smithy.api#documentation": "

          The ID of the workbook where the rows are being imported.

          \n

          \n If a workbook with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", "smithy.api#httpLabel": {}, "smithy.api#required": {} } }, - "variables": { - "target": "com.amazonaws.honeycode#VariableValueMap", + "dataSource": { + "target": "com.amazonaws.honeycode#ImportDataSource", "traits": { - "smithy.api#documentation": "

          \n Variables are optional and are needed only if the screen requires them to render correctly. Variables are\n specified as a map where the key is the name of the variable as defined on the screen. The value is an\n object which currently has only one property, rawValue, which holds the value of the variable to be passed\n to the screen.\n

          " + "smithy.api#documentation": "

          \n The source of the data that is being imported. The size of source must be no larger than 100 MB.\n Source must have no more than 100,000 cells and no more than 1,000 rows.\n

          ", + "smithy.api#required": {} + } + }, + "dataFormat": { + "target": "com.amazonaws.honeycode#ImportSourceDataFormat", + "traits": { + "smithy.api#documentation": "

          \n The format of the data that is being imported. Currently the only option supported is \"DELIMITED_TEXT\".\n

          ", + "smithy.api#required": {} + } + }, + "destinationTableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the table where the rows are being imported.

          \n

          \n If a table with the specified id could not be found, this API throws ResourceNotFoundException.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "importOptions": { + "target": "com.amazonaws.honeycode#ImportOptions", + "traits": { + "smithy.api#documentation": "

          \n The options for customizing this import request.\n

          ", + "smithy.api#required": {} + } + }, + "clientRequestToken": { + "target": "com.amazonaws.honeycode#ClientRequestToken", + "traits": { + "smithy.api#documentation": "

          \n The request token for performing the update action.\n Request tokens help to identify duplicate requests. If a call times out or fails due to a transient error\n like a failed network connection, you can retry the call with the same request token. The service ensures\n that if the first call using that request token is successfully performed, the second call will not perform\n the action again.\n

          \n

          \n Note that request tokens are valid only for a few minutes. You cannot use request tokens to dedupe requests\n spanning hours or days.\n

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.honeycode#InvokeScreenAutomationResult": { + "com.amazonaws.honeycode#StartTableDataImportJobResult": { "type": "structure", "members": { - "workbookCursor": { - "target": "com.amazonaws.honeycode#WorkbookCursor", + "jobId": { + "target": "com.amazonaws.honeycode#JobId", "traits": { - "smithy.api#documentation": "

          The updated workbook cursor after performing the automation action.

          ", + "smithy.api#documentation": "

          \n The id that is assigned to this import job. Future requests to find out the status of this import job\n need to send this id in the appropriate parameter in the request.\n

          ", + "smithy.api#required": {} + } + }, + "jobStatus": { + "target": "com.amazonaws.honeycode#TableDataImportJobStatus", + "traits": { + "smithy.api#documentation": "

          \n The status of the import job immediately after submitting the request.\n

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.honeycode#MaxResults": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1, - "max": 100 + "com.amazonaws.honeycode#Table": { + "type": "structure", + "members": { + "tableId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The id of the table.

          " + } + }, + "tableName": { + "target": "com.amazonaws.honeycode#TableName", + "traits": { + "smithy.api#documentation": "

          The name of the table.

          " + } } - } - }, - "com.amazonaws.honeycode#Name": { - "type": "string", - "traits": { - "smithy.api#sensitive": {} - } - }, - "com.amazonaws.honeycode#PaginationToken": { - "type": "string", + }, "traits": { - "smithy.api#length": { - "min": 1, - "max": 1024 - } + "smithy.api#documentation": "

          An object representing the properties of a table in a workbook.

          " } }, - "com.amazonaws.honeycode#RawValue": { - "type": "string" - }, - "com.amazonaws.honeycode#RequestTimeoutException": { + "com.amazonaws.honeycode#TableColumn": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.honeycode#ErrorMessage" + "tableColumnId": { + "target": "com.amazonaws.honeycode#ResourceId", + "traits": { + "smithy.api#documentation": "

          The id of the column in the table.

          " + } + }, + "tableColumnName": { + "target": "com.amazonaws.honeycode#TableColumnName", + "traits": { + "smithy.api#documentation": "

          The name of the column in the table.

          " + } + }, + "format": { + "target": "com.amazonaws.honeycode#Format", + "traits": { + "smithy.api#documentation": "

          \n The column level format that is applied in the table. An empty value in this field means that the\n column format is the default value 'AUTO'.\n

          " + } } }, "traits": { - "smithy.api#documentation": "

          The request timed out.

          ", - "smithy.api#error": "server", - "smithy.api#httpError": 504 + "smithy.api#documentation": "

          An object that contains attributes about a single column in a table

          " } }, - "com.amazonaws.honeycode#ResourceId": { - "type": "string", - "traits": { - "smithy.api#pattern": "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}" + "com.amazonaws.honeycode#TableColumnName": { + "type": "string" + }, + "com.amazonaws.honeycode#TableColumns": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#TableColumn" } }, - "com.amazonaws.honeycode#ResourceNotFoundException": { + "com.amazonaws.honeycode#TableDataImportJobMessage": { + "type": "string" + }, + "com.amazonaws.honeycode#TableDataImportJobMetadata": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.honeycode#ErrorMessage" + "submitter": { + "target": "com.amazonaws.honeycode#ImportJobSubmitter", + "traits": { + "smithy.api#documentation": "

          Details about the submitter of the import request.

          ", + "smithy.api#required": {} + } + }, + "submitTime": { + "target": "com.amazonaws.honeycode#TimestampInMillis", + "traits": { + "smithy.api#documentation": "

          The timestamp when the job was submitted for import.

          ", + "smithy.api#required": {} + } + }, + "importOptions": { + "target": "com.amazonaws.honeycode#ImportOptions", + "traits": { + "smithy.api#documentation": "

          The options that was specified at the time of submitting the import request.

          ", + "smithy.api#required": {} + } + }, + "dataSource": { + "target": "com.amazonaws.honeycode#ImportDataSource", + "traits": { + "smithy.api#documentation": "

          The source of the data that was submitted for import.

          ", + "smithy.api#required": {} + } } }, "traits": { - "smithy.api#documentation": "

          A Workbook, App, Screen or Screen Automation was not found with the given ID.

          ", - "smithy.api#error": "client", - "smithy.api#httpError": 404 + "smithy.api#documentation": "

          The metadata associated with the table data import job that was submitted.

          " } }, - "com.amazonaws.honeycode#ResultHeader": { - "type": "list", - "member": { - "target": "com.amazonaws.honeycode#ColumnMetadata" + "com.amazonaws.honeycode#TableDataImportJobStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "SUBMITTED", + "name": "Submitted" + }, + { + "value": "IN_PROGRESS", + "name": "InProgress" + }, + { + "value": "COMPLETED", + "name": "Completed" + }, + { + "value": "FAILED", + "name": "Failed" + } + ] } }, - "com.amazonaws.honeycode#ResultRow": { + "com.amazonaws.honeycode#TableName": { + "type": "string" + }, + "com.amazonaws.honeycode#TableRow": { "type": "structure", "members": { "rowId": { "target": "com.amazonaws.honeycode#RowId", "traits": { - "smithy.api#documentation": "

          The ID for a particular row.

          " + "smithy.api#documentation": "

          The id of the row in the table.

          ", + "smithy.api#required": {} } }, - "dataItems": { - "target": "com.amazonaws.honeycode#DataItems", + "cells": { + "target": "com.amazonaws.honeycode#Cells", "traits": { - "smithy.api#documentation": "

          List of all the data cells in a row.

          ", + "smithy.api#documentation": "

          A list of cells in the table row. The cells appear in the same order as the columns of the table.\n

          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          A single row in the ResultSet.

          " + "smithy.api#documentation": "

          An object that contains attributes about a single row in a table

          " } }, - "com.amazonaws.honeycode#ResultRows": { + "com.amazonaws.honeycode#TableRows": { "type": "list", "member": { - "target": "com.amazonaws.honeycode#ResultRow" + "target": "com.amazonaws.honeycode#TableRow" } }, - "com.amazonaws.honeycode#ResultSet": { + "com.amazonaws.honeycode#Tables": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#Table" + } + }, + "com.amazonaws.honeycode#ThrottlingException": { "type": "structure", "members": { - "rows": { - "target": "com.amazonaws.honeycode#ResultRows", + "message": { + "target": "com.amazonaws.honeycode#ErrorMessage" + } + }, + "traits": { + "smithy.api#documentation": "

          TPS (transactions per second) rate reached.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.honeycode#TimestampInMillis": { + "type": "timestamp" + }, + "com.amazonaws.honeycode#UpdateRowData": { + "type": "structure", + "members": { + "rowId": { + "target": "com.amazonaws.honeycode#RowId", "traits": { - "smithy.api#documentation": "

          \n List of rows returned by the request. Each row has a row Id and a list of data cells in that row. The data\n cells will be present in the same order as they are defined in the header.\n

          ", + "smithy.api#documentation": "

          \n The id of the row that needs to be updated.\n

          ", "smithy.api#required": {} } }, - "headers": { - "target": "com.amazonaws.honeycode#ResultHeader", + "cellsToUpdate": { + "target": "com.amazonaws.honeycode#RowDataInput", "traits": { - "smithy.api#documentation": "

          \n List of headers for all the data cells in the block. The header identifies the name and default format of\n the data cell. Data cells appear in the same order in all rows as defined in the header. The names and\n formats are not repeated in the rows. If a particular row does not have a value for a data cell, a blank\n value is used.\n

          \n

          \n For example, a task list that displays the task name, due date and assigned person might have headers\n [ { \"name\": \"Task Name\"}, {\"name\": \"Due Date\", \"format\": \"DATE\"}, {\"name\": \"Assigned\", \"format\": \"CONTACT\"} ].\n Every row in the result will have the task name as the first item, due date as the second item and assigned\n person as the third item. If a particular task does not have a due date, that row will still have a blank\n value in the second element and the assigned person will still be in the third element.\n

          ", + "smithy.api#documentation": "

          \n A map representing the cells to update in the given row. The key is the column id of the\n cell and the value is the CellInput object that represents the data to set in that cell.\n

          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          \n ResultSet contains the results of the request for a single block or list defined on the screen.\n

          " + "smithy.api#documentation": "

          \n Data needed to create a single row in a table as part of the BatchCreateTableRows request.\n

          " } }, - "com.amazonaws.honeycode#ResultSetMap": { - "type": "map", - "key": { - "target": "com.amazonaws.honeycode#Name" + "com.amazonaws.honeycode#UpdateRowDataList": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#UpdateRowData" }, - "value": { - "target": "com.amazonaws.honeycode#ResultSet" + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } } }, - "com.amazonaws.honeycode#RowId": { + "com.amazonaws.honeycode#UpsertAction": { "type": "string", "traits": { - "smithy.api#pattern": "row:[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}\\/[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}" + "smithy.api#enum": [ + { + "value": "UPDATED", + "name": "Updated" + }, + { + "value": "APPENDED", + "name": "Appended" + } + ] } }, - "com.amazonaws.honeycode#ServiceUnavailableException": { + "com.amazonaws.honeycode#UpsertRowData": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.honeycode#ErrorMessage" + "batchItemId": { + "target": "com.amazonaws.honeycode#BatchItemId", + "traits": { + "smithy.api#documentation": "

          \n An external identifier that represents a single item in the request that is being upserted as part of the\n BatchUpsertTableRows request. This can be any string that you can use to identify the item in the request.\n The BatchUpsertTableRows API puts the batch item id in the results to allow you to link data in the\n request to data in the results.\n

          ", + "smithy.api#required": {} + } + }, + "filter": { + "target": "com.amazonaws.honeycode#Filter", + "traits": { + "smithy.api#documentation": "

          \n The filter formula to use to find existing matching rows to update. The formula needs to return zero or more\n rows. If the formula returns 0 rows, then a new row will be appended in the target table. If the formula\n returns one or more rows, then the returned rows will be updated.\n

          \n

          \n Note that the filter formula needs to return rows from the target table for the upsert operation to succeed.\n If the filter formula has a syntax error or it doesn't evaluate to zero or more rows in the target table\n for any one item in the input list, then the entire BatchUpsertTableRows request fails and no updates are\n made to the table.\n

          ", + "smithy.api#required": {} + } + }, + "cellsToUpdate": { + "target": "com.amazonaws.honeycode#RowDataInput", + "traits": { + "smithy.api#documentation": "

          \n A map representing the cells to update for the matching rows or an appended row. The key is the column id\n of the cell and the value is the CellInput object that represents the data to set in that cell.\n

          ", + "smithy.api#required": {} + } } }, "traits": { - "smithy.api#documentation": "

          Remote service is unreachable.

          ", - "smithy.api#error": "server", - "smithy.api#httpError": 503 + "smithy.api#documentation": "

          \n Data needed to upsert rows in a table as part of a single item in the BatchUpsertTableRows request.\n

          " } }, - "com.amazonaws.honeycode#SheetsPublicApiService": { - "type": "service", - "version": "2020-03-01", - "operations": [ - { - "target": "com.amazonaws.honeycode#GetScreenData" - }, - { - "target": "com.amazonaws.honeycode#InvokeScreenAutomation" - } - ], - "traits": { - "aws.api#service": { - "sdkId": "Honeycode", - "arnNamespace": "honeycode", - "cloudFormationName": "Honeycode", - "cloudTrailEventSource": "honeycode.amazonaws.com" - }, - "aws.auth#sigv4": { - "name": "honeycode" - }, - "aws.protocols#restJson1": {}, - "smithy.api#documentation": "

          \n Amazon Honeycode is a fully managed service that allows you to quickly build mobile and web apps for teams—without\n programming. Build Honeycode apps for managing almost anything, like projects, customers, operations, approvals,\n resources, and even your team.\n

          ", - "smithy.api#title": "Amazon Honeycode" + "com.amazonaws.honeycode#UpsertRowDataList": { + "type": "list", + "member": { + "target": "com.amazonaws.honeycode#UpsertRowData" } }, - "com.amazonaws.honeycode#ThrottlingException": { + "com.amazonaws.honeycode#UpsertRowsResult": { "type": "structure", "members": { - "message": { - "target": "com.amazonaws.honeycode#ErrorMessage" + "rowIds": { + "target": "com.amazonaws.honeycode#RowIdList", + "traits": { + "smithy.api#documentation": "

          \n The list of row ids that were changed as part of an upsert row operation. If the upsert resulted in an\n update, this list could potentially contain multiple rows that matched the filter and hence got updated.\n If the upsert resulted in an append, this list would only have the single row that was appended.\n

          ", + "smithy.api#required": {} + } + }, + "upsertAction": { + "target": "com.amazonaws.honeycode#UpsertAction", + "traits": { + "smithy.api#documentation": "

          \n The result of the upsert action.\n

          ", + "smithy.api#required": {} + } } }, "traits": { - "smithy.api#documentation": "

          Tps(transactions per second) rate reached.

          ", - "smithy.api#error": "client", - "smithy.api#httpError": 429 + "smithy.api#documentation": "

          \n An object that represents the result of a single upsert row request.\n

          " + } + }, + "com.amazonaws.honeycode#UpsertRowsResultMap": { + "type": "map", + "key": { + "target": "com.amazonaws.honeycode#BatchItemId" + }, + "value": { + "target": "com.amazonaws.honeycode#UpsertRowsResult" } }, "com.amazonaws.honeycode#ValidationException": { @@ -625,6 +2462,7 @@ "com.amazonaws.honeycode#VariableName": { "type": "string", "traits": { + "smithy.api#pattern": "^(?!\\s*$).+", "smithy.api#sensitive": {} } }, diff --git a/codegen/sdk-codegen/aws-models/lambda.2015-03-31.json b/codegen/sdk-codegen/aws-models/lambda.2015-03-31.json index 91f6f5102ebf..c5d0eb9bf1a3 100644 --- a/codegen/sdk-codegen/aws-models/lambda.2015-03-31.json +++ b/codegen/sdk-codegen/aws-models/lambda.2015-03-31.json @@ -753,12 +753,12 @@ "UntrustedArtifactOnDeployment": { "target": "com.amazonaws.lambda#CodeSigningPolicy", "traits": { - "smithy.api#documentation": "

          Code signing configuration policy for deployment validation failure. If you set the policy to\n Enforce, Lambda blocks the deployment request if code-signing validation checks fail. If you set the\n policy to Warn, Lambda allows the deployment and creates a CloudWatch log.

          \n

          Default value: Warn\n

          " + "smithy.api#documentation": "

          Code signing configuration policy for deployment validation failure. If you set the policy to\n Enforce, Lambda blocks the deployment request if signature validation checks fail. If you set the\n policy to Warn, Lambda allows the deployment and creates a CloudWatch log.

          \n

          Default value: Warn\n

          " } } }, "traits": { - "smithy.api#documentation": "

          Code signing configuration policies specifies the validation failure action for signature mismatch or\n expiry.

          " + "smithy.api#documentation": "

          Code signing configuration policies specify the validation failure action for signature mismatch or\n expiry.

          " } }, "com.amazonaws.lambda#CodeSigningPolicy": { @@ -1137,7 +1137,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a Lambda function. To create a function, you need a deployment package and an execution role. The\n deployment package contains your function code. The execution role grants the function permission to use AWS\n services, such as Amazon CloudWatch Logs for log streaming and AWS X-Ray for request tracing.

          \n \n

          When you create a function, Lambda provisions an instance of the function and its supporting resources. If\n your function connects to a VPC, this process can take a minute or so. During this time, you can't invoke or\n modify the function. The State, StateReason, and StateReasonCode fields in\n the response from GetFunctionConfiguration indicate when the function is ready to invoke. For\n more information, see Function\n States.

          \n \n

          A function has an unpublished version, and can have published versions and aliases. The unpublished version\n changes when you update your function's code and configuration. A published version is a snapshot of your function\n code and configuration that can't be changed. An alias is a named resource that maps to a version, and can be\n changed to map to a different version. Use the Publish parameter to create version 1 of\n your function from its initial configuration.

          \n \n

          The other parameters let you configure version-specific and function-level settings. You can modify\n version-specific settings later with UpdateFunctionConfiguration. Function-level settings apply\n to both the unpublished and published versions of the function, and include tags (TagResource)\n and per-function concurrency limits (PutFunctionConcurrency).

          \n \n

          To enable code signing for this function, specify the ARN of a code-signing configuration. When a user\n attempts to deploy a code package with UpdateFunctionCode, Lambda checks that the code\n package has a valid signature from a trusted publisher. The code-signing configuration \n includes set set of signing profiles, which define the trusted publishers for this function.

          \n \n

          If another account or an AWS service invokes your function, use AddPermission to grant\n permission by creating a resource-based IAM policy. You can grant permissions at the function level, on a version,\n or on an alias.

          \n \n

          To invoke your function directly, use Invoke. To invoke your function in response to events\n in other AWS services, create an event source mapping (CreateEventSourceMapping), or configure a\n function trigger in the other service. For more information, see Invoking Functions.

          ", + "smithy.api#documentation": "

          Creates a Lambda function. To create a function, you need a deployment package and an execution role. The\n deployment package is a ZIP archive or image container that contains your function code. The execution role grants the function permission to use AWS\n services, such as Amazon CloudWatch Logs for log streaming and AWS X-Ray for request tracing.

          \n \n

          When you create a function, Lambda provisions an instance of the function and its supporting resources. If\n your function connects to a VPC, this process can take a minute or so. During this time, you can't invoke or\n modify the function. The State, StateReason, and StateReasonCode fields in\n the response from GetFunctionConfiguration indicate when the function is ready to invoke. For\n more information, see Function\n States.

          \n \n

          A function has an unpublished version, and can have published versions and aliases. The unpublished version\n changes when you update your function's code and configuration. A published version is a snapshot of your function\n code and configuration that can't be changed. An alias is a named resource that maps to a version, and can be\n changed to map to a different version. Use the Publish parameter to create version 1 of\n your function from its initial configuration.

          \n \n

          The other parameters let you configure version-specific and function-level settings. You can modify\n version-specific settings later with UpdateFunctionConfiguration. Function-level settings apply\n to both the unpublished and published versions of the function, and include tags (TagResource)\n and per-function concurrency limits (PutFunctionConcurrency).

          \n \n

          You can use code signing if your deployment package is a ZIP archive. To enable code signing for this function, \n specify the ARN of a code-signing configuration. When a user\n attempts to deploy a code package with UpdateFunctionCode, Lambda checks that the code\n package has a valid signature from a trusted publisher. The code-signing configuration\n includes a set of signing profiles, which define the trusted publishers for this function.

          \n \n

          If another account or an AWS service invokes your function, use AddPermission to grant\n permission by creating a resource-based IAM policy. You can grant permissions at the function level, on a version,\n or on an alias.

          \n \n

          To invoke your function directly, use Invoke. To invoke your function in response to events\n in other AWS services, create an event source mapping (CreateEventSourceMapping), or configure a\n function trigger in the other service. For more information, see Invoking Functions.

          ", "smithy.api#http": { "method": "POST", "uri": "/2015-03-31/functions", @@ -1158,8 +1158,7 @@ "Runtime": { "target": "com.amazonaws.lambda#Runtime", "traits": { - "smithy.api#documentation": "

          The identifier of the function's runtime.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The identifier of the function's runtime.

          " } }, "Role": { @@ -1172,8 +1171,7 @@ "Handler": { "target": "com.amazonaws.lambda#Handler", "traits": { - "smithy.api#documentation": "

          The name of the method within your code that Lambda calls to execute your function. The format includes the\n file name. It can also include namespaces and other qualifiers, depending on the runtime. For more information,\n see Programming Model.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the method within your code that Lambda calls to execute your function. The format includes the\n file name. It can also include namespaces and other qualifiers, depending on the runtime. For more information,\n see Programming Model.

          " } }, "Code": { @@ -1213,6 +1211,12 @@ "smithy.api#documentation": "

          For network connectivity to AWS resources in a VPC, specify a list of security groups and subnets in the VPC.\n When you connect a function to a VPC, it can only access resources and the internet through that VPC. For more\n information, see VPC Settings.

          " } }, + "PackageType": { + "target": "com.amazonaws.lambda#PackageType", + "traits": { + "smithy.api#documentation": "

          The type of deployment package. Set to Image for container image and set Zip for ZIP archive.

          " + } + }, "DeadLetterConfig": { "target": "com.amazonaws.lambda#DeadLetterConfig", "traits": { @@ -1255,10 +1259,16 @@ "smithy.api#documentation": "

          Connection settings for an Amazon EFS file system.

          " } }, + "ImageConfig": { + "target": "com.amazonaws.lambda#ImageConfig", + "traits": { + "smithy.api#documentation": "

          Configuration values that override the container image Dockerfile.

          " + } + }, "CodeSigningConfigArn": { "target": "com.amazonaws.lambda#CodeSigningConfigArn", "traits": { - "smithy.api#documentation": "

          To enable code signing for this function, specify the ARN of a code-signing configuration. A code-signing configuration\nincludes set set of signing profiles, which define the trusted publishers for this function.

          " + "smithy.api#documentation": "

          To enable code signing for this function, specify the ARN of a code-signing configuration. A code-signing configuration\nincludes a set of signing profiles, which define the trusted publishers for this function.

          " } } } @@ -2203,10 +2213,16 @@ "traits": { "smithy.api#documentation": "

          For versioned objects, the version of the deployment package object to use.

          " } + }, + "ImageUri": { + "target": "com.amazonaws.lambda#String", + "traits": { + "smithy.api#documentation": "

          URI of a container image in the Amazon ECR registry.

          " + } } }, "traits": { - "smithy.api#documentation": "

          The code for the Lambda function. You can specify either an object in Amazon S3, or upload a deployment\n package directly.

          " + "smithy.api#documentation": "

          The code for the Lambda function. You can specify either an object in Amazon S3, upload a ZIP archive deployment\n package directly, or specify the URI of a container image.

          " } }, "com.amazonaws.lambda#FunctionCodeLocation": { @@ -2223,6 +2239,18 @@ "traits": { "smithy.api#documentation": "

          A presigned URL that you can use to download the deployment package.

          " } + }, + "ImageUri": { + "target": "com.amazonaws.lambda#String", + "traits": { + "smithy.api#documentation": "

          URI of a container image in the Amazon ECR registry.

          " + } + }, + "ResolvedImageUri": { + "target": "com.amazonaws.lambda#String", + "traits": { + "smithy.api#documentation": "

          The resolved URI for the image.

          " + } } }, "traits": { @@ -2394,6 +2422,18 @@ "smithy.api#documentation": "

          Connection settings for an Amazon EFS file system.

          " } }, + "PackageType": { + "target": "com.amazonaws.lambda#PackageType", + "traits": { + "smithy.api#documentation": "

          The type of deployment package. Set to Image for container image and set Zip for ZIP archive.

          " + } + }, + "ImageConfigResponse": { + "target": "com.amazonaws.lambda#ImageConfigResponse", + "traits": { + "smithy.api#documentation": "

          The function's image configuration values.

          " + } + }, "SigningProfileVersionArn": { "target": "com.amazonaws.lambda#Arn", "traits": { @@ -3369,6 +3409,72 @@ "com.amazonaws.lambda#HttpStatus": { "type": "integer" }, + "com.amazonaws.lambda#ImageConfig": { + "type": "structure", + "members": { + "EntryPoint": { + "target": "com.amazonaws.lambda#StringList", + "traits": { + "smithy.api#documentation": "

          Specifies the entry point to your application, which is typically the location of the runtime\n executable.

          " + } + }, + "Command": { + "target": "com.amazonaws.lambda#StringList", + "traits": { + "smithy.api#documentation": "

          Specifies parameters that you want to pass in with ENTRYPOINT.

          " + } + }, + "WorkingDirectory": { + "target": "com.amazonaws.lambda#WorkingDirectory", + "traits": { + "smithy.api#documentation": "

          Specifies the working directory.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Configuration values that override the container image Dockerfile. See \n Override Container settings.

          " + } + }, + "com.amazonaws.lambda#ImageConfigError": { + "type": "structure", + "members": { + "ErrorCode": { + "target": "com.amazonaws.lambda#String", + "traits": { + "smithy.api#documentation": "

          Error code.

          " + } + }, + "Message": { + "target": "com.amazonaws.lambda#SensitiveString", + "traits": { + "smithy.api#documentation": "

          Error message.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Error response to GetFunctionConfiguration.

          " + } + }, + "com.amazonaws.lambda#ImageConfigResponse": { + "type": "structure", + "members": { + "ImageConfig": { + "target": "com.amazonaws.lambda#ImageConfig", + "traits": { + "smithy.api#documentation": "

          Configuration values that override the container image Dockerfile.

          " + } + }, + "Error": { + "target": "com.amazonaws.lambda#ImageConfigError", + "traits": { + "smithy.api#documentation": "

          Error response to GetFunctionConfiguration.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Response to GetFunctionConfiguration request.

          " + } + }, "com.amazonaws.lambda#Integer": { "type": "integer" }, @@ -3898,6 +4004,14 @@ { "value": "InvalidSecurityGroup", "name": "InvalidSecurityGroup" + }, + { + "value": "ImageDeleted", + "name": "ImageDeleted" + }, + { + "value": "ImageAccessDenied", + "name": "ImageAccessDenied" } ] } @@ -4257,7 +4371,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns a list of code\n signing configurations for the specified function. A request returns up to 10,000 configurations per\n call. You can use the MaxItems parameter to return fewer configurations per call.

          ", + "smithy.api#documentation": "

          Returns a list of code\n signing configurations. A request returns up to 10,000 configurations per\n call. You can use the MaxItems parameter to return fewer configurations per call.

          ", "smithy.api#http": { "method": "GET", "uri": "/2020-04-22/code-signing-configs", @@ -5152,7 +5266,7 @@ "smithy.api#box": {}, "smithy.api#range": { "min": 128, - "max": 3008 + "max": 10240 } } }, @@ -5225,6 +5339,21 @@ "smithy.api#pattern": "o-[a-z0-9]{10,32}" } }, + "com.amazonaws.lambda#PackageType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Zip", + "name": "Zip" + }, + { + "value": "Image", + "name": "Image" + } + ] + } + }, "com.amazonaws.lambda#ParallelizationFactor": { "type": "integer", "traits": { @@ -6449,6 +6578,14 @@ { "value": "InvalidSecurityGroup", "name": "InvalidSecurityGroup" + }, + { + "value": "ImageDeleted", + "name": "ImageDeleted" + }, + { + "value": "ImageAccessDenied", + "name": "ImageAccessDenied" } ] } @@ -6466,6 +6603,18 @@ "com.amazonaws.lambda#String": { "type": "string" }, + "com.amazonaws.lambda#StringList": { + "type": "list", + "member": { + "target": "com.amazonaws.lambda#String" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1500 + } + } + }, "com.amazonaws.lambda#SubnetIPAddressLimitReachedException": { "type": "structure", "members": { @@ -7123,6 +7272,12 @@ "smithy.api#documentation": "

          For versioned objects, the version of the deployment package object to use.

          " } }, + "ImageUri": { + "target": "com.amazonaws.lambda#String", + "traits": { + "smithy.api#documentation": "

          URI of a container image in the Amazon ECR registry.

          " + } + }, "Publish": { "target": "com.amazonaws.lambda#Boolean", "traits": { @@ -7283,6 +7438,12 @@ "traits": { "smithy.api#documentation": "

          Connection settings for an Amazon EFS file system.

          " } + }, + "ImageConfig": { + "target": "com.amazonaws.lambda#ImageConfig", + "traits": { + "smithy.api#documentation": "

          Configuration values that override the container image Dockerfile.

          " + } } } }, @@ -7422,6 +7583,15 @@ "max": 1.0 } } + }, + "com.amazonaws.lambda#WorkingDirectory": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1000 + } + } } } } diff --git a/codegen/sdk-codegen/aws-models/lookoutvision.2020-11-20.json b/codegen/sdk-codegen/aws-models/lookoutvision.2020-11-20.json new file mode 100644 index 000000000000..4709ea961364 --- /dev/null +++ b/codegen/sdk-codegen/aws-models/lookoutvision.2020-11-20.json @@ -0,0 +1,2456 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.lookoutvision#AccessDeniedException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          You are not authorized to perform the action.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 403 + } + }, + "com.amazonaws.lookoutvision#AnomalyClassFilter": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + }, + "smithy.api#pattern": "(normal|anomaly)" + } + }, + "com.amazonaws.lookoutvision#Boolean": { + "type": "boolean" + }, + "com.amazonaws.lookoutvision#ClientToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^[a-zA-Z0-9-]+$" + } + }, + "com.amazonaws.lookoutvision#ConflictException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + }, + "ResourceId": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The ID of the resource.

          ", + "smithy.api#required": {} + } + }, + "ResourceType": { + "target": "com.amazonaws.lookoutvision#ResourceType", + "traits": { + "smithy.api#documentation": "

          The type of the resource.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The update or deletion of a resource caused an inconsistent state.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 409 + } + }, + "com.amazonaws.lookoutvision#ContentType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.lookoutvision#CreateDataset": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#CreateDatasetRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#CreateDatasetResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a new dataset in an Amazon Lookout for Vision project. CreateDataset can create a \n training or a test dataset from a valid dataset source (DatasetSource).

          \n

          If you want a single dataset project, specify train for the value of \n DatasetType.

          \n

          To have a project with separate training and test datasets, call CreateDataset twice.\n On the first call, specify train for the value of \n DatasetType. On the second call, specify test for the value of \n DatasetType.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/2020-11-20/projects/{ProjectName}/datasets", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#CreateDatasetRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project in which you want to create a dataset.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset. Specify train for a training dataset.\n Specify test for a test dataset.

          ", + "smithy.api#required": {} + } + }, + "DatasetSource": { + "target": "com.amazonaws.lookoutvision#DatasetSource", + "traits": { + "smithy.api#documentation": "

          The location of the manifest file that Amazon Lookout for Vision uses to create the dataset.

          \n

          If you don't specify DatasetSource, an empty dataset is created and the operation \n synchronously returns. Later, you can add JSON Lines by calling UpdateDatasetEntries.\n

          \n

          If you specify a value for DatasetSource, the manifest at the S3 location\n is validated and used to create the dataset. The call to CreateDataset is asynchronous\n and might take a while to complete. To find out the current status, check the value of Status\n returned in a call to DescribeDataset.

          " + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to CreateDataset\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from CreateDataset.\n In this case, safely retry your call\n to CreateDataset by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to CreateDataset. An idempotency\n token is active for 8 hours.\n

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#CreateDatasetResponse": { + "type": "structure", + "members": { + "DatasetMetadata": { + "target": "com.amazonaws.lookoutvision#DatasetMetadata", + "traits": { + "smithy.api#documentation": "

          Information about the dataset.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#CreateModel": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#CreateModelRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#CreateModelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a new version of a model within an Amazon Lookout for Vision project. \n CreateModel is an asynchronous operation in which Amazon Lookout for Vision trains, tests,\n and evaluates a new version of a model.

          \n

          To get the current status, check the Status field returned\n in the response from DescribeModel.

          \n

          If the project has a single dataset, Amazon Lookout for Vision internally splits the dataset\n to create a training and a test dataset. \n If the project has a training and a test dataset, Lookout for Vision uses the respective datasets to train and test \n the model.

          \n

          After training completes, the evaluation metrics are stored at the location specified in\n OutputConfig.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/2020-11-20/projects/{ProjectName}/models", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#CreateModelRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project in which you want to create a model version.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.lookoutvision#ModelDescription", + "traits": { + "smithy.api#documentation": "

          A description for the version of the model.

          " + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to CreateModel\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from CreateModel.\n In this case, safely retry your call\n to CreateModel by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to CreateModel. An idempotency\n token is active for 8 hours.

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + }, + "OutputConfig": { + "target": "com.amazonaws.lookoutvision#OutputConfig", + "traits": { + "smithy.api#documentation": "

          The location where Amazon Lookout for Vision saves the training results.

          ", + "smithy.api#required": {} + } + }, + "KmsKeyId": { + "target": "com.amazonaws.lookoutvision#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The identifier of the AWS Key Management Service (AWS KMS) customer master key (CMK)\n to use for encrypting the model. If this parameter is not specified, the\n model is encrypted by a key that AWS owns and manages.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#CreateModelResponse": { + "type": "structure", + "members": { + "ModelMetadata": { + "target": "com.amazonaws.lookoutvision#ModelMetadata", + "traits": { + "smithy.api#documentation": "

          The response from a call to CreateModel.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#CreateProject": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#CreateProjectRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#CreateProjectResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates an empty Amazon Lookout for Vision project. After you create the project, add a dataset by calling\n CreateDataset.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/2020-11-20/projects", + "code": 200 + } + } + }, + "com.amazonaws.lookoutvision#CreateProjectRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name for the project.

          ", + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to CreateProject\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from CreateProject.\n In this case, safely retry your call\n to CreateProject by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to CreateProject. An idempotency\n token is active for 8 hours.

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#CreateProjectResponse": { + "type": "structure", + "members": { + "ProjectMetadata": { + "target": "com.amazonaws.lookoutvision#ProjectMetadata", + "traits": { + "smithy.api#documentation": "

          Information about the project.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DatasetChanges": { + "type": "blob", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10485760 + } + } + }, + "com.amazonaws.lookoutvision#DatasetDescription": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the dataset.

          " + } + }, + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset. The value train represents a training dataset or single dataset project.\n The value test represents a test dataset.

          " + } + }, + "CreationTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The Unix timestamp for the time and date that the dataset was created.

          " + } + }, + "LastUpdatedTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The Unix timestamp for the date and time that the dataset was last updated.

          " + } + }, + "Status": { + "target": "com.amazonaws.lookoutvision#DatasetStatus", + "traits": { + "smithy.api#documentation": "

          The status of the dataset.

          " + } + }, + "StatusMessage": { + "target": "com.amazonaws.lookoutvision#DatasetStatusMessage", + "traits": { + "smithy.api#documentation": "

          The status message for the dataset.

          " + } + }, + "ImageStats": { + "target": "com.amazonaws.lookoutvision#DatasetImageStats", + "traits": { + "smithy.api#documentation": "

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The description for a dataset. For more information, see DescribeDataset.

          " + } + }, + "com.amazonaws.lookoutvision#DatasetEntry": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 2, + "max": 8192 + }, + "smithy.api#pattern": "^\\{.*\\}$" + } + }, + "com.amazonaws.lookoutvision#DatasetEntryList": { + "type": "list", + "member": { + "target": "com.amazonaws.lookoutvision#DatasetEntry" + } + }, + "com.amazonaws.lookoutvision#DatasetGroundTruthManifest": { + "type": "structure", + "members": { + "S3Object": { + "target": "com.amazonaws.lookoutvision#InputS3Object", + "traits": { + "smithy.api#documentation": "

          The S3 bucket location for the manifest file.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Location information about a manifest file. You can use a manifest file to\n create a dataset.

          " + } + }, + "com.amazonaws.lookoutvision#DatasetImageStats": { + "type": "structure", + "members": { + "Total": { + "target": "com.amazonaws.lookoutvision#Integer", + "traits": { + "smithy.api#documentation": "

          The total number of images in the dataset.

          " + } + }, + "Labeled": { + "target": "com.amazonaws.lookoutvision#Integer", + "traits": { + "smithy.api#documentation": "

          The total number of labeled images.

          " + } + }, + "Normal": { + "target": "com.amazonaws.lookoutvision#Integer", + "traits": { + "smithy.api#documentation": "

          The total number of images labeled as normal.

          " + } + }, + "Anomaly": { + "target": "com.amazonaws.lookoutvision#Integer", + "traits": { + "smithy.api#documentation": "

          The total number of images labeled as an anomaly.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Statistics about the images in a dataset.

          " + } + }, + "com.amazonaws.lookoutvision#DatasetMetadata": { + "type": "structure", + "members": { + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset.

          " + } + }, + "CreationTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The Unix timestamp for the date and time that the dataset was created.

          " + } + }, + "Status": { + "target": "com.amazonaws.lookoutvision#DatasetStatus", + "traits": { + "smithy.api#documentation": "

          The status for the dataset.

          " + } + }, + "StatusMessage": { + "target": "com.amazonaws.lookoutvision#DatasetStatusMessage", + "traits": { + "smithy.api#documentation": "

          The status message for the dataset.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Summary information for an Amazon Lookout for Vision dataset.

          " + } + }, + "com.amazonaws.lookoutvision#DatasetMetadataList": { + "type": "list", + "member": { + "target": "com.amazonaws.lookoutvision#DatasetMetadata" + } + }, + "com.amazonaws.lookoutvision#DatasetSource": { + "type": "structure", + "members": { + "GroundTruthManifest": { + "target": "com.amazonaws.lookoutvision#DatasetGroundTruthManifest", + "traits": { + "smithy.api#documentation": "

          Location information for the manifest file.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about the location of a manifest file that Amazon Lookout for Vision uses to create a dataset.

          " + } + }, + "com.amazonaws.lookoutvision#DatasetStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CREATE_IN_PROGRESS", + "name": "CREATE_IN_PROGRESS" + }, + { + "value": "CREATE_COMPLETE", + "name": "CREATE_COMPLETE" + }, + { + "value": "CREATE_FAILED", + "name": "CREATE_FAILED" + }, + { + "value": "UPDATE_IN_PROGRESS", + "name": "UPDATE_IN_PROGRESS" + }, + { + "value": "UPDATE_COMPLETE", + "name": "UPDATE_COMPLETE" + }, + { + "value": "UPDATE_FAILED_ROLLBACK_IN_PROGRESS", + "name": "UPDATE_FAILED_ROLLBACK_IN_PROGRESS" + }, + { + "value": "UPDATE_FAILED_ROLLBACK_COMPLETE", + "name": "UPDATE_FAILED_ROLLBACK_COMPLETE" + }, + { + "value": "DELETE_IN_PROGRESS", + "name": "DELETE_IN_PROGRESS" + }, + { + "value": "DELETE_COMPLETE", + "name": "DELETE_COMPLETE" + }, + { + "value": "DELETE_FAILED", + "name": "DELETE_FAILED" + } + ] + } + }, + "com.amazonaws.lookoutvision#DatasetStatusMessage": { + "type": "string" + }, + "com.amazonaws.lookoutvision#DatasetType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + }, + "smithy.api#pattern": "train|test" + } + }, + "com.amazonaws.lookoutvision#DateTime": { + "type": "timestamp" + }, + "com.amazonaws.lookoutvision#DeleteDataset": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DeleteDatasetRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DeleteDatasetResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an existing Amazon Lookout for Vision dataset.

          \n

          If your project has a single\n dataset, you must create a new dataset before you can create a model.

          \n

          If your project has a training dataset and a test dataset, consider the following.

          \n
            \n
          • \n

            If you delete the test dataset, your project reverts to a single dataset project. If you then\n train the model, Amazon Lookout for Vision internally splits the remaining dataset into a training and test dataset.

            \n
          • \n
          • \n

            If you delete the training dataset, you must create a training dataset before you can create a model.

            \n
          • \n
          \n

          It might take a while to delete the dataset. To check the current status, check the Status field\n in the response from a call to DescribeDataset.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#DeleteDatasetRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the dataset that you want to delete.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset to delete. Specify train to delete the training dataset.\n Specify test to delete the test dataset. To delete the dataset in a single dataset project, \n specify train.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to DeleteDataset\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from DeleteDataset.\n In this case, safely retry your call\n to DeleteDataset by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to DeleteDataset. An idempotency\n token is active for 8 hours.

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DeleteDatasetResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.lookoutvision#DeleteModel": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DeleteModelRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DeleteModelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an Amazon Lookout for Vision model. You can't delete a running model. To stop a running model,\n use the StopModel operation.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#DeleteModelRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the model that you want to delete.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model that you want to delete.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to DeleteModel\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from DeleteModel.\n In this case, safely retry your call\n to DeleteModel by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to DeleteModel. An idempotency\n token is active for 8 hours.

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DeleteModelResponse": { + "type": "structure", + "members": { + "ModelArn": { + "target": "com.amazonaws.lookoutvision#ModelArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model that was deleted.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DeleteProject": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DeleteProjectRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DeleteProjectResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an Amazon Lookout for Vision project.

          \n

          To delete a project, you must first delete each version of the model associated with \n the project. To delete a model use the DeleteModel operation.

          \n

          The training and test datasets are deleted automatically for you. \n The images referenced by the training and test datasets aren't deleted.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/2020-11-20/projects/{ProjectName}", + "code": 200 + } + } + }, + "com.amazonaws.lookoutvision#DeleteProjectRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project to delete.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to DeleteProject\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from DeleteProject.\n In this case, safely retry your call\n to DeleteProject by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to DeleteProject. An idempotency\n token is active for 8 hours.

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DeleteProjectResponse": { + "type": "structure", + "members": { + "ProjectArn": { + "target": "com.amazonaws.lookoutvision#ProjectArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the project that was deleted.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DescribeDataset": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DescribeDatasetRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DescribeDatasetResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Describe an Amazon Lookout for Vision dataset.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}", + "code": 200 + } + } + }, + "com.amazonaws.lookoutvision#DescribeDatasetRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the dataset that you want to describe.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset to describe. Specify train to describe the \n training dataset. Specify test to describe the test dataset.\n If you have a single dataset project, specify train.\n

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DescribeDatasetResponse": { + "type": "structure", + "members": { + "DatasetDescription": { + "target": "com.amazonaws.lookoutvision#DatasetDescription", + "traits": { + "smithy.api#documentation": "

          The description of the requested dataset.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DescribeModel": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DescribeModelRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DescribeModelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes a version of an Amazon Lookout for Vision model.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}", + "code": 200 + } + } + }, + "com.amazonaws.lookoutvision#DescribeModelRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The project that contains the version of a model that you want to describe.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model that you want to describe.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DescribeModelResponse": { + "type": "structure", + "members": { + "ModelDescription": { + "target": "com.amazonaws.lookoutvision#ModelDescription", + "traits": { + "smithy.api#documentation": "

          Contains the description of the model.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DescribeProject": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DescribeProjectRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DescribeProjectResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes an Amazon Lookout for Vision project.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/2020-11-20/projects/{ProjectName}", + "code": 200 + } + } + }, + "com.amazonaws.lookoutvision#DescribeProjectRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that you want to describe.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DescribeProjectResponse": { + "type": "structure", + "members": { + "ProjectDescription": { + "target": "com.amazonaws.lookoutvision#ProjectDescription", + "traits": { + "smithy.api#documentation": "

          The description of the project.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DetectAnomalies": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#DetectAnomaliesRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#DetectAnomaliesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Detects anomalies in an image that you supply.

          \n

          The response from DetectAnomalies includes a boolean prediction\n that the image contains one or more anomalies and a confidence value for the prediction.

          \n \n

          Before calling DetectAnomalies, you must first start your model with the StartModel operation.\n You are charged for the amount of time, in minutes, that a model runs and for the number of anomaly detection units that your\n model uses. If you are not using a model, use the StopModel operation to stop your model.

          \n
          ", + "smithy.api#http": { + "method": "POST", + "uri": "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}/detect", + "code": 200 + } + } + }, + "com.amazonaws.lookoutvision#DetectAnomaliesRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the model version that you want to use.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model that you want to use.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Body": { + "target": "com.amazonaws.lookoutvision#Stream", + "traits": { + "smithy.api#documentation": "

          The unencrypted image bytes that you want to analyze.

          ", + "smithy.api#httpPayload": {}, + "smithy.api#required": {} + } + }, + "ContentType": { + "target": "com.amazonaws.lookoutvision#ContentType", + "traits": { + "smithy.api#documentation": "

          The type of the image passed in Body.\n Valid values are image/png (PNG format images) and image/jpeg (JPG format images).

          ", + "smithy.api#httpHeader": "content-type", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.lookoutvision#DetectAnomaliesResponse": { + "type": "structure", + "members": { + "DetectAnomalyResult": { + "target": "com.amazonaws.lookoutvision#DetectAnomalyResult", + "traits": { + "smithy.api#documentation": "

          The results of the DetectAnomalies operation.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#DetectAnomalyResult": { + "type": "structure", + "members": { + "Source": { + "target": "com.amazonaws.lookoutvision#ImageSource", + "traits": { + "smithy.api#documentation": "

          The source of the image that was analyzed. direct means that the\n image was supplied from the local computer. No other values are supported.

          " + } + }, + "IsAnomalous": { + "target": "com.amazonaws.lookoutvision#Boolean", + "traits": { + "smithy.api#documentation": "

          True if the image contains an anomaly, otherwise false.

          " + } + }, + "Confidence": { + "target": "com.amazonaws.lookoutvision#Float", + "traits": { + "smithy.api#documentation": "

          The confidence that Amazon Lookout for Vision has in the accuracy of the prediction.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The prediction results from a call to DetectAnomalies.

          " + } + }, + "com.amazonaws.lookoutvision#ExceptionString": { + "type": "string" + }, + "com.amazonaws.lookoutvision#Float": { + "type": "float", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.lookoutvision#ImageSource": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.lookoutvision#ImageSourceType", + "traits": { + "smithy.api#documentation": "

          The type of the image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The source for an image.

          " + } + }, + "com.amazonaws.lookoutvision#ImageSourceType": { + "type": "string", + "traits": { + "smithy.api#pattern": "direct" + } + }, + "com.amazonaws.lookoutvision#InferenceUnits": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.lookoutvision#InputS3Object": { + "type": "structure", + "members": { + "Bucket": { + "target": "com.amazonaws.lookoutvision#S3BucketName", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 bucket that contains the manifest.

          ", + "smithy.api#required": {} + } + }, + "Key": { + "target": "com.amazonaws.lookoutvision#S3ObjectKey", + "traits": { + "smithy.api#documentation": "

          The name and location of the manifest file within the bucket.

          ", + "smithy.api#required": {} + } + }, + "VersionId": { + "target": "com.amazonaws.lookoutvision#S3ObjectVersion", + "traits": { + "smithy.api#documentation": "

          The version ID of the bucket.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Amazon S3 Location information for an input manifest file.

          " + } + }, + "com.amazonaws.lookoutvision#Integer": { + "type": "integer", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.lookoutvision#InternalServerException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + }, + "RetryAfterSeconds": { + "target": "com.amazonaws.lookoutvision#RetryAfterSeconds", + "traits": { + "smithy.api#documentation": "

          The period of time, in seconds, before the operation can be retried.

          ", + "smithy.api#httpHeader": "Retry-After" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Amazon Lookout for Vision experienced a service issue. Try your call again.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.lookoutvision#IsLabeled": { + "type": "boolean", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.lookoutvision#KmsKeyId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": "^[A-Za-z0-9][A-Za-z0-9:_/+=,@.-]{0,2048}$" + } + }, + "com.amazonaws.lookoutvision#ListDatasetEntries": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#ListDatasetEntriesRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#ListDatasetEntriesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the JSON Lines within a dataset. An Amazon Lookout for Vision JSON Line contains the anomaly\n information for a single image, including the image location and the assigned label.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}/entries", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.lookoutvision#ListDatasetEntriesRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the dataset that you want to list.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset that you want to list. Specify train to list \n the training dataset. Specify test to list the test dataset. If you have a single dataset\n project, specify train.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Labeled": { + "target": "com.amazonaws.lookoutvision#IsLabeled", + "traits": { + "smithy.api#documentation": "

          Specify true to include labeled entries, otherwise specify false. If you\n don't specify a value, Lookout for Vision returns all entries.

          ", + "smithy.api#httpQuery": "labeled" + } + }, + "AnomalyClass": { + "target": "com.amazonaws.lookoutvision#AnomalyClassFilter", + "traits": { + "smithy.api#documentation": "

          Specify normal to include only normal images. Specify anomaly to only include\n anomalous entries. If you don't specify a value, Amazon Lookout for Vision returns normal and anomalous images.

          ", + "smithy.api#httpQuery": "anomalyClass" + } + }, + "BeforeCreationDate": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          Only includes entries before the specified date in the response. For example, 2020-06-23T00:00:00.

          ", + "smithy.api#httpQuery": "createdBefore" + } + }, + "AfterCreationDate": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          Only includes entries after the specified date in the response. For example, 2020-06-23T00:00:00.

          ", + "smithy.api#httpQuery": "createdAfter" + } + }, + "NextToken": { + "target": "com.amazonaws.lookoutvision#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was incomplete (because there is more data to retrieve),\n Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to\n retrieve the next set of dataset entries.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.lookoutvision#PageSize", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per paginated call. The largest value you can specify is 100. \n If you specify a value greater than 100, a ValidationException\n error occurs. The default value is 100.

          ", + "smithy.api#httpQuery": "maxResults" + } + }, + "SourceRefContains": { + "target": "com.amazonaws.lookoutvision#QueryString", + "traits": { + "smithy.api#documentation": "

          Perform a \"contains\" search on the values of the source-ref key within the dataset. \n For example a value of \"IMG_17\" returns all JSON Lines where the source-ref key value matches *IMG_17*.

          ", + "smithy.api#httpQuery": "sourceRefContains" + } + } + } + }, + "com.amazonaws.lookoutvision#ListDatasetEntriesResponse": { + "type": "structure", + "members": { + "DatasetEntries": { + "target": "com.amazonaws.lookoutvision#DatasetEntryList", + "traits": { + "smithy.api#documentation": "

          A list of the entries (JSON Lines) within the dataset.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.lookoutvision#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the response is truncated, Amazon Lookout for Vision returns this token\n that you can use in the subsequent request to retrieve the next set of dataset entries.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#ListModels": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#ListModelsRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#ListModelsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the versions of a model in an Amazon Lookout for Vision project.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/2020-11-20/projects/{ProjectName}/models", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.lookoutvision#ListModelsRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the model versions that you want to list.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.lookoutvision#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was incomplete (because there is more data to retrieve),\n Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to\n retrieve the next set of models.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.lookoutvision#PageSize", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per paginated call. The largest value you can specify is 100. \n If you specify a value greater than 100, a ValidationException\n error occurs. The default value is 100.

          ", + "smithy.api#httpQuery": "maxResults" + } + } + } + }, + "com.amazonaws.lookoutvision#ListModelsResponse": { + "type": "structure", + "members": { + "Models": { + "target": "com.amazonaws.lookoutvision#ModelMetadataList", + "traits": { + "smithy.api#documentation": "

          A list of model versions in the specified project.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.lookoutvision#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the response is truncated, Amazon Lookout for Vision returns this token\n that you can use in the subsequent request to retrieve the next set of models.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#ListProjects": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#ListProjectsRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#ListProjectsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the Amazon Lookout for Vision projects in your AWS account.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/2020-11-20/projects", + "code": 200 + }, + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.lookoutvision#ListProjectsRequest": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.lookoutvision#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was incomplete (because there is more data to retrieve),\n Amazon Lookout for Vision returns a pagination token in the response. You can use this pagination token to\n retrieve the next set of projects.

          ", + "smithy.api#httpQuery": "nextToken" + } + }, + "MaxResults": { + "target": "com.amazonaws.lookoutvision#PageSize", + "traits": { + "smithy.api#documentation": "

          The maximum number of results to return per paginated call. The largest value you can specify is 100. \n If you specify a value greater than 100, a ValidationException\n error occurs. The default value is 100.

          ", + "smithy.api#httpQuery": "maxResults" + } + } + } + }, + "com.amazonaws.lookoutvision#ListProjectsResponse": { + "type": "structure", + "members": { + "Projects": { + "target": "com.amazonaws.lookoutvision#ProjectMetadataList", + "traits": { + "smithy.api#documentation": "

          A list of projects in your AWS account.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.lookoutvision#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the response is truncated, Amazon Lookout for Vision returns this token\n that you can use in the subsequent request to retrieve the next set of projects.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#LookoutVisionService": { + "type": "service", + "version": "2020-11-20", + "operations": [ + { + "target": "com.amazonaws.lookoutvision#CreateDataset" + }, + { + "target": "com.amazonaws.lookoutvision#CreateModel" + }, + { + "target": "com.amazonaws.lookoutvision#CreateProject" + }, + { + "target": "com.amazonaws.lookoutvision#DeleteDataset" + }, + { + "target": "com.amazonaws.lookoutvision#DeleteModel" + }, + { + "target": "com.amazonaws.lookoutvision#DeleteProject" + }, + { + "target": "com.amazonaws.lookoutvision#DescribeDataset" + }, + { + "target": "com.amazonaws.lookoutvision#DescribeModel" + }, + { + "target": "com.amazonaws.lookoutvision#DescribeProject" + }, + { + "target": "com.amazonaws.lookoutvision#DetectAnomalies" + }, + { + "target": "com.amazonaws.lookoutvision#ListDatasetEntries" + }, + { + "target": "com.amazonaws.lookoutvision#ListModels" + }, + { + "target": "com.amazonaws.lookoutvision#ListProjects" + }, + { + "target": "com.amazonaws.lookoutvision#StartModel" + }, + { + "target": "com.amazonaws.lookoutvision#StopModel" + }, + { + "target": "com.amazonaws.lookoutvision#UpdateDatasetEntries" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "LookoutVision", + "arnNamespace": "lookoutvision", + "cloudFormationName": "LookoutVision", + "cloudTrailEventSource": "lookoutvision.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "lookoutvision" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          This is the Amazon Lookout for Vision API Reference. It provides descriptions of actions, \n data types, common parameters, and common errors.

          \n

          Amazon Lookout for Vision enables you to find visual defects in industrial products,\n accurately and at scale. It uses computer vision to identify missing components in an industrial product,\n damage to vehicles or structures, irregularities in production lines, and even minuscule defects in\n silicon wafers — or any other physical item where quality is important such as a missing capacitor\n on printed circuit boards.

          ", + "smithy.api#title": "Amazon Lookout for Vision" + } + }, + "com.amazonaws.lookoutvision#ModelArn": { + "type": "string" + }, + "com.amazonaws.lookoutvision#ModelDescription": { + "type": "structure", + "members": { + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model.

          " + } + }, + "ModelArn": { + "target": "com.amazonaws.lookoutvision#ModelArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          " + } + }, + "CreationTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The unix timestamp for the date and time that the model was created.

          " + } + }, + "Description": { + "target": "com.amazonaws.lookoutvision#ModelDescriptionMessage", + "traits": { + "smithy.api#documentation": "

          The description for the model.

          " + } + }, + "Status": { + "target": "com.amazonaws.lookoutvision#ModelStatus", + "traits": { + "smithy.api#documentation": "

          The status of the model.

          " + } + }, + "StatusMessage": { + "target": "com.amazonaws.lookoutvision#ModelStatusMessage", + "traits": { + "smithy.api#documentation": "

          The status message for the model.

          " + } + }, + "Performance": { + "target": "com.amazonaws.lookoutvision#ModelPerformance", + "traits": { + "smithy.api#documentation": "

          Performance metrics for the model. Created during training.

          " + } + }, + "OutputConfig": { + "target": "com.amazonaws.lookoutvision#OutputConfig", + "traits": { + "smithy.api#documentation": "

          The S3 location where Amazon Lookout for Vision saves model training files.

          " + } + }, + "EvaluationManifest": { + "target": "com.amazonaws.lookoutvision#OutputS3Object", + "traits": { + "smithy.api#documentation": "

          The S3 location where Amazon Lookout for Vision saves the manifest file\n that was used to test the trained model and generate the performance scores.

          " + } + }, + "EvaluationResult": { + "target": "com.amazonaws.lookoutvision#OutputS3Object", + "traits": { + "smithy.api#documentation": "

          The S3 location where Amazon Lookout for Vision saves the performance metrics.

          " + } + }, + "EvaluationEndTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The unix timestamp for the date and time that the evaluation ended.

          " + } + }, + "KmsKeyId": { + "target": "com.amazonaws.lookoutvision#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The identifier for the AWS Key Management Service (AWS KMS) key that was used to encrypt the model\n during training.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes an Amazon Lookout for Vision model.

          " + } + }, + "com.amazonaws.lookoutvision#ModelDescriptionMessage": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 500 + }, + "smithy.api#pattern": "[0-9A-Za-z\\.\\-_]*" + } + }, + "com.amazonaws.lookoutvision#ModelHostingStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "RUNNING", + "name": "RUNNING" + }, + { + "value": "STARTING", + "name": "STARTING" + }, + { + "value": "STOPPED", + "name": "STOPPED" + }, + { + "value": "FAILED", + "name": "FAILED" + } + ] + } + }, + "com.amazonaws.lookoutvision#ModelMetadata": { + "type": "structure", + "members": { + "CreationTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The unix timestamp for the date and time that the model was created.

          " + } + }, + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model.

          " + } + }, + "ModelArn": { + "target": "com.amazonaws.lookoutvision#ModelArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          " + } + }, + "Description": { + "target": "com.amazonaws.lookoutvision#ModelDescriptionMessage", + "traits": { + "smithy.api#documentation": "

          The description for the model.

          " + } + }, + "Status": { + "target": "com.amazonaws.lookoutvision#ModelStatus", + "traits": { + "smithy.api#documentation": "

          The status of the model.

          " + } + }, + "StatusMessage": { + "target": "com.amazonaws.lookoutvision#ModelStatusMessage", + "traits": { + "smithy.api#documentation": "

          The status message for the model.

          " + } + }, + "Performance": { + "target": "com.amazonaws.lookoutvision#ModelPerformance", + "traits": { + "smithy.api#documentation": "

          Performance metrics for the model. Created during training.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes an Amazon Lookout for Vision model.

          " + } + }, + "com.amazonaws.lookoutvision#ModelMetadataList": { + "type": "list", + "member": { + "target": "com.amazonaws.lookoutvision#ModelMetadata" + } + }, + "com.amazonaws.lookoutvision#ModelPerformance": { + "type": "structure", + "members": { + "F1Score": { + "target": "com.amazonaws.lookoutvision#Float", + "traits": { + "smithy.api#documentation": "

          The overall F1 score metric for the trained model.

          " + } + }, + "Recall": { + "target": "com.amazonaws.lookoutvision#Float", + "traits": { + "smithy.api#documentation": "

          The overall recall metric value for the trained model.

          " + } + }, + "Precision": { + "target": "com.amazonaws.lookoutvision#Float", + "traits": { + "smithy.api#documentation": "

          The overall precision metric value for the trained model.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about the evaluation performance of a trained model.

          " + } + }, + "com.amazonaws.lookoutvision#ModelStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "TRAINING", + "name": "TRAINING" + }, + { + "value": "TRAINED", + "name": "TRAINED" + }, + { + "value": "TRAINING_FAILED", + "name": "TRAINING_FAILED" + }, + { + "value": "STARTING_HOSTING", + "name": "STARTING_HOSTING" + }, + { + "value": "HOSTED", + "name": "HOSTED" + }, + { + "value": "HOSTING_FAILED", + "name": "HOSTING_FAILED" + }, + { + "value": "STOPPING_HOSTING", + "name": "STOPPING_HOSTING" + }, + { + "value": "SYSTEM_UPDATING", + "name": "SYSTEM_UPDATING" + }, + { + "value": "DELETING", + "name": "DELETING" + } + ] + } + }, + "com.amazonaws.lookoutvision#ModelStatusMessage": { + "type": "string" + }, + "com.amazonaws.lookoutvision#ModelVersion": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + }, + "smithy.api#pattern": "([1-9][0-9]*|latest)" + } + }, + "com.amazonaws.lookoutvision#OutputConfig": { + "type": "structure", + "members": { + "S3Location": { + "target": "com.amazonaws.lookoutvision#S3Location", + "traits": { + "smithy.api#documentation": "

          The S3 location for the output.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The S3 location where Amazon Lookout for Vision saves model training files.

          " + } + }, + "com.amazonaws.lookoutvision#OutputS3Object": { + "type": "structure", + "members": { + "Bucket": { + "target": "com.amazonaws.lookoutvision#S3BucketName", + "traits": { + "smithy.api#documentation": "

          The bucket that contains the training output.

          ", + "smithy.api#required": {} + } + }, + "Key": { + "target": "com.amazonaws.lookoutvision#S3ObjectKey", + "traits": { + "smithy.api#documentation": "

          The location of the training output in the bucket.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The S3 location where Amazon Lookout for Vision saves training output.

          " + } + }, + "com.amazonaws.lookoutvision#PageSize": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.lookoutvision#PaginationToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": "^[a-zA-Z0-9\\/\\+\\=]{0,2048}$" + } + }, + "com.amazonaws.lookoutvision#ProjectArn": { + "type": "string" + }, + "com.amazonaws.lookoutvision#ProjectDescription": { + "type": "structure", + "members": { + "ProjectArn": { + "target": "com.amazonaws.lookoutvision#ProjectArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the project.

          " + } + }, + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project.

          " + } + }, + "CreationTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The unix timestamp for the date and time that the project was created.

          " + } + }, + "Datasets": { + "target": "com.amazonaws.lookoutvision#DatasetMetadataList", + "traits": { + "smithy.api#documentation": "

          A list of datasets in the project.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes an Amazon Lookout for Vision project. For more information, see DescribeProject.

          " + } + }, + "com.amazonaws.lookoutvision#ProjectMetadata": { + "type": "structure", + "members": { + "ProjectArn": { + "target": "com.amazonaws.lookoutvision#ProjectArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the project.

          " + } + }, + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project.

          " + } + }, + "CreationTimestamp": { + "target": "com.amazonaws.lookoutvision#DateTime", + "traits": { + "smithy.api#documentation": "

          The unix timestamp for the date and time that the project was created.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Metadata about an Amazon Lookout for Vision project.

          " + } + }, + "com.amazonaws.lookoutvision#ProjectMetadataList": { + "type": "list", + "member": { + "target": "com.amazonaws.lookoutvision#ProjectMetadata" + } + }, + "com.amazonaws.lookoutvision#ProjectName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": "[a-zA-Z0-9][a-zA-Z0-9_\\-]*" + } + }, + "com.amazonaws.lookoutvision#QueryString": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": ".*\\S.*" + } + }, + "com.amazonaws.lookoutvision#ResourceNotFoundException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + }, + "ResourceId": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The ID of the resource.

          ", + "smithy.api#required": {} + } + }, + "ResourceType": { + "target": "com.amazonaws.lookoutvision#ResourceType", + "traits": { + "smithy.api#documentation": "

          The type of the resource.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The resource could not be found.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.lookoutvision#ResourceType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "PROJECT", + "name": "PROJECT" + }, + { + "value": "DATASET", + "name": "DATASET" + }, + { + "value": "MODEL", + "name": "MODEL" + }, + { + "value": "TRIAL", + "name": "TRIAL" + } + ] + } + }, + "com.amazonaws.lookoutvision#RetryAfterSeconds": { + "type": "integer" + }, + "com.amazonaws.lookoutvision#S3BucketName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 3, + "max": 63 + }, + "smithy.api#pattern": "[0-9A-Za-z\\.\\-_]*" + } + }, + "com.amazonaws.lookoutvision#S3KeyPrefix": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "^([a-zA-Z0-9!_.*'()-][/a-zA-Z0-9!_.*'()-]*)?$" + } + }, + "com.amazonaws.lookoutvision#S3Location": { + "type": "structure", + "members": { + "Bucket": { + "target": "com.amazonaws.lookoutvision#S3BucketName", + "traits": { + "smithy.api#documentation": "

          The S3 bucket that contains the manifest file.

          ", + "smithy.api#required": {} + } + }, + "Prefix": { + "target": "com.amazonaws.lookoutvision#S3KeyPrefix", + "traits": { + "smithy.api#documentation": "

          The path and name of the manifest file within the S3 bucket.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about the location of a manifest file.

          " + } + }, + "com.amazonaws.lookoutvision#S3ObjectKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1024 + }, + "smithy.api#pattern": "^([a-zA-Z0-9!_.*'()-][/a-zA-Z0-9!_.*'()-]*)?$" + } + }, + "com.amazonaws.lookoutvision#S3ObjectVersion": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1024 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.lookoutvision#ServiceQuotaExceededException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + }, + "ResourceId": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The ID of the resource.

          " + } + }, + "ResourceType": { + "target": "com.amazonaws.lookoutvision#ResourceType", + "traits": { + "smithy.api#documentation": "

          The type of the resource.

          " + } + }, + "QuotaCode": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The quota code.

          ", + "smithy.api#required": {} + } + }, + "ServiceCode": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The service code.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A service quota was exceeded. For more information, see\n Limits in Amazon Lookout for Vision in the Amazon Lookout for Vision Developer Guide.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 402 + } + }, + "com.amazonaws.lookoutvision#StartModel": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#StartModelRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#StartModelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ServiceQuotaExceededException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Starts the running of the version of an Amazon Lookout for Vision model. Starting a model takes a while\n to complete. To check the current state of the model, use DescribeModel.

          \n

          Once the model is running, you can detect custom labels in new images by calling \n DetectAnomalies.

          \n \n

          You are charged for the amount of time that the model is running. To stop a running\n model, call StopModel.

          \n
          ", + "smithy.api#http": { + "method": "POST", + "uri": "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}/start", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#StartModelRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the model that you want to start.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model that you want to start.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "MinInferenceUnits": { + "target": "com.amazonaws.lookoutvision#InferenceUnits", + "traits": { + "smithy.api#documentation": "

          The minimum number of inference units to use. A single\n inference unit represents 1 hour of processing and can support up to 5 Transactions Per Second (TPS).\n Use a higher number to increase the TPS throughput of your model. You are charged for the number\n of inference units that you use.\n

          ", + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to StartModel\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from StartModel.\n In this case, safely retry your call\n to StartModel by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to StartModel. An idempotency\n token is active for 8 hours.\n

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#StartModelResponse": { + "type": "structure", + "members": { + "Status": { + "target": "com.amazonaws.lookoutvision#ModelHostingStatus", + "traits": { + "smithy.api#documentation": "

          The current running status of the model.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#StopModel": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#StopModelRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#StopModelResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Stops a running model. The operation might take a while to complete. To\n check the current status, call DescribeModel.

          ", + "smithy.api#http": { + "method": "POST", + "uri": "/2020-11-20/projects/{ProjectName}/models/{ModelVersion}/stop", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#StopModelRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the model that you want to stop.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ModelVersion": { + "target": "com.amazonaws.lookoutvision#ModelVersion", + "traits": { + "smithy.api#documentation": "

          The version of the model that you want to stop.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to StopModel\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from StopModel.\n In this case, safely retry your call\n to StopModel by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to StopModel. An idempotency\n token is active for 8 hours.\n \n \n

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#StopModelResponse": { + "type": "structure", + "members": { + "Status": { + "target": "com.amazonaws.lookoutvision#ModelHostingStatus", + "traits": { + "smithy.api#documentation": "

          The status of the model.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#Stream": { + "type": "blob", + "traits": { + "smithy.api#requiresLength": {}, + "smithy.api#streaming": {} + } + }, + "com.amazonaws.lookoutvision#ThrottlingException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + }, + "QuotaCode": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The quota code.

          " + } + }, + "ServiceCode": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#documentation": "

          The service code.

          " + } + }, + "RetryAfterSeconds": { + "target": "com.amazonaws.lookoutvision#RetryAfterSeconds", + "traits": { + "smithy.api#documentation": "

          The period of time, in seconds, before the operation can be retried.

          ", + "smithy.api#httpHeader": "Retry-After" + } + } + }, + "traits": { + "smithy.api#documentation": "

          Amazon Lookout for Vision is temporarily unable to process the request. Try your call again.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 429 + } + }, + "com.amazonaws.lookoutvision#UpdateDatasetEntries": { + "type": "operation", + "input": { + "target": "com.amazonaws.lookoutvision#UpdateDatasetEntriesRequest" + }, + "output": { + "target": "com.amazonaws.lookoutvision#UpdateDatasetEntriesResponse" + }, + "errors": [ + { + "target": "com.amazonaws.lookoutvision#AccessDeniedException" + }, + { + "target": "com.amazonaws.lookoutvision#ConflictException" + }, + { + "target": "com.amazonaws.lookoutvision#InternalServerException" + }, + { + "target": "com.amazonaws.lookoutvision#ResourceNotFoundException" + }, + { + "target": "com.amazonaws.lookoutvision#ThrottlingException" + }, + { + "target": "com.amazonaws.lookoutvision#ValidationException" + } + ], + "traits": { + "smithy.api#documentation": "

          Adds one or more JSON Line entries to a dataset. A JSON Line includes information about an image\n used for training or testing an Amazon Lookout for Vision model. The following is an example JSON Line.

          \n \n \n

          Updating a dataset might take a while to complete. To check the current status, call DescribeDataset and\n check the Status field in the response.

          ", + "smithy.api#http": { + "method": "PATCH", + "uri": "/2020-11-20/projects/{ProjectName}/datasets/{DatasetType}/entries", + "code": 202 + } + } + }, + "com.amazonaws.lookoutvision#UpdateDatasetEntriesRequest": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.lookoutvision#ProjectName", + "traits": { + "smithy.api#documentation": "

          The name of the project that contains the dataset that you want to update.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "DatasetType": { + "target": "com.amazonaws.lookoutvision#DatasetType", + "traits": { + "smithy.api#documentation": "

          The type of the dataset that you want to update. Specify train to update\n the training dataset. Specify test to update the test dataset. If you\n have a single dataset project, specify train.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Changes": { + "target": "com.amazonaws.lookoutvision#DatasetChanges", + "traits": { + "smithy.api#documentation": "

          The entries to add to the dataset.

          ", + "smithy.api#required": {} + } + }, + "ClientToken": { + "target": "com.amazonaws.lookoutvision#ClientToken", + "traits": { + "smithy.api#documentation": "

          ClientToken is an idempotency token that ensures a call to UpdateDatasetEntries\n completes only once. You choose the value to pass. For example, an issue, \n such as a network outage, might prevent you from getting a response from UpdateDatasetEntries.\n In this case, safely retry your call\n to UpdateDatasetEntries by using the same ClientToken parameter value. An error occurs\n if the other input parameters are not the same as in the first request. Using a different \n value for ClientToken is considered a new call to UpdateDatasetEntries. An idempotency\n token is active for 8 hours.\n

          ", + "smithy.api#httpHeader": "X-Amzn-Client-Token", + "smithy.api#idempotencyToken": {} + } + } + } + }, + "com.amazonaws.lookoutvision#UpdateDatasetEntriesResponse": { + "type": "structure", + "members": { + "Status": { + "target": "com.amazonaws.lookoutvision#DatasetStatus", + "traits": { + "smithy.api#documentation": "

          The status of the dataset update.

          " + } + } + } + }, + "com.amazonaws.lookoutvision#ValidationException": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.lookoutvision#ExceptionString", + "traits": { + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          An input validation error occurred. For example, invalid characters in a project name,\n or if a pagination token is invalid.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + } + } +} diff --git a/codegen/sdk-codegen/aws-models/rds-data.2018-08-01.json b/codegen/sdk-codegen/aws-models/rds-data.2018-08-01.json index ff401c7fc384..3ec488966fc7 100644 --- a/codegen/sdk-codegen/aws-models/rds-data.2018-08-01.json +++ b/codegen/sdk-codegen/aws-models/rds-data.2018-08-01.json @@ -900,7 +900,7 @@ "cloudTrailEventSource": "rds-data.amazonaws.com" }, "aws.auth#sigv4": { - "name": "" + "name": "rds-data" }, "aws.protocols#restJson1": {}, "smithy.api#cors": {}, diff --git a/codegen/sdk-codegen/aws-models/s3.2006-03-01.json b/codegen/sdk-codegen/aws-models/s3.2006-03-01.json index 425cc06ae6f2..7c1113ef0a77 100644 --- a/codegen/sdk-codegen/aws-models/s3.2006-03-01.json +++ b/codegen/sdk-codegen/aws-models/s3.2006-03-01.json @@ -680,7 +680,7 @@ "CreationDate": { "target": "com.amazonaws.s3#CreationDate", "traits": { - "smithy.api#documentation": "

          Date the bucket was created.

          " + "smithy.api#documentation": "

          Date the bucket was created. This date can change when making changes to your bucket, such as editing its bucket policy.

          " } } }, @@ -736,6 +736,9 @@ ] } }, + "com.amazonaws.s3#BucketKeyEnabled": { + "type": "boolean" + }, "com.amazonaws.s3#BucketLifecycleConfiguration": { "type": "structure", "members": { @@ -1159,6 +1162,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-aws-kms-key-id" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestCharged": { "target": "com.amazonaws.s3#RequestCharged", "traits": { @@ -1340,7 +1350,7 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a copy of an object that is already stored in Amazon S3.

          \n \n

          You can store individual objects of up to 5 TB in Amazon S3. You create a copy of your\n object up to 5 GB in size in a single atomic operation using this API. However, to copy\n an object greater than 5 GB, you must use the multipart upload Upload Part - Copy API.\n For more information, see Copy Object Using the REST Multipart Upload API.

          \n
          \n

          All copy requests must be authenticated. Additionally, you must have\n read access to the source object and write\n access to the destination bucket. For more information, see REST Authentication. Both the Region\n that you want to copy the object from and the Region that you want to copy the object to\n must be enabled for your account.

          \n

          A copy request might return an error when Amazon S3 receives the copy request or while Amazon S3\n is copying the files. If the error occurs before the copy operation starts, you receive a\n standard Amazon S3 error. If the error occurs during the copy operation, the error response is\n embedded in the 200 OK response. This means that a 200 OK\n response can contain either a success or an error. Design your application to parse the\n contents of the response and handle it appropriately.

          \n

          If the copy is successful, you receive a response with information about the copied\n object.

          \n \n

          If the request is an HTTP 1.1 request, the response is chunk encoded. If it were not,\n it would not contain the content-length, and you would need to read the entire\n body.

          \n
          \n

          The copy request charge is based on the storage class and Region that you specify for\n the destination object. For pricing information, see Amazon S3 pricing.

          \n \n

          Amazon S3 transfer acceleration does not support cross-Region copies. If you request a\n cross-Region copy using a transfer acceleration endpoint, you get a 400 Bad\n Request error. For more information, see Transfer Acceleration.

          \n
          \n

          \n Metadata\n

          \n

          When copying an object, you can preserve all metadata (default) or specify new metadata.\n However, the ACL is not preserved and is set to private for the user making the request. To\n override the default ACL setting, specify a new ACL when generating a copy request. For\n more information, see Using ACLs.

          \n

          To specify whether you want the object metadata copied from the source object or\n replaced with metadata provided in the request, you can optionally add the\n x-amz-metadata-directive header. When you grant permissions, you can use\n the s3:x-amz-metadata-directive condition key to enforce certain metadata\n behavior when objects are uploaded. For more information, see Specifying Conditions in a\n Policy in the Amazon S3 Developer Guide. For a complete list of\n Amazon S3-specific condition keys, see Actions, Resources, and Condition Keys for\n Amazon S3.

          \n

          \n \n x-amz-copy-source-if Headers\n

          \n

          To only copy an object under certain conditions, such as whether the Etag\n matches or whether the object was modified before or after a specified date, use the\n following request parameters:

          \n
            \n
          • \n

            \n x-amz-copy-source-if-match\n

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-none-match\n

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-unmodified-since\n

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-modified-since\n

            \n
          • \n
          \n

          If both the x-amz-copy-source-if-match and\n x-amz-copy-source-if-unmodified-since headers are present in the request\n and evaluate as follows, Amazon S3 returns 200 OK and copies the data:

          \n
            \n
          • \n

            \n x-amz-copy-source-if-match condition evaluates to true

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-unmodified-since condition evaluates to\n false

            \n
          • \n
          \n\n

          If both the x-amz-copy-source-if-none-match and\n x-amz-copy-source-if-modified-since headers are present in the request and\n evaluate as follows, Amazon S3 returns the 412 Precondition Failed response\n code:

          \n
            \n
          • \n

            \n x-amz-copy-source-if-none-match condition evaluates to false

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-modified-since condition evaluates to\n true

            \n
          • \n
          \n\n \n

          All headers with the x-amz- prefix, including\n x-amz-copy-source, must be signed.

          \n
          \n

          \n Encryption\n

          \n

          The source object that you are copying can be encrypted or unencrypted. The source\n object can be encrypted with server-side encryption using AWS managed encryption keys\n (SSE-S3 or SSE-KMS) or by using a customer-provided encryption key. With server-side\n encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and\n decrypts the data when you access it.

          \n

          You can optionally use the appropriate encryption-related headers to request server-side\n encryption for the target object. You have the option to provide your own encryption key or\n use SSE-S3 or SSE-KMS, regardless of the form of server-side encryption that was used to\n encrypt the source object. You can even request encryption if the source object was not\n encrypted. For more information about server-side encryption, see Using\n Server-Side Encryption.

          \n

          \n Access Control List (ACL)-Specific Request\n Headers\n

          \n

          When copying an object, you can optionally use headers to grant ACL-based permissions.\n By default, all objects are private. Only the owner has full access control. When adding a\n new object, you can grant permissions to individual AWS accounts or to predefined groups\n defined by Amazon S3. These permissions are then added to the ACL on the object. For more\n information, see Access Control List (ACL) Overview and Managing ACLs Using the REST\n API.

          \n\n

          \n Storage Class Options\n

          \n

          You can use the CopyObject operation to change the storage class of an\n object that is already stored in Amazon S3 using the StorageClass parameter. For\n more information, see Storage\n Classes in the Amazon S3 Service Developer Guide.

          \n

          \n Versioning\n

          \n

          By default, x-amz-copy-source identifies the current version of an object\n to copy. If the current version is a delete marker, Amazon S3 behaves as if the object was\n deleted. To copy a different version, use the versionId subresource.

          \n

          If you enable versioning on the target bucket, Amazon S3 generates a unique version ID for\n the object being copied. This version ID is different from the version ID of the source\n object. Amazon S3 returns the version ID of the copied object in the\n x-amz-version-id response header in the response.

          \n

          If you do not enable versioning or suspend it on the target bucket, the version ID that\n Amazon S3 generates is always null.

          \n

          If the source object's storage class is GLACIER, you must restore a copy of this object\n before you can use it as a source object for the copy operation. For more information, see\n RestoreObject.

          \n

          The following operations are related to CopyObject:

          \n \n

          For more information, see Copying\n Objects.

          ", + "smithy.api#documentation": "

          Creates a copy of an object that is already stored in Amazon S3.

          \n \n

          You can store individual objects of up to 5 TB in Amazon S3. You create a copy of your\n object up to 5 GB in size in a single atomic operation using this API. However, to copy\n an object greater than 5 GB, you must use the multipart upload Upload Part - Copy API.\n For more information, see Copy Object Using the REST Multipart Upload API.

          \n
          \n

          All copy requests must be authenticated. Additionally, you must have\n read access to the source object and write\n access to the destination bucket. For more information, see REST Authentication. Both the Region\n that you want to copy the object from and the Region that you want to copy the object to\n must be enabled for your account.

          \n

          A copy request might return an error when Amazon S3 receives the copy request or while Amazon S3\n is copying the files. If the error occurs before the copy operation starts, you receive a\n standard Amazon S3 error. If the error occurs during the copy operation, the error response is\n embedded in the 200 OK response. This means that a 200 OK\n response can contain either a success or an error. Design your application to parse the\n contents of the response and handle it appropriately.

          \n

          If the copy is successful, you receive a response with information about the copied\n object.

          \n \n

          If the request is an HTTP 1.1 request, the response is chunk encoded. If it were not,\n it would not contain the content-length, and you would need to read the entire\n body.

          \n
          \n

          The copy request charge is based on the storage class and Region that you specify for\n the destination object. For pricing information, see Amazon S3 pricing.

          \n \n

          Amazon S3 transfer acceleration does not support cross-Region copies. If you request a\n cross-Region copy using a transfer acceleration endpoint, you get a 400 Bad\n Request error. For more information, see Transfer Acceleration.

          \n
          \n

          \n Metadata\n

          \n

          When copying an object, you can preserve all metadata (default) or specify new metadata.\n However, the ACL is not preserved and is set to private for the user making the request. To\n override the default ACL setting, specify a new ACL when generating a copy request. For\n more information, see Using ACLs.

          \n

          To specify whether you want the object metadata copied from the source object or\n replaced with metadata provided in the request, you can optionally add the\n x-amz-metadata-directive header. When you grant permissions, you can use\n the s3:x-amz-metadata-directive condition key to enforce certain metadata\n behavior when objects are uploaded. For more information, see Specifying Conditions in a\n Policy in the Amazon S3 Developer Guide. For a complete list of\n Amazon S3-specific condition keys, see Actions, Resources, and Condition Keys for\n Amazon S3.

          \n

          \n \n x-amz-copy-source-if Headers\n

          \n

          To only copy an object under certain conditions, such as whether the ETag\n matches or whether the object was modified before or after a specified date, use the\n following request parameters:

          \n
            \n
          • \n

            \n x-amz-copy-source-if-match\n

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-none-match\n

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-unmodified-since\n

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-modified-since\n

            \n
          • \n
          \n

          If both the x-amz-copy-source-if-match and\n x-amz-copy-source-if-unmodified-since headers are present in the request\n and evaluate as follows, Amazon S3 returns 200 OK and copies the data:

          \n
            \n
          • \n

            \n x-amz-copy-source-if-match condition evaluates to true

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-unmodified-since condition evaluates to\n false

            \n
          • \n
          \n\n

          If both the x-amz-copy-source-if-none-match and\n x-amz-copy-source-if-modified-since headers are present in the request and\n evaluate as follows, Amazon S3 returns the 412 Precondition Failed response\n code:

          \n
            \n
          • \n

            \n x-amz-copy-source-if-none-match condition evaluates to false

            \n
          • \n
          • \n

            \n x-amz-copy-source-if-modified-since condition evaluates to\n true

            \n
          • \n
          \n\n \n

          All headers with the x-amz- prefix, including\n x-amz-copy-source, must be signed.

          \n
          \n

          \n Server-side encryption\n

          \n

          When you perform a CopyObject operation, you can optionally use the appropriate encryption-related headers to encrypt the object using server-side encryption with AWS managed encryption keys (SSE-S3 or SSE-KMS) or a customer-provided encryption key. With server-side encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts the data when you access it. For more information about server-side encryption, see Using\n Server-Side Encryption.

          \n

          If a target object uses SSE-KMS, you can enable an S3 Bucket Key for the object. For more\n information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

          \n

          \n Access Control List (ACL)-Specific Request\n Headers\n

          \n

          When copying an object, you can optionally use headers to grant ACL-based permissions.\n By default, all objects are private. Only the owner has full access control. When adding a\n new object, you can grant permissions to individual AWS accounts or to predefined groups\n defined by Amazon S3. These permissions are then added to the ACL on the object. For more\n information, see Access Control List (ACL) Overview and Managing ACLs Using the REST\n API.

          \n\n

          \n Storage Class Options\n

          \n

          You can use the CopyObject operation to change the storage class of an\n object that is already stored in Amazon S3 using the StorageClass parameter. For\n more information, see Storage\n Classes in the Amazon S3 Service Developer Guide.

          \n

          \n Versioning\n

          \n

          By default, x-amz-copy-source identifies the current version of an object\n to copy. If the current version is a delete marker, Amazon S3 behaves as if the object was\n deleted. To copy a different version, use the versionId subresource.

          \n

          If you enable versioning on the target bucket, Amazon S3 generates a unique version ID for\n the object being copied. This version ID is different from the version ID of the source\n object. Amazon S3 returns the version ID of the copied object in the\n x-amz-version-id response header in the response.

          \n

          If you do not enable versioning or suspend it on the target bucket, the version ID that\n Amazon S3 generates is always null.

          \n

          If the source object's storage class is GLACIER, you must restore a copy of this object\n before you can use it as a source object for the copy operation. For more information, see\n RestoreObject.

          \n

          The following operations are related to CopyObject:

          \n \n

          For more information, see Copying\n Objects.

          ", "smithy.api#http": { "method": "PUT", "uri": "/{Bucket}/{Key+}?x-id=CopyObject", @@ -1414,6 +1424,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-context" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the copied object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestCharged": { "target": "com.amazonaws.s3#RequestCharged", "traits": { @@ -1631,6 +1648,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-context" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using AWS KMS (SSE-KMS). Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS.

          \n

          Specifying this header with a COPY operation doesn’t affect bucket-level settings for S3 Bucket Key.

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "CopySourceSSECustomerAlgorithm": { "target": "com.amazonaws.s3#CopySourceSSECustomerAlgorithm", "traits": { @@ -1986,6 +2010,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-context" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestCharged": { "target": "com.amazonaws.s3#RequestCharged", "traits": { @@ -2153,6 +2184,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-context" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using AWS KMS (SSE-KMS). Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS.

          \n

          Specifying this header with an object operation doesn’t affect bucket-level settings for S3 Bucket Key.

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestPayer": { "target": "com.amazonaws.s3#RequestPayer", "traits": { @@ -2779,7 +2817,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Specifies whether Amazon S3 replicates delete markers. If you specify a Filter\n in your replication configuration, you must also include a\n DeleteMarkerReplication element. If your Filter includes a\n Tag element, the DeleteMarkerReplication\n Status must be set to Disabled, because Amazon S3 does not support replicating\n delete markers for tag-based rules. For an example configuration, see Basic\n Rule Configuration.

          \n

          For more information about delete marker replication, see Basic Rule Configuration.

          \n \n

          If you are using an earlier version of the replication configuration, Amazon S3 handles\n replication of delete markers differently. For more information, see Backward Compatibility.

          \n
          " + "smithy.api#documentation": "

          Specifies whether Amazon S3 replicates delete markers. If you specify a Filter\n in your replication configuration, you must also include a\n DeleteMarkerReplication element. If your Filter includes a\n Tag element, the DeleteMarkerReplication\n Status must be set to Disabled, because Amazon S3 does not support replicating\n delete markers for tag-based rules. For an example configuration, see Basic Rule Configuration.

          \n

          For more information about delete marker replication, see Basic Rule\n Configuration.

          \n \n

          If you are using an earlier version of the replication configuration, Amazon S3 handles\n replication of delete markers differently. For more information, see Backward Compatibility.

          \n
          " } }, "com.amazonaws.s3#DeleteMarkerReplicationStatus": { @@ -4929,6 +4967,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-aws-kms-key-id" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "StorageClass": { "target": "com.amazonaws.s3#StorageClass", "traits": { @@ -5713,6 +5758,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-aws-kms-key-id" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "StorageClass": { "target": "com.amazonaws.s3#StorageClass", "traits": { @@ -5729,7 +5781,7 @@ "ReplicationStatus": { "target": "com.amazonaws.s3#ReplicationStatus", "traits": { - "smithy.api#documentation": "

          Amazon S3 can return this header if your request involves a bucket that is either a source or\n destination in a replication rule.

          \n\n

          In replication, you have a source bucket on which you configure replication and\n destination bucket where Amazon S3 stores object replicas. When you request an object\n (GetObject) or object metadata (HeadObject) from these\n buckets, Amazon S3 will return the x-amz-replication-status header in the response\n as follows:

          \n
            \n
          • \n

            If requesting an object from the source bucket — Amazon S3 will return the\n x-amz-replication-status header if the object in your request is\n eligible for replication.

            \n

            For example, suppose that in your replication configuration, you specify object\n prefix TaxDocs requesting Amazon S3 to replicate objects with key prefix\n TaxDocs. Any objects you upload with this key name prefix, for\n example TaxDocs/document1.pdf, are eligible for replication. For any\n object request with this key name prefix, Amazon S3 will return the\n x-amz-replication-status header with value PENDING, COMPLETED or\n FAILED indicating object replication status.

            \n
          • \n
          • \n

            If requesting an object from the destination bucket — Amazon S3 will return the\n x-amz-replication-status header with value REPLICA if the object in\n your request is a replica that Amazon S3 created.

            \n
          • \n
          \n\n

          For more information, see Replication.

          ", + "smithy.api#documentation": "

          Amazon S3 can return this header if your request involves a bucket that is either a source or\n a destination in a replication rule.

          \n\n

          In replication, you have a source bucket on which you configure replication and\n destination bucket or buckets where Amazon S3 stores object replicas. When you request an object\n (GetObject) or object metadata (HeadObject) from these\n buckets, Amazon S3 will return the x-amz-replication-status header in the response\n as follows:

          \n
            \n
          • \n

            If requesting an object from the source bucket — Amazon S3 will return the\n x-amz-replication-status header if the object in your request is\n eligible for replication.

            \n

            For example, suppose that in your replication configuration, you specify object\n prefix TaxDocs requesting Amazon S3 to replicate objects with key prefix\n TaxDocs. Any objects you upload with this key name prefix, for\n example TaxDocs/document1.pdf, are eligible for replication. For any\n object request with this key name prefix, Amazon S3 will return the\n x-amz-replication-status header with value PENDING, COMPLETED or\n FAILED indicating object replication status.

            \n
          • \n
          • \n

            If requesting an object from a destination bucket — Amazon S3 will return the\n x-amz-replication-status header with value REPLICA if the object in\n your request is a replica that Amazon S3 created and there is no replica modification\n replication in progress.

            \n
          • \n
          • \n

            When replicating objects to multiple destination buckets the\n x-amz-replication-status header acts differently. The header of the\n source object will only return a value of COMPLETED when replication is successful to\n all destinations. The header will remain at value PENDING until replication has\n completed for all destinations. If one or more destinations fails replication the\n header will return FAILED.

            \n
          • \n
          \n\n

          For more information, see Replication.

          ", "smithy.api#httpHeader": "x-amz-replication-status" } }, @@ -9111,7 +9163,7 @@ "target": "com.amazonaws.s3#PutBucketEncryptionRequest" }, "traits": { - "smithy.api#documentation": "

          This implementation of the PUT operation uses the encryption\n subresource to set the default encryption state of an existing bucket.

          \n

          This implementation of the PUT operation sets default encryption for a\n bucket using server-side encryption with Amazon S3-managed keys SSE-S3 or AWS KMS customer\n master keys (CMKs) (SSE-KMS). For information about the Amazon S3 default encryption feature,\n see Amazon S3 Default Bucket\n Encryption.

          \n \n

          This operation requires AWS Signature Version 4. For more information, see Authenticating Requests (AWS Signature\n Version 4).

          \n
          \n

          To use this operation, you must have permissions to perform the\n s3:PutEncryptionConfiguration action. The bucket owner has this permission\n by default. The bucket owner can grant this permission to others. For more information\n about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3\n Resources in the Amazon Simple Storage Service Developer Guide.

          \n \n

          \n Related Resources\n

          \n ", + "smithy.api#documentation": "

          This operation uses the encryption subresource to configure default\n encryption and Amazon S3 Bucket Key for an existing bucket.

          \n

          Default encryption for a bucket can use server-side encryption with Amazon S3-managed keys\n (SSE-S3) or AWS KMS customer master keys (SSE-KMS). If you specify default encryption\n using SSE-KMS, you can also configure Amazon S3 Bucket Key. For information about default\n encryption, see Amazon S3 default bucket encryption\n in the Amazon Simple Storage Service Developer Guide. For more information about S3 Bucket Keys,\n see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

          \n \n

          This operation requires AWS Signature Version 4. For more information, see Authenticating Requests (AWS Signature\n Version 4).

          \n
          \n

          To use this operation, you must have permissions to perform the\n s3:PutEncryptionConfiguration action. The bucket owner has this permission\n by default. The bucket owner can grant this permission to others. For more information\n about permissions, see Permissions Related to Bucket Subresource Operations and Managing Access Permissions to Your Amazon S3\n Resources in the Amazon Simple Storage Service Developer Guide.

          \n \n

          \n Related Resources\n

          \n ", "smithy.api#http": { "method": "PUT", "uri": "/{Bucket}?encryption", @@ -9543,7 +9595,7 @@ "target": "com.amazonaws.s3#PutBucketReplicationRequest" }, "traits": { - "smithy.api#documentation": "

          Creates a replication configuration or replaces an existing one. For more information,\n see Replication in the Amazon S3 Developer Guide.

          \n \n

          To perform this operation, the user or role performing the operation must have the\n iam:PassRole permission.

          \n
          \n

          Specify the replication configuration in the request body. In the replication\n configuration, you provide the name of the destination bucket where you want Amazon S3 to\n replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your behalf,\n and other relevant information.

          \n\n\n

          A replication configuration must include at least one rule, and can contain a maximum of\n 1,000. Each rule identifies a subset of objects to replicate by filtering the objects in\n the source bucket. To choose additional subsets of objects to replicate, add a rule for\n each subset. All rules must specify the same destination bucket.

          \n\n

          To specify a subset of the objects in the source bucket to apply a replication rule to,\n add the Filter element as a child of the Rule element. You can filter objects based on an\n object key prefix, one or more object tags, or both. When you add the Filter element in the\n configuration, you must also add the following elements:\n DeleteMarkerReplication, Status, and\n Priority.

          \n \n

          If you are using an earlier version of the replication configuration, Amazon S3 handles\n replication of delete markers differently. For more information, see Backward Compatibility.

          \n
          \n

          For information about enabling versioning on a bucket, see Using Versioning.

          \n\n

          By default, a resource owner, in this case the AWS account that created the bucket, can\n perform this operation. The resource owner can also grant others permissions to perform the\n operation. For more information about permissions, see Specifying Permissions in a Policy\n and Managing Access Permissions to Your\n Amazon S3 Resources.

          \n\n

          \n Handling Replication of Encrypted Objects\n

          \n

          By default, Amazon S3 doesn't replicate objects that are stored at rest using server-side\n encryption with CMKs stored in AWS KMS. To replicate AWS KMS-encrypted objects, add the\n following: SourceSelectionCriteria, SseKmsEncryptedObjects,\n Status, EncryptionConfiguration, and\n ReplicaKmsKeyID. For information about replication configuration, see\n Replicating Objects\n Created with SSE Using CMKs stored in AWS KMS.

          \n\n

          For information on PutBucketReplication errors, see List of\n replication-related error codes\n

          \n\n\n

          The following operations are related to PutBucketReplication:

          \n ", + "smithy.api#documentation": "

          Creates a replication configuration or replaces an existing one. For more information,\n see Replication in the Amazon S3 Developer Guide.

          \n \n

          To perform this operation, the user or role performing the operation must have the\n iam:PassRole permission.

          \n
          \n

          Specify the replication configuration in the request body. In the replication\n configuration, you provide the name of the destination bucket or buckets where you want\n Amazon S3 to replicate objects, the IAM role that Amazon S3 can assume to replicate objects on your\n behalf, and other relevant information.

          \n\n\n

          A replication configuration must include at least one rule, and can contain a maximum of\n 1,000. Each rule identifies a subset of objects to replicate by filtering the objects in\n the source bucket. To choose additional subsets of objects to replicate, add a rule for\n each subset.

          \n\n

          To specify a subset of the objects in the source bucket to apply a replication rule to,\n add the Filter element as a child of the Rule element. You can filter objects based on an\n object key prefix, one or more object tags, or both. When you add the Filter element in the\n configuration, you must also add the following elements:\n DeleteMarkerReplication, Status, and\n Priority.

          \n \n

          If you are using an earlier version of the replication configuration, Amazon S3 handles\n replication of delete markers differently. For more information, see Backward Compatibility.

          \n
          \n

          For information about enabling versioning on a bucket, see Using Versioning.

          \n\n

          By default, a resource owner, in this case the AWS account that created the bucket, can\n perform this operation. The resource owner can also grant others permissions to perform the\n operation. For more information about permissions, see Specifying Permissions in a Policy\n and Managing Access Permissions to Your\n Amazon S3 Resources.

          \n\n

          \n Handling Replication of Encrypted Objects\n

          \n

          By default, Amazon S3 doesn't replicate objects that are stored at rest using server-side\n encryption with CMKs stored in AWS KMS. To replicate AWS KMS-encrypted objects, add the\n following: SourceSelectionCriteria, SseKmsEncryptedObjects,\n Status, EncryptionConfiguration, and\n ReplicaKmsKeyID. For information about replication configuration, see\n Replicating Objects\n Created with SSE Using CMKs stored in AWS KMS.

          \n\n

          For information on PutBucketReplication errors, see List of\n replication-related error codes\n

          \n\n\n

          The following operations are related to PutBucketReplication:

          \n ", "smithy.api#http": { "method": "PUT", "uri": "/{Bucket}?replication", @@ -9809,7 +9861,7 @@ "target": "com.amazonaws.s3#PutObjectOutput" }, "traits": { - "smithy.api#documentation": "

          Adds an object to a bucket. You must have WRITE permissions on a bucket to add an object\n to it.

          \n\n\n

          Amazon S3 never adds partial objects; if you receive a success response, Amazon S3 added the\n entire object to the bucket.

          \n\n

          Amazon S3 is a distributed system. If it receives multiple write requests for the same object\n simultaneously, it overwrites all but the last object written. Amazon S3 does not provide object\n locking; if you need this, make sure to build it into your application layer or use\n versioning instead.

          \n\n

          To ensure that data is not corrupted traversing the network, use the\n Content-MD5 header. When you use this header, Amazon S3 checks the object\n against the provided MD5 value and, if they do not match, returns an error. Additionally,\n you can calculate the MD5 while putting an object to Amazon S3 and compare the returned ETag to\n the calculated MD5 value.

          \n \n

          The Content-MD5 header is required for any request to upload an object\n with a retention period configured using Amazon S3 Object Lock. For more information about\n Amazon S3 Object Lock, see Amazon S3 Object Lock Overview\n in the Amazon Simple Storage Service Developer Guide.

          \n
          \n\n\n

          \n Server-side Encryption\n

          \n

          You can optionally request server-side encryption. With server-side encryption, Amazon S3\n encrypts your data as it writes it to disks in its data centers and decrypts the data when\n you access it. You have the option to provide your own encryption key or use AWS managed\n encryption keys. For more information, see Using Server-Side\n Encryption.

          \n

          \n Access Control List (ACL)-Specific Request\n Headers\n

          \n

          You can use headers to grant ACL-based permissions. By default, all objects are\n private. Only the owner has full access control. When adding a new object, you can grant\n permissions to individual AWS accounts or to predefined groups defined by Amazon S3. These\n permissions are then added to the ACL on the object. For more information, see Access Control List\n (ACL) Overview and Managing ACLs Using the REST\n API.

          \n\n

          \n Storage Class Options\n

          \n

          By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects. The\n STANDARD storage class provides high durability and high availability. Depending on\n performance needs, you can specify a different Storage Class. Amazon S3 on Outposts only uses\n the OUTPOSTS Storage Class. For more information, see Storage Classes in the Amazon S3\n Service Developer Guide.

          \n\n\n

          \n Versioning\n

          \n

          If you enable versioning for a bucket, Amazon S3 automatically generates a unique version ID\n for the object being stored. Amazon S3 returns this ID in the response. When you enable\n versioning for a bucket, if Amazon S3 receives multiple write requests for the same object\n simultaneously, it stores all of the objects.

          \n

          For more information about versioning, see Adding Objects to\n Versioning Enabled Buckets. For information about returning the versioning state\n of a bucket, see GetBucketVersioning.

          \n\n\n

          \n Related Resources\n

          \n ", + "smithy.api#documentation": "

          Adds an object to a bucket. You must have WRITE permissions on a bucket to add an object\n to it.

          \n\n\n

          Amazon S3 never adds partial objects; if you receive a success response, Amazon S3 added the\n entire object to the bucket.

          \n\n

          Amazon S3 is a distributed system. If it receives multiple write requests for the same object\n simultaneously, it overwrites all but the last object written. Amazon S3 does not provide object\n locking; if you need this, make sure to build it into your application layer or use\n versioning instead.

          \n\n

          To ensure that data is not corrupted traversing the network, use the\n Content-MD5 header. When you use this header, Amazon S3 checks the object\n against the provided MD5 value and, if they do not match, returns an error. Additionally,\n you can calculate the MD5 while putting an object to Amazon S3 and compare the returned ETag to\n the calculated MD5 value.

          \n \n

          The Content-MD5 header is required for any request to upload an object\n with a retention period configured using Amazon S3 Object Lock. For more information about\n Amazon S3 Object Lock, see Amazon S3 Object Lock Overview\n in the Amazon Simple Storage Service Developer Guide.

          \n
          \n\n\n

          \n Server-side Encryption\n

          \n

          You can optionally request server-side encryption. With server-side encryption, Amazon S3 encrypts your data as it writes it to disks in its data centers and decrypts the data\n when you access it. You have the option to provide your own encryption key or use AWS\n managed encryption keys (SSE-S3 or SSE-KMS). For more information, see Using Server-Side\n Encryption.

          \n

          If you request server-side encryption using AWS Key Management Service (SSE-KMS), you can enable an S3 Bucket Key at the object-level. For more information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

          \n

          \n Access Control List (ACL)-Specific Request\n Headers\n

          \n

          You can use headers to grant ACL-based permissions. By default, all objects are\n private. Only the owner has full access control. When adding a new object, you can grant\n permissions to individual AWS accounts or to predefined groups defined by Amazon S3. These\n permissions are then added to the ACL on the object. For more information, see Access Control List\n (ACL) Overview and Managing ACLs Using the REST\n API.

          \n\n

          \n Storage Class Options\n

          \n

          By default, Amazon S3 uses the STANDARD Storage Class to store newly created objects. The\n STANDARD storage class provides high durability and high availability. Depending on\n performance needs, you can specify a different Storage Class. Amazon S3 on Outposts only uses\n the OUTPOSTS Storage Class. For more information, see Storage Classes in the Amazon S3\n Service Developer Guide.

          \n\n\n

          \n Versioning\n

          \n

          If you enable versioning for a bucket, Amazon S3 automatically generates a unique version ID\n for the object being stored. Amazon S3 returns this ID in the response. When you enable\n versioning for a bucket, if Amazon S3 receives multiple write requests for the same object\n simultaneously, it stores all of the objects.

          \n

          For more information about versioning, see Adding Objects to\n Versioning Enabled Buckets. For information about returning the versioning state\n of a bucket, see GetBucketVersioning.

          \n\n\n

          \n Related Resources\n

          \n ", "smithy.api#http": { "method": "PUT", "uri": "/{Bucket}/{Key+}?x-id=PutObject", @@ -10167,6 +10219,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-context" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the uploaded object uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestCharged": { "target": "com.amazonaws.s3#RequestCharged", "traits": { @@ -10355,6 +10414,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-context" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Specifies whether Amazon S3 should use an S3 Bucket Key for object encryption with server-side encryption using AWS KMS (SSE-KMS). Setting this header to true causes Amazon S3 to use an S3 Bucket Key for object encryption with SSE-KMS.

          \n

          Specifying this header with a PUT operation doesn’t affect bucket-level settings for S3 Bucket Key.

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestPayer": { "target": "com.amazonaws.s3#RequestPayer", "traits": { @@ -10772,6 +10838,34 @@ "com.amazonaws.s3#ReplicaKmsKeyID": { "type": "string" }, + "com.amazonaws.s3#ReplicaModifications": { + "type": "structure", + "members": { + "Status": { + "target": "com.amazonaws.s3#ReplicaModificationsStatus", + "traits": { + "smithy.api#documentation": "

          Specifies whether Amazon S3 replicates modifications on replicas.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A filter that you can specify for selection for modifications on replicas. Amazon S3 doesn't\n replicate replica modifications by default. In the latest version of replication\n configuration (when Filter is specified), you can specify this element and set\n the status to Enabled to replicate modifications on replicas.

          \n \n

          If you don't specify the Filter element, Amazon S3 assumes that the\n replication configuration is the earlier version, V1. In the earlier version, this\n element is not allowed.

          \n
          " + } + }, + "com.amazonaws.s3#ReplicaModificationsStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Enabled" + }, + { + "value": "Disabled" + } + ] + } + }, "com.amazonaws.s3#ReplicationConfiguration": { "type": "structure", "members": { @@ -10808,7 +10902,7 @@ "Priority": { "target": "com.amazonaws.s3#Priority", "traits": { - "smithy.api#documentation": "

          The priority associated with the rule. If you specify multiple rules in a replication\n configuration, Amazon S3 prioritizes the rules to prevent conflicts when filtering. If two or\n more rules identify the same object based on a specified filter, the rule with higher\n priority takes precedence. For example:

          \n
            \n
          • \n

            Same object quality prefix-based filter criteria if prefixes you specified in\n multiple rules overlap

            \n
          • \n
          • \n

            Same object qualify tag-based filter criteria specified in multiple rules

            \n
          • \n
          \n

          For more information, see Replication in the\n Amazon Simple Storage Service Developer Guide.

          " + "smithy.api#documentation": "

          The priority indicates which rule has precedence whenever two or more replication rules\n conflict. Amazon S3 will attempt to replicate objects according to all replication rules.\n However, if there are two or more rules with the same destination bucket, then objects will\n be replicated according to the rule with the highest priority. The higher the number, the\n higher the priority.

          \n

          For more information, see Replication in the\n Amazon Simple Storage Service Developer Guide.

          " } }, "Prefix": { @@ -11639,6 +11733,12 @@ "traits": { "smithy.api#documentation": "

          Specifies the default server-side encryption to apply to new objects in the bucket. If a\n PUT Object request doesn't specify any server-side encryption, this default encryption will\n be applied.

          " } + }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Specifies whether Amazon S3 should use an S3 Bucket Key with server-side encryption using KMS (SSE-KMS) for new objects in the bucket. Existing objects are not affected. Setting the BucketKeyEnabled element to true causes Amazon S3 to use an S3 Bucket Key. By default, S3 Bucket Key is not enabled.

          \n

          For more information, see Amazon S3 Bucket Keys in the Amazon Simple Storage Service Developer Guide.

          " + } } }, "traits": { @@ -11665,6 +11765,12 @@ "traits": { "smithy.api#documentation": "

          A container for filter information for the selection of Amazon S3 objects encrypted with AWS\n KMS. If you include SourceSelectionCriteria in the replication configuration,\n this element is required.

          " } + }, + "ReplicaModifications": { + "target": "com.amazonaws.s3#ReplicaModifications", + "traits": { + "smithy.api#documentation": "

          A filter that you can specify for selections for modifications on replicas. Amazon S3 doesn't\n replicate replica modifications by default. In the latest version of replication\n configuration (when Filter is specified), you can specify this element and set\n the status to Enabled to replicate modifications on replicas.

          \n \n

          If you don't specify the Filter element, Amazon S3 assumes that the\n replication configuration is the earlier version, V1. In the earlier version, this\n element is not allowed.

          \n
          " + } } }, "traits": { @@ -12174,6 +12280,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-aws-kms-key-id" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestCharged": { "target": "com.amazonaws.s3#RequestCharged", "traits": { @@ -12362,6 +12475,13 @@ "smithy.api#httpHeader": "x-amz-server-side-encryption-aws-kms-key-id" } }, + "BucketKeyEnabled": { + "target": "com.amazonaws.s3#BucketKeyEnabled", + "traits": { + "smithy.api#documentation": "

          Indicates whether the multipart upload uses an S3 Bucket Key for server-side encryption with AWS KMS (SSE-KMS).

          ", + "smithy.api#httpHeader": "x-amz-server-side-encryption-bucket-key-enabled" + } + }, "RequestCharged": { "target": "com.amazonaws.s3#RequestCharged", "traits": { diff --git a/codegen/sdk-codegen/aws-models/sagemaker-featurestore-runtime.2020-07-01.json b/codegen/sdk-codegen/aws-models/sagemaker-featurestore-runtime.2020-07-01.json new file mode 100644 index 000000000000..e4711519978c --- /dev/null +++ b/codegen/sdk-codegen/aws-models/sagemaker-featurestore-runtime.2020-07-01.json @@ -0,0 +1,388 @@ +{ + "smithy": "1.0", + "metadata": { + "suppressions": [ + { + "id": "HttpMethodSemantics", + "namespace": "*" + }, + { + "id": "HttpResponseCodeSemantics", + "namespace": "*" + }, + { + "id": "PaginatedTrait", + "namespace": "*" + }, + { + "id": "HttpHeaderTrait", + "namespace": "*" + }, + { + "id": "HttpUriConflict", + "namespace": "*" + }, + { + "id": "Service", + "namespace": "*" + } + ] + }, + "shapes": { + "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          You do not have permission to perform an action.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 403 + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#AmazonSageMakerFeatureStoreRuntime": { + "type": "service", + "version": "2020-07-01", + "operations": [ + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#DeleteRecord" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#GetRecord" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#PutRecord" + } + ], + "traits": { + "aws.api#service": { + "sdkId": "SageMaker FeatureStore Runtime", + "arnNamespace": "sagemaker", + "cloudFormationName": "SageMakerFeatureStoreRuntime", + "cloudTrailEventSource": "sagemakerfeaturestoreruntime.amazonaws.com" + }, + "aws.auth#sigv4": { + "name": "sagemaker" + }, + "aws.protocols#restJson1": {}, + "smithy.api#documentation": "

          Contains all data plane API operations and data types for the Amazon SageMaker Feature\n Store. Use this API to put, delete, and retrieve (get) features from a feature\n store.

          \n

          Use the following operations to configure your OnlineStore and\n OfflineStore features, and to create and manage feature groups:

          \n ", + "smithy.api#title": "Amazon SageMaker Feature Store Runtime" + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#DeleteRecord": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#DeleteRecordRequest" + }, + "errors": [ + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a Record from a FeatureGroup. A new record will show\n up in the OfflineStore when the DeleteRecord API is called. This\n record will have a value of True in the is_deleted column.

          ", + "smithy.api#http": { + "method": "DELETE", + "uri": "/FeatureGroup/{FeatureGroupName}", + "code": 200 + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#DeleteRecordRequest": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureGroupName", + "traits": { + "smithy.api#documentation": "

          The name of the feature group to delete the record from.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "RecordIdentifierValueAsString": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValueAsString", + "traits": { + "smithy.api#documentation": "

          The value for the RecordIdentifier that uniquely identifies the record, in\n string format.

          ", + "smithy.api#httpQuery": "RecordIdentifierValueAsString", + "smithy.api#required": {} + } + }, + "EventTime": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValueAsString", + "traits": { + "smithy.api#documentation": "

          Timestamp indicating when the deletion event occurred. EventTime can be\n used to query data at a certain point in time.

          ", + "smithy.api#httpQuery": "EventTime", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#FeatureGroupName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#FeatureName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^[a-zA-Z0-9]([-_]*[a-zA-Z0-9])*" + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#FeatureNames": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureName" + }, + "traits": { + "smithy.api#length": { + "min": 1 + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#FeatureValue": { + "type": "structure", + "members": { + "FeatureName": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureName", + "traits": { + "smithy.api#documentation": "

          The name of a feature that a feature value corresponds to.

          ", + "smithy.api#required": {} + } + }, + "ValueAsString": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValueAsString", + "traits": { + "smithy.api#documentation": "

          The value associated with a feature, in string format. Note that feature types can be\n String, Integral, or Fractional. This value represents all three types as a string.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The value associated with a feature.

          " + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#GetRecord": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#GetRecordRequest" + }, + "output": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#GetRecordResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ResourceNotFound" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError" + } + ], + "traits": { + "smithy.api#documentation": "

          Use for OnlineStore serving from a FeatureStore. Only the\n latest records stored in the OnlineStore can be retrieved. If no Record with\n RecordIdentifierValue is found, then an empty result is returned.

          ", + "smithy.api#http": { + "method": "GET", + "uri": "/FeatureGroup/{FeatureGroupName}", + "code": 200 + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#GetRecordRequest": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureGroupName", + "traits": { + "smithy.api#documentation": "

          The name of the feature group in which you want to put the records.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "RecordIdentifierValueAsString": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValueAsString", + "traits": { + "smithy.api#documentation": "

          The value that corresponds to RecordIdentifier type and uniquely identifies\n the record in the FeatureGroup.

          ", + "smithy.api#httpQuery": "RecordIdentifierValueAsString", + "smithy.api#required": {} + } + }, + "FeatureNames": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureNames", + "traits": { + "smithy.api#documentation": "

          List of names of Features to be retrieved. If not specified, the latest values for all\n the Features are returned.

          ", + "smithy.api#httpQuery": "FeatureName" + } + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#GetRecordResponse": { + "type": "structure", + "members": { + "Record": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Record", + "traits": { + "smithy.api#documentation": "

          The record you requested. A list of FeatureValues.

          " + } + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          An internal failure occurred. Try your request again. If the problem \n persists, contact AWS customer support.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 500 + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#Message": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#PutRecord": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#PutRecordRequest" + }, + "errors": [ + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#AccessForbidden" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#InternalFailure" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable" + }, + { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError" + } + ], + "traits": { + "smithy.api#documentation": "

          Used for data ingestion into the FeatureStore. The PutRecord\n API writes to both the OnlineStore and OfflineStore. If the\n record is the latest record for the recordIdentifier, the record is written to\n both the OnlineStore and OfflineStore. If the record is a\n historic record, it is written only to the OfflineStore.

          ", + "smithy.api#http": { + "method": "PUT", + "uri": "/FeatureGroup/{FeatureGroupName}", + "code": 200 + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#PutRecordRequest": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureGroupName", + "traits": { + "smithy.api#documentation": "

          The name of the feature group that you want to insert the record into.

          ", + "smithy.api#httpLabel": {}, + "smithy.api#required": {} + } + }, + "Record": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Record", + "traits": { + "smithy.api#documentation": "

          List of FeatureValues to be inserted. This will be a full over-write. If you only want\n to update few of the feature values, do the following:

          \n
            \n
          • \n

            Use GetRecord to retrieve the latest record.

            \n
          • \n
          • \n

            Update the record returned from GetRecord.

            \n
          • \n
          • \n

            Use PutRecord to update feature values.

            \n
          • \n
          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#Record": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#FeatureValue" + }, + "traits": { + "smithy.api#length": { + "min": 1 + } + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#ResourceNotFound": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          A resource that is required to perform an action was not found.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 404 + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#ServiceUnavailable": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          The service is currently unavailable.

          ", + "smithy.api#error": "server", + "smithy.api#httpError": 503 + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#ValidationError": { + "type": "structure", + "members": { + "Message": { + "target": "com.amazonaws.sagemakerfeaturestoreruntime#Message" + } + }, + "traits": { + "smithy.api#documentation": "

          There was an error validating your request.

          ", + "smithy.api#error": "client", + "smithy.api#httpError": 400 + } + }, + "com.amazonaws.sagemakerfeaturestoreruntime#ValueAsString": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 358400 + }, + "smithy.api#pattern": ".*" + } + } + } +} diff --git a/codegen/sdk-codegen/aws-models/sagemaker.2017-07-24.json b/codegen/sdk-codegen/aws-models/sagemaker.2017-07-24.json index 8301d40bf979..86f2abca27bc 100644 --- a/codegen/sdk-codegen/aws-models/sagemaker.2017-07-24.json +++ b/codegen/sdk-codegen/aws-models/sagemaker.2017-07-24.json @@ -45,6 +45,192 @@ "smithy.api#pattern": "^\\d+$" } }, + "com.amazonaws.sagemaker#ActionArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:action/.*" + } + }, + "com.amazonaws.sagemaker#ActionSource": { + "type": "structure", + "members": { + "SourceUri": { + "target": "com.amazonaws.sagemaker#String2048", + "traits": { + "smithy.api#documentation": "

          The URI of the source.

          ", + "smithy.api#required": {} + } + }, + "SourceType": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          The type of the source.

          " + } + }, + "SourceId": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          The ID of the source.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A structure describing the source of an action.

          " + } + }, + "com.amazonaws.sagemaker#ActionStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Unknown", + "name": "UNKNOWN" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Stopping", + "name": "STOPPING" + }, + { + "value": "Stopped", + "name": "STOPPED" + } + ] + } + }, + "com.amazonaws.sagemaker#ActionSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ActionSummary" + } + }, + "com.amazonaws.sagemaker#ActionSummary": { + "type": "structure", + "members": { + "ActionArn": { + "target": "com.amazonaws.sagemaker#ActionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the action.

          " + } + }, + "ActionName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the action.

          " + } + }, + "Source": { + "target": "com.amazonaws.sagemaker#ActionSource", + "traits": { + "smithy.api#documentation": "

          The source of the action.

          " + } + }, + "ActionType": { + "target": "com.amazonaws.sagemaker#String64", + "traits": { + "smithy.api#documentation": "

          The type of the action.

          " + } + }, + "Status": { + "target": "com.amazonaws.sagemaker#ActionStatus", + "traits": { + "smithy.api#documentation": "

          The status of the action.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the action was created.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the action was last modified.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Lists the properties of an action. An action represents an action\n or activity. Some examples are a workflow step and a model deployment. Generally, an\n action involves at least one input artifact or output artifact.

          " + } + }, + "com.amazonaws.sagemaker#AddAssociation": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#AddAssociationRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#AddAssociationResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates an association between the source and the destination. A\n source can be associated with multiple destinations, and a destination can be associated\n with multiple sources. An association is a lineage tracking entity. For more information, see\n Amazon SageMaker\n ML Lineage Tracking.

          " + } + }, + "com.amazonaws.sagemaker#AddAssociationRequest": { + "type": "structure", + "members": { + "SourceArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", + "traits": { + "smithy.api#documentation": "

          The ARN of the source.

          ", + "smithy.api#required": {} + } + }, + "DestinationArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the destination.

          ", + "smithy.api#required": {} + } + }, + "AssociationType": { + "target": "com.amazonaws.sagemaker#AssociationEdgeType", + "traits": { + "smithy.api#documentation": "

          The type of association. The following are suggested uses for each type. Amazon SageMaker\n places no restrictions on their use.

          \n
            \n
          • \n

            ContributedTo - The source contributed to the destination or had a part in\n enabling the destination. For example, the training data contributed to the training\n job.

            \n
          • \n
          • \n

            AssociatedWith - The source is connected to the destination. For example, an\n approval workflow is associated with a model deployment.

            \n
          • \n
          • \n

            DerivedFrom - The destination is a modification of the source. For example, a digest\n output of a channel input for a processing job is derived from the original inputs.

            \n
          • \n
          • \n

            Produced - The source generated the destination. For example, a training job\n produced a model artifact.

            \n
          • \n
          " + } + } + } + }, + "com.amazonaws.sagemaker#AddAssociationResponse": { + "type": "structure", + "members": { + "SourceArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", + "traits": { + "smithy.api#documentation": "

          The ARN of the source.

          " + } + }, + "DestinationArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the destination.

          " + } + } + } + }, "com.amazonaws.sagemaker#AddTags": { "type": "operation", "input": { @@ -70,7 +256,7 @@ "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of Tag objects. Each tag is a key-value pair. Only the\n key parameter is required. If you don't specify a value, Amazon SageMaker sets the\n value to an empty string.

          ", + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          ", "smithy.api#required": {} } } @@ -99,6 +285,42 @@ } } }, + "com.amazonaws.sagemaker#Alarm": { + "type": "structure", + "members": { + "AlarmName": { + "target": "com.amazonaws.sagemaker#AlarmName", + "traits": { + "smithy.api#documentation": "

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          This API is not supported.

          " + } + }, + "com.amazonaws.sagemaker#AlarmList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Alarm" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + } + } + }, + "com.amazonaws.sagemaker#AlarmName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": "^(?!\\s*$).+" + } + }, "com.amazonaws.sagemaker#AlgorithmArn": { "type": "string", "traits": { @@ -494,7 +716,7 @@ "min": 0, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#AppImageConfigSortKey": { @@ -657,6 +879,9 @@ "target": "com.amazonaws.sagemaker#AppDetails" } }, + "com.amazonaws.sagemaker#AppManaged": { + "type": "boolean" + }, "com.amazonaws.sagemaker#AppName": { "type": "string", "traits": { @@ -664,7 +889,7 @@ "min": 0, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#AppNetworkAccessType": { @@ -766,6 +991,16 @@ ] } }, + "com.amazonaws.sagemaker#ApprovalDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": ".*" + } + }, "com.amazonaws.sagemaker#ArnOrName": { "type": "string", "traits": { @@ -776,850 +1011,867 @@ "smithy.api#pattern": "(arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:[a-z\\-]*\\/)?([a-zA-Z0-9]([a-zA-Z0-9-]){0,62})(?Associates a trial component with a trial. A trial component can be associated with\n multiple trials. To disassociate a trial component from a trial, call the DisassociateTrialComponent API.

          " + "smithy.api#pattern": "^[\\w:]+$" } }, - "com.amazonaws.sagemaker#AssociateTrialComponentRequest": { + "com.amazonaws.sagemaker#ArtifactSource": { "type": "structure", "members": { - "TrialComponentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "SourceUri": { + "target": "com.amazonaws.sagemaker#String2048", "traits": { - "smithy.api#documentation": "

          The name of the component to be associated with the trial.

          ", + "smithy.api#documentation": "

          The URI of the source.

          ", "smithy.api#required": {} } }, - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "SourceTypes": { + "target": "com.amazonaws.sagemaker#ArtifactSourceTypes", "traits": { - "smithy.api#documentation": "

          The name of the trial to associate with.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of source types.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          A structure describing the source of an artifact.

          " } }, - "com.amazonaws.sagemaker#AssociateTrialComponentResponse": { + "com.amazonaws.sagemaker#ArtifactSourceIdType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "MD5Hash", + "name": "MD5_HASH" + }, + { + "value": "S3ETag", + "name": "S3_ETAG" + }, + { + "value": "S3Version", + "name": "S3_VERSION" + }, + { + "value": "Custom", + "name": "CUSTOM" + } + ] + } + }, + "com.amazonaws.sagemaker#ArtifactSourceType": { "type": "structure", "members": { - "TrialComponentArn": { - "target": "com.amazonaws.sagemaker#TrialComponentArn", + "SourceIdType": { + "target": "com.amazonaws.sagemaker#ArtifactSourceIdType", "traits": { - "smithy.api#documentation": "

          The ARN of the trial component.

          " + "smithy.api#documentation": "

          The type of ID.

          ", + "smithy.api#required": {} } }, - "TrialArn": { - "target": "com.amazonaws.sagemaker#TrialArn", + "Value": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " + "smithy.api#documentation": "

          The ID.

          ", + "smithy.api#required": {} } } - } - }, - "com.amazonaws.sagemaker#AttributeName": { - "type": "string", + }, "traits": { - "smithy.api#length": { - "min": 1, - "max": 256 - }, - "smithy.api#pattern": ".+" + "smithy.api#documentation": "

          The ID and ID type of an artifact source.

          " } }, - "com.amazonaws.sagemaker#AttributeNames": { + "com.amazonaws.sagemaker#ArtifactSourceTypes": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#AttributeName" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 16 - } + "target": "com.amazonaws.sagemaker#ArtifactSourceType" } }, - "com.amazonaws.sagemaker#AuthMode": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "SSO", - "name": "SSO" - }, - { - "value": "IAM", - "name": "IAM" - } - ] + "com.amazonaws.sagemaker#ArtifactSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ArtifactSummary" } }, - "com.amazonaws.sagemaker#AutoMLCandidate": { + "com.amazonaws.sagemaker#ArtifactSummary": { "type": "structure", "members": { - "CandidateName": { - "target": "com.amazonaws.sagemaker#CandidateName", - "traits": { - "smithy.api#documentation": "

          The candidate name.

          ", - "smithy.api#required": {} - } - }, - "FinalAutoMLJobObjectiveMetric": { - "target": "com.amazonaws.sagemaker#FinalAutoMLJobObjectiveMetric" - }, - "ObjectiveStatus": { - "target": "com.amazonaws.sagemaker#ObjectiveStatus", + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", "traits": { - "smithy.api#documentation": "

          The objective status.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact.

          " } }, - "CandidateSteps": { - "target": "com.amazonaws.sagemaker#CandidateSteps", + "ArtifactName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The candidate's steps.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the artifact.

          " } }, - "CandidateStatus": { - "target": "com.amazonaws.sagemaker#CandidateStatus", + "Source": { + "target": "com.amazonaws.sagemaker#ArtifactSource", "traits": { - "smithy.api#documentation": "

          The candidate's status.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The source of the artifact.

          " } }, - "InferenceContainers": { - "target": "com.amazonaws.sagemaker#AutoMLContainerDefinitions", + "ArtifactType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The inference containers.

          " + "smithy.api#documentation": "

          The type of the artifact.

          " } }, "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The creation time.

          ", - "smithy.api#required": {} - } - }, - "EndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The end time.

          " + "smithy.api#documentation": "

          When the artifact was created.

          " } }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The last modified time.

          ", - "smithy.api#required": {} - } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#AutoMLFailureReason", - "traits": { - "smithy.api#documentation": "

          The failure reason.

          " + "smithy.api#documentation": "

          When the artifact was last modified.

          " } } }, "traits": { - "smithy.api#documentation": "

          An Autopilot job returns recommendations, or candidates. Each candidate has further details\n about the steps involved, and the status.

          " + "smithy.api#documentation": "

          Lists a summary of the properties of an artifact. An artifact represents a URI\n addressable object or data. Some examples are a dataset and a model.

          " } }, - "com.amazonaws.sagemaker#AutoMLCandidateStep": { + "com.amazonaws.sagemaker#AssemblyType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "None", + "name": "NONE" + }, + { + "value": "Line", + "name": "LINE" + } + ] + } + }, + "com.amazonaws.sagemaker#AssociateTrialComponent": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#AssociateTrialComponentRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#AssociateTrialComponentResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Associates a trial component with a trial. A trial component can be associated with\n multiple trials. To disassociate a trial component from a trial, call the DisassociateTrialComponent API.

          " + } + }, + "com.amazonaws.sagemaker#AssociateTrialComponentRequest": { "type": "structure", "members": { - "CandidateStepType": { - "target": "com.amazonaws.sagemaker#CandidateStepType", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          Whether the Candidate is at the transform, training, or processing step.

          ", + "smithy.api#documentation": "

          The name of the component to be associated with the trial.

          ", "smithy.api#required": {} } }, - "CandidateStepArn": { - "target": "com.amazonaws.sagemaker#CandidateStepArn", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The ARN for the Candidate's step.

          ", + "smithy.api#documentation": "

          The name of the trial to associate with.

          ", "smithy.api#required": {} } + } + } + }, + "com.amazonaws.sagemaker#AssociateTrialComponentResponse": { + "type": "structure", + "members": { + "TrialComponentArn": { + "target": "com.amazonaws.sagemaker#TrialComponentArn", + "traits": { + "smithy.api#documentation": "

          The ARN of the trial component.

          " + } }, - "CandidateStepName": { - "target": "com.amazonaws.sagemaker#CandidateStepName", + "TrialArn": { + "target": "com.amazonaws.sagemaker#TrialArn", "traits": { - "smithy.api#documentation": "

          The name for the Candidate's step.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " } } - }, + } + }, + "com.amazonaws.sagemaker#AssociationEdgeType": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Information about the steps for a Candidate, and what step it is working on.

          " + "smithy.api#enum": [ + { + "value": "ContributedTo", + "name": "CONTRIBUTED_TO" + }, + { + "value": "AssociatedWith", + "name": "ASSOCIATED_WITH" + }, + { + "value": "DerivedFrom", + "name": "DERIVED_FROM" + }, + { + "value": "Produced", + "name": "PRODUCED" + } + ] } }, - "com.amazonaws.sagemaker#AutoMLCandidates": { + "com.amazonaws.sagemaker#AssociationEntityArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:(experiment|experiment-trial-component|artifact|action|context)/.*" + } + }, + "com.amazonaws.sagemaker#AssociationSummaries": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#AutoMLCandidate" + "target": "com.amazonaws.sagemaker#AssociationSummary" } }, - "com.amazonaws.sagemaker#AutoMLChannel": { + "com.amazonaws.sagemaker#AssociationSummary": { "type": "structure", "members": { - "DataSource": { - "target": "com.amazonaws.sagemaker#AutoMLDataSource", + "SourceArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", "traits": { - "smithy.api#documentation": "

          The data source.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The ARN of the source.

          " } }, - "CompressionType": { - "target": "com.amazonaws.sagemaker#CompressionType", + "DestinationArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", "traits": { - "smithy.api#documentation": "

          You can use Gzip or None. The default value is None.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the destination.

          " } }, - "TargetAttributeName": { - "target": "com.amazonaws.sagemaker#TargetAttributeName", + "SourceType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The name of the target variable in supervised learning, a.k.a. 'y'.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The source type.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Similar to Channel. A channel is a named input source that training algorithms can\n consume. Refer to Channel for detailed descriptions.

          " - } - }, - "com.amazonaws.sagemaker#AutoMLContainerDefinition": { - "type": "structure", - "members": { - "Image": { - "target": "com.amazonaws.sagemaker#ContainerImage", + }, + "DestinationType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The ECR path of the container. Refer to ContainerDefinition for more details.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The destination type.

          " } }, - "ModelDataUrl": { - "target": "com.amazonaws.sagemaker#Url", + "AssociationType": { + "target": "com.amazonaws.sagemaker#AssociationEdgeType", "traits": { - "smithy.api#documentation": "

          The location of the model artifacts. Refer to ContainerDefinition for more\n details.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of the association.

          " } }, - "Environment": { - "target": "com.amazonaws.sagemaker#EnvironmentMap", + "SourceName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          Environment variables to set in the container. Refer to ContainerDefinition for more\n details.

          " + "smithy.api#documentation": "

          The name of the source.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          A list of container definitions that describe the different containers that make up one\n AutoML candidate. Refer to ContainerDefinition for more details.

          " - } - }, - "com.amazonaws.sagemaker#AutoMLContainerDefinitions": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#AutoMLContainerDefinition" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 5 - } - } - }, - "com.amazonaws.sagemaker#AutoMLDataSource": { - "type": "structure", - "members": { - "S3DataSource": { - "target": "com.amazonaws.sagemaker#AutoMLS3DataSource", + }, + "DestinationName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The Amazon S3 location of the input data.

          \n \n

          The input data must be in CSV format and contain at least 500 rows.

          \n
          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the destination.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the association was created.

          " } + }, + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" } }, "traits": { - "smithy.api#documentation": "

          The data source for the Autopilot job.

          " + "smithy.api#documentation": "

          Lists a summary of the properties of an association. An association is an entity that\n links other lineage or experiment entities. An example would be an association between a\n training job and a model.

          " } }, - "com.amazonaws.sagemaker#AutoMLFailureReason": { + "com.amazonaws.sagemaker#AthenaCatalog": { "type": "string", "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - } - } - }, - "com.amazonaws.sagemaker#AutoMLInputDataConfig": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#AutoMLChannel" - }, - "traits": { + "smithy.api#documentation": "

          The name of the data catalog used in Athena query execution.

          ", "smithy.api#length": { "min": 1, - "max": 20 - } + "max": 256 + }, + "smithy.api#pattern": "[\\u0020-\\uD7FF\\uE000-\\uFFFD\\uD800\\uDC00-\\uDBFF\\uDFFF\\t]*" } }, - "com.amazonaws.sagemaker#AutoMLJobArn": { + "com.amazonaws.sagemaker#AthenaDatabase": { "type": "string", "traits": { + "smithy.api#documentation": "

          The name of the database used in the Athena query execution.

          ", "smithy.api#length": { "min": 1, - "max": 256 + "max": 255 }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:automl-job/.*" + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#AutoMLJobArtifacts": { + "com.amazonaws.sagemaker#AthenaDatasetDefinition": { "type": "structure", "members": { - "CandidateDefinitionNotebookLocation": { - "target": "com.amazonaws.sagemaker#CandidateDefinitionNotebookLocation", + "Catalog": { + "target": "com.amazonaws.sagemaker#AthenaCatalog", "traits": { - "smithy.api#documentation": "

          The URL to the notebook location.

          " + "smithy.api#required": {} } }, - "DataExplorationNotebookLocation": { - "target": "com.amazonaws.sagemaker#DataExplorationNotebookLocation", - "traits": { - "smithy.api#documentation": "

          The URL to the notebook location.

          " - } - } - }, - "traits": { - "smithy.api#documentation": "

          Artifacts that are generated during a job.

          " - } - }, - "com.amazonaws.sagemaker#AutoMLJobCompletionCriteria": { - "type": "structure", - "members": { - "MaxCandidates": { - "target": "com.amazonaws.sagemaker#MaxCandidates", + "Database": { + "target": "com.amazonaws.sagemaker#AthenaDatabase", "traits": { - "smithy.api#documentation": "

          The maximum number of times a training job is allowed to run.

          " + "smithy.api#required": {} } }, - "MaxRuntimePerTrainingJobInSeconds": { - "target": "com.amazonaws.sagemaker#MaxRuntimePerTrainingJobInSeconds", + "QueryString": { + "target": "com.amazonaws.sagemaker#AthenaQueryString", "traits": { - "smithy.api#documentation": "

          The maximum time, in seconds, a job is allowed to run.

          " + "smithy.api#required": {} } }, - "MaxAutoMLJobRuntimeInSeconds": { - "target": "com.amazonaws.sagemaker#MaxAutoMLJobRuntimeInSeconds", + "WorkGroup": { + "target": "com.amazonaws.sagemaker#AthenaWorkGroup" + }, + "OutputS3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          The maximum time, in seconds, an AutoML job is allowed to wait for a trial to complete.\n It must be equal to or greater than MaxRuntimePerTrainingJobInSeconds.

          " + "smithy.api#documentation": "

          The location in Amazon S3 where Athena query results are stored.

          ", + "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          How long a job is allowed to run, or how many candidates a job is allowed to\n generate.

          " - } - }, - "com.amazonaws.sagemaker#AutoMLJobConfig": { - "type": "structure", - "members": { - "CompletionCriteria": { - "target": "com.amazonaws.sagemaker#AutoMLJobCompletionCriteria", + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          How long a job is allowed to run, or how many candidates a job is allowed to\n generate.

          " + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data generated from\n an Athena query execution.

          " } }, - "SecurityConfig": { - "target": "com.amazonaws.sagemaker#AutoMLSecurityConfig", + "OutputFormat": { + "target": "com.amazonaws.sagemaker#AthenaResultFormat", "traits": { - "smithy.api#documentation": "

          Security configuration for traffic encryption or Amazon VPC settings.

          " + "smithy.api#required": {} } + }, + "OutputCompression": { + "target": "com.amazonaws.sagemaker#AthenaResultCompressionType" } }, "traits": { - "smithy.api#documentation": "

          A collection of settings used for a job.

          " + "smithy.api#documentation": "

          Configuration for Athena Dataset Definition input.

          " } }, - "com.amazonaws.sagemaker#AutoMLJobName": { + "com.amazonaws.sagemaker#AthenaQueryString": { "type": "string", "traits": { + "smithy.api#documentation": "

          The SQL query statements to be executed.

          ", "smithy.api#length": { "min": 1, - "max": 32 + "max": 4096 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#AutoMLJobObjective": { - "type": "structure", - "members": { - "MetricName": { - "target": "com.amazonaws.sagemaker#AutoMLMetricEnum", - "traits": { - "smithy.api#documentation": "

          The name of the objective metric used to measure the predictive quality of a machine\n learning system. This metric is optimized during training to provide the best estimate for\n model parameter values from data.

          \n

          Here are the options:

          \n
            \n
          • \n

            \n MSE: The mean squared error (MSE) is\n the average of the squared differences between the predicted and actual values. It is\n used for regression. MSE values are always positive, the better a model is at\n predicting the actual values the smaller the MSE value. When the data contains\n outliers, they tend to dominate the MSE which might cause subpar prediction\n performance.

            \n
          • \n
          • \n

            \n Accuracy: The ratio of the number\n correctly classified items to the total number (correctly and incorrectly)\n classified. It is used for binary and multiclass classification. Measures how close\n the predicted class values are to the actual values. Accuracy values vary between\n zero and one, one being perfect accuracy and zero perfect inaccuracy.

            \n
          • \n
          • \n

            \n F1: The F1 score is the harmonic\n mean of the precision and recall. It is used for binary classification into classes\n traditionally referred to as positive and negative. Predictions are said to be true\n when they match their actual (correct) class; false when they do not. Precision is\n the ratio of the true positive predictions to all positive predictions (including the\n false positives) in a data set and measures the quality of the prediction when it\n predicts the positive class. Recall (or sensitivity) is the ratio of the true\n positive predictions to all actual positive instances and measures how completely a\n model predicts the actual class members in a data set. The standard F1 score weighs\n precision and recall equally. But which metric is paramount typically depends on\n specific aspects of a problem. F1 scores vary between zero and one, one being the\n best possible performance and zero the worst.

            \n
          • \n
          • \n

            \n AUC: The area under the curve (AUC)\n metric is used to compare and evaluate binary classification by algorithms such as\n logistic regression that return probabilities. A threshold is needed to map the\n probabilities into classifications. The relevant curve is the receiver operating\n characteristic curve that plots the true positive rate (TPR) of predictions (or\n recall) against the false positive rate (FPR) as a function of the threshold value,\n above which a prediction is considered positive. Increasing the threshold results in\n fewer false positives but more false negatives. AUC is the area under this receiver\n operating characteristic curve and so provides an aggregated measure of the model\n performance across all possible classification thresholds. The AUC score can also be\n interpreted as the probability that a randomly selected positive data point is more\n likely to be predicted positive than a randomly selected negative example. AUC scores\n vary between zero and one, one being perfect accuracy and one half not better than a\n random classifier. Values less that one half predict worse than a random predictor\n and such consistently bad predictors can be inverted to obtain better than random\n predictors.

            \n
          • \n
          • \n

            \n F1macro: The F1macro score applies\n F1 scoring to multiclass classification. In this context, you have multiple classes\n to predict. You just calculate the precision and recall for each class as you did for\n the positive class in binary classification. Then used these values to calculate the\n F1 score for each class and average them to obtain the F1macro score. F1macro scores\n vary between zero and one, one being the best possible performance and zero the\n worst.

            \n
          • \n
          \n

          If you do not specify a metric explicitly, the default behavior is to automatically\n use:

          \n
            \n
          • \n

            \n MSE: for regression.

            \n
          • \n
          • \n

            \n F1: for binary classification

            \n
          • \n
          • \n

            \n Accuracy: for multiclass classification.

            \n
          • \n
          ", - "smithy.api#required": {} - } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies a metric to minimize or maximize as the objective of a job.

          " + "smithy.api#pattern": "[\\s\\S]+" } }, - "com.amazonaws.sagemaker#AutoMLJobObjectiveType": { + "com.amazonaws.sagemaker#AthenaResultCompressionType": { "type": "string", "traits": { + "smithy.api#documentation": "

          The compression used for Athena query results.

          ", "smithy.api#enum": [ { - "value": "Maximize", - "name": "MAXIMIZE" + "value": "GZIP", + "name": "GZIP" }, { - "value": "Minimize", - "name": "MINIMIZE" + "value": "SNAPPY", + "name": "SNAPPY" + }, + { + "value": "ZLIB", + "name": "ZLIB" } ] } }, - "com.amazonaws.sagemaker#AutoMLJobSecondaryStatus": { + "com.amazonaws.sagemaker#AthenaResultFormat": { "type": "string", "traits": { + "smithy.api#documentation": "

          The data storage format for Athena query results.

          ", "smithy.api#enum": [ { - "value": "Starting", - "name": "STARTING" - }, - { - "value": "AnalyzingData", - "name": "ANALYZING_DATA" - }, - { - "value": "FeatureEngineering", - "name": "FEATURE_ENGINEERING" - }, - { - "value": "ModelTuning", - "name": "MODEL_TUNING" - }, - { - "value": "MaxCandidatesReached", - "name": "MAX_CANDIDATES_REACHED" - }, - { - "value": "Failed", - "name": "FAILED" + "value": "PARQUET", + "name": "PARQUET" }, { - "value": "Stopped", - "name": "STOPPED" + "value": "ORC", + "name": "ORC" }, { - "value": "MaxAutoMLJobRuntimeReached", - "name": "MAX_AUTO_ML_JOB_RUNTIME_REACHED" + "value": "AVRO", + "name": "AVRO" }, { - "value": "Stopping", - "name": "STOPPING" + "value": "JSON", + "name": "JSON" }, { - "value": "CandidateDefinitionsGenerated", - "name": "CANDIDATE_DEFINITIONS_GENERATED" + "value": "TEXTFILE", + "name": "TEXTFILE" } ] } }, - "com.amazonaws.sagemaker#AutoMLJobStatus": { + "com.amazonaws.sagemaker#AthenaWorkGroup": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The name of the workgroup in which the Athena query is being started.

          ", + "smithy.api#length": { + "min": 1, + "max": 128 + }, + "smithy.api#pattern": "[a-zA-Z0-9._-]+" + } + }, + "com.amazonaws.sagemaker#AttributeName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".+" + } + }, + "com.amazonaws.sagemaker#AttributeNames": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#AttributeName" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 16 + } + } + }, + "com.amazonaws.sagemaker#AuthMode": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Completed", - "name": "COMPLETED" - }, - { - "value": "InProgress", - "name": "IN_PROGRESS" - }, - { - "value": "Failed", - "name": "FAILED" - }, - { - "value": "Stopped", - "name": "STOPPED" + "value": "SSO", + "name": "SSO" }, { - "value": "Stopping", - "name": "STOPPING" + "value": "IAM", + "name": "IAM" } ] } }, - "com.amazonaws.sagemaker#AutoMLJobSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#AutoMLJobSummary" - } - }, - "com.amazonaws.sagemaker#AutoMLJobSummary": { + "com.amazonaws.sagemaker#AutoMLCandidate": { "type": "structure", "members": { - "AutoMLJobName": { - "target": "com.amazonaws.sagemaker#AutoMLJobName", + "CandidateName": { + "target": "com.amazonaws.sagemaker#CandidateName", "traits": { - "smithy.api#documentation": "

          The name of the object you are requesting.

          ", + "smithy.api#documentation": "

          The candidate name.

          ", "smithy.api#required": {} } }, - "AutoMLJobArn": { - "target": "com.amazonaws.sagemaker#AutoMLJobArn", + "FinalAutoMLJobObjectiveMetric": { + "target": "com.amazonaws.sagemaker#FinalAutoMLJobObjectiveMetric" + }, + "ObjectiveStatus": { + "target": "com.amazonaws.sagemaker#ObjectiveStatus", "traits": { - "smithy.api#documentation": "

          The ARN of the job.

          ", + "smithy.api#documentation": "

          The objective status.

          ", "smithy.api#required": {} } }, - "AutoMLJobStatus": { - "target": "com.amazonaws.sagemaker#AutoMLJobStatus", + "CandidateSteps": { + "target": "com.amazonaws.sagemaker#CandidateSteps", "traits": { - "smithy.api#documentation": "

          The job's status.

          ", + "smithy.api#documentation": "

          The candidate's steps.

          ", "smithy.api#required": {} } }, - "AutoMLJobSecondaryStatus": { - "target": "com.amazonaws.sagemaker#AutoMLJobSecondaryStatus", + "CandidateStatus": { + "target": "com.amazonaws.sagemaker#CandidateStatus", "traits": { - "smithy.api#documentation": "

          The job's secondary status.

          ", + "smithy.api#documentation": "

          The candidate's status.

          ", "smithy.api#required": {} } }, + "InferenceContainers": { + "target": "com.amazonaws.sagemaker#AutoMLContainerDefinitions", + "traits": { + "smithy.api#documentation": "

          The inference containers.

          " + } + }, "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the job was created.

          ", + "smithy.api#documentation": "

          The creation time.

          ", "smithy.api#required": {} } }, "EndTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The end time of an AutoML job.

          " + "smithy.api#documentation": "

          The end time.

          " } }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the job was last modified.

          ", + "smithy.api#documentation": "

          The last modified time.

          ", "smithy.api#required": {} } }, "FailureReason": { "target": "com.amazonaws.sagemaker#AutoMLFailureReason", "traits": { - "smithy.api#documentation": "

          The failure reason of a job.

          " + "smithy.api#documentation": "

          The failure reason.

          " } } }, "traits": { - "smithy.api#documentation": "

          Provides a summary about a job.

          " + "smithy.api#documentation": "

          An Autopilot job returns recommendations, or candidates. Each candidate has further details\n about the steps involved, and the status.

          " } }, - "com.amazonaws.sagemaker#AutoMLMaxResults": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 1, - "max": 100 + "com.amazonaws.sagemaker#AutoMLCandidateStep": { + "type": "structure", + "members": { + "CandidateStepType": { + "target": "com.amazonaws.sagemaker#CandidateStepType", + "traits": { + "smithy.api#documentation": "

          Whether the Candidate is at the transform, training, or processing step.

          ", + "smithy.api#required": {} + } + }, + "CandidateStepArn": { + "target": "com.amazonaws.sagemaker#CandidateStepArn", + "traits": { + "smithy.api#documentation": "

          The ARN for the Candidate's step.

          ", + "smithy.api#required": {} + } + }, + "CandidateStepName": { + "target": "com.amazonaws.sagemaker#CandidateStepName", + "traits": { + "smithy.api#documentation": "

          The name for the Candidate's step.

          ", + "smithy.api#required": {} + } } + }, + "traits": { + "smithy.api#documentation": "

          Information about the steps for a Candidate, and what step it is working on.

          " } }, - "com.amazonaws.sagemaker#AutoMLMetricEnum": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Accuracy", - "name": "ACCURACY" - }, - { - "value": "MSE", - "name": "MSE" - }, - { - "value": "F1", - "name": "F1" - }, - { - "value": "F1macro", - "name": "F1_MACRO" - }, - { - "value": "AUC", - "name": "AUC" - } - ] - } - }, - "com.amazonaws.sagemaker#AutoMLNameContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 - }, - "smithy.api#pattern": "[a-zA-Z0-9\\-]+" + "com.amazonaws.sagemaker#AutoMLCandidates": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#AutoMLCandidate" } }, - "com.amazonaws.sagemaker#AutoMLOutputDataConfig": { + "com.amazonaws.sagemaker#AutoMLChannel": { "type": "structure", "members": { - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "DataSource": { + "target": "com.amazonaws.sagemaker#AutoMLDataSource", "traits": { - "smithy.api#documentation": "

          The AWS KMS encryption key ID.

          " + "smithy.api#documentation": "

          The data source.

          ", + "smithy.api#required": {} } }, - "S3OutputPath": { - "target": "com.amazonaws.sagemaker#S3Uri", + "CompressionType": { + "target": "com.amazonaws.sagemaker#CompressionType", "traits": { - "smithy.api#documentation": "

          The Amazon S3 output path. Must be 128 characters or less.

          ", + "smithy.api#documentation": "

          You can use Gzip or None. The default value is None.

          " + } + }, + "TargetAttributeName": { + "target": "com.amazonaws.sagemaker#TargetAttributeName", + "traits": { + "smithy.api#documentation": "

          The name of the target variable in supervised learning, a.k.a. 'y'.

          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          The output data configuration.

          " + "smithy.api#documentation": "

          Similar to Channel. A channel is a named input source that training algorithms can\n consume. Refer to Channel for detailed descriptions.

          " } }, - "com.amazonaws.sagemaker#AutoMLS3DataSource": { + "com.amazonaws.sagemaker#AutoMLContainerDefinition": { "type": "structure", "members": { - "S3DataType": { - "target": "com.amazonaws.sagemaker#AutoMLS3DataType", + "Image": { + "target": "com.amazonaws.sagemaker#ContainerImage", "traits": { - "smithy.api#documentation": "

          The data type.

          ", + "smithy.api#documentation": "

          The ECR path of the container. Refer to ContainerDefinition for more details.

          ", "smithy.api#required": {} } }, - "S3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", + "ModelDataUrl": { + "target": "com.amazonaws.sagemaker#Url", "traits": { - "smithy.api#documentation": "

          The URL to the Amazon S3 data source.

          ", + "smithy.api#documentation": "

          The location of the model artifacts. Refer to ContainerDefinition for more\n details.

          ", "smithy.api#required": {} } + }, + "Environment": { + "target": "com.amazonaws.sagemaker#EnvironmentMap", + "traits": { + "smithy.api#documentation": "

          Environment variables to set in the container. Refer to ContainerDefinition for more\n details.

          " + } } }, "traits": { - "smithy.api#documentation": "

          The Amazon S3 data source.

          " + "smithy.api#documentation": "

          A list of container definitions that describe the different containers that make up one\n AutoML candidate. Refer to ContainerDefinition for more details.

          " } }, - "com.amazonaws.sagemaker#AutoMLS3DataType": { + "com.amazonaws.sagemaker#AutoMLContainerDefinitions": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#AutoMLContainerDefinition" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 5 + } + } + }, + "com.amazonaws.sagemaker#AutoMLDataSource": { + "type": "structure", + "members": { + "S3DataSource": { + "target": "com.amazonaws.sagemaker#AutoMLS3DataSource", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 location of the input data.

          \n \n

          The input data must be in CSV format and contain at least 500 rows.

          \n
          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The data source for the Autopilot job.

          " + } + }, + "com.amazonaws.sagemaker#AutoMLFailureReason": { "type": "string", "traits": { - "smithy.api#enum": [ - { - "value": "ManifestFile", - "name": "MANIFEST_FILE" - }, - { - "value": "S3Prefix", - "name": "S3_PREFIX" + "smithy.api#length": { + "min": 0, + "max": 1024 + } + } + }, + "com.amazonaws.sagemaker#AutoMLInputDataConfig": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#AutoMLChannel" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#AutoMLJobArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:automl-job/.*" + } + }, + "com.amazonaws.sagemaker#AutoMLJobArtifacts": { + "type": "structure", + "members": { + "CandidateDefinitionNotebookLocation": { + "target": "com.amazonaws.sagemaker#CandidateDefinitionNotebookLocation", + "traits": { + "smithy.api#documentation": "

          The URL to the notebook location.

          " } - ] + }, + "DataExplorationNotebookLocation": { + "target": "com.amazonaws.sagemaker#DataExplorationNotebookLocation", + "traits": { + "smithy.api#documentation": "

          The URL to the notebook location.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Artifacts that are generated during a job.

          " } }, - "com.amazonaws.sagemaker#AutoMLSecurityConfig": { + "com.amazonaws.sagemaker#AutoMLJobCompletionCriteria": { "type": "structure", "members": { - "VolumeKmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "MaxCandidates": { + "target": "com.amazonaws.sagemaker#MaxCandidates", "traits": { - "smithy.api#documentation": "

          The key used to encrypt stored data.

          " + "smithy.api#documentation": "

          The maximum number of times a training job is allowed to run.

          " } }, - "EnableInterContainerTrafficEncryption": { - "target": "com.amazonaws.sagemaker#Boolean", + "MaxRuntimePerTrainingJobInSeconds": { + "target": "com.amazonaws.sagemaker#MaxRuntimePerTrainingJobInSeconds", "traits": { - "smithy.api#documentation": "

          Whether to use traffic encryption between the container layers.

          " + "smithy.api#documentation": "

          The maximum time, in seconds, a job is allowed to run.

          " } }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig", + "MaxAutoMLJobRuntimeInSeconds": { + "target": "com.amazonaws.sagemaker#MaxAutoMLJobRuntimeInSeconds", "traits": { - "smithy.api#documentation": "

          VPC configuration.

          " + "smithy.api#documentation": "

          The maximum time, in seconds, an AutoML job is allowed to wait for a trial to complete.\n It must be equal to or greater than MaxRuntimePerTrainingJobInSeconds.

          " } } }, "traits": { - "smithy.api#documentation": "

          Security options.

          " + "smithy.api#documentation": "

          How long a job is allowed to run, or how many candidates a job is allowed to\n generate.

          " } }, - "com.amazonaws.sagemaker#AutoMLSortBy": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "NAME" - }, - { - "value": "CreationTime", - "name": "CREATION_TIME" - }, - { - "value": "Status", - "name": "STATUS" + "com.amazonaws.sagemaker#AutoMLJobConfig": { + "type": "structure", + "members": { + "CompletionCriteria": { + "target": "com.amazonaws.sagemaker#AutoMLJobCompletionCriteria", + "traits": { + "smithy.api#documentation": "

          How long a job is allowed to run, or how many candidates a job is allowed to\n generate.

          " } - ] + }, + "SecurityConfig": { + "target": "com.amazonaws.sagemaker#AutoMLSecurityConfig", + "traits": { + "smithy.api#documentation": "

          Security configuration for traffic encryption or Amazon VPC settings.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A collection of settings used for a job.

          " } }, - "com.amazonaws.sagemaker#AutoMLSortOrder": { + "com.amazonaws.sagemaker#AutoMLJobName": { "type": "string", "traits": { - "smithy.api#enum": [ - { - "value": "Ascending", - "name": "ASCENDING" - }, - { - "value": "Descending", - "name": "DESCENDING" + "smithy.api#length": { + "min": 1, + "max": 32 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,31}" + } + }, + "com.amazonaws.sagemaker#AutoMLJobObjective": { + "type": "structure", + "members": { + "MetricName": { + "target": "com.amazonaws.sagemaker#AutoMLMetricEnum", + "traits": { + "smithy.api#documentation": "

          The name of the objective metric used to measure the predictive quality of a machine\n learning system. This metric is optimized during training to provide the best estimate for\n model parameter values from data.

          \n

          Here are the options:

          \n
            \n
          • \n

            \n MSE: The mean squared error (MSE) is\n the average of the squared differences between the predicted and actual values. It is\n used for regression. MSE values are always positive, the better a model is at\n predicting the actual values the smaller the MSE value. When the data contains\n outliers, they tend to dominate the MSE which might cause subpar prediction\n performance.

            \n
          • \n
          • \n

            \n Accuracy: The ratio of the number\n correctly classified items to the total number (correctly and incorrectly)\n classified. It is used for binary and multiclass classification. Measures how close\n the predicted class values are to the actual values. Accuracy values vary between\n zero and one, one being perfect accuracy and zero perfect inaccuracy.

            \n
          • \n
          • \n

            \n F1: The F1 score is the harmonic\n mean of the precision and recall. It is used for binary classification into classes\n traditionally referred to as positive and negative. Predictions are said to be true\n when they match their actual (correct) class; false when they do not. Precision is\n the ratio of the true positive predictions to all positive predictions (including the\n false positives) in a data set and measures the quality of the prediction when it\n predicts the positive class. Recall (or sensitivity) is the ratio of the true\n positive predictions to all actual positive instances and measures how completely a\n model predicts the actual class members in a data set. The standard F1 score weighs\n precision and recall equally. But which metric is paramount typically depends on\n specific aspects of a problem. F1 scores vary between zero and one, one being the\n best possible performance and zero the worst.

            \n
          • \n
          • \n

            \n AUC: The area under the curve (AUC)\n metric is used to compare and evaluate binary classification by algorithms such as\n logistic regression that return probabilities. A threshold is needed to map the\n probabilities into classifications. The relevant curve is the receiver operating\n characteristic curve that plots the true positive rate (TPR) of predictions (or\n recall) against the false positive rate (FPR) as a function of the threshold value,\n above which a prediction is considered positive. Increasing the threshold results in\n fewer false positives but more false negatives. AUC is the area under this receiver\n operating characteristic curve and so provides an aggregated measure of the model\n performance across all possible classification thresholds. The AUC score can also be\n interpreted as the probability that a randomly selected positive data point is more\n likely to be predicted positive than a randomly selected negative example. AUC scores\n vary between zero and one, one being perfect accuracy and one half not better than a\n random classifier. Values less than one half predict worse than a random predictor\n and such consistently bad predictors can be inverted to obtain better than random\n predictors.

            \n
          • \n
          • \n

            \n F1macro: The F1macro score applies\n F1 scoring to multiclass classification. In this context, you have multiple classes\n to predict. You just calculate the precision and recall for each class as you did for\n the positive class in binary classification. Then used these values to calculate the\n F1 score for each class and average them to obtain the F1macro score. F1macro scores\n vary between zero and one, one being the best possible performance and zero the\n worst.

            \n
          • \n
          \n

          If you do not specify a metric explicitly, the default behavior is to automatically\n use:

          \n
            \n
          • \n

            \n MSE: for regression.

            \n
          • \n
          • \n

            \n F1: for binary classification

            \n
          • \n
          • \n

            \n Accuracy: for multiclass classification.

            \n
          • \n
          ", + "smithy.api#required": {} } - ] + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies a metric to minimize or maximize as the objective of a job.

          " } }, - "com.amazonaws.sagemaker#AwsManagedHumanLoopRequestSource": { + "com.amazonaws.sagemaker#AutoMLJobObjectiveType": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "AWS/Rekognition/DetectModerationLabels/Image/V3", - "name": "REKOGNITION_DETECT_MODERATION_LABELS_IMAGE_V3" + "value": "Maximize", + "name": "MAXIMIZE" }, { - "value": "AWS/Textract/AnalyzeDocument/Forms/V1", - "name": "TEXTRACT_ANALYZE_DOCUMENT_FORMS_V1" + "value": "Minimize", + "name": "MINIMIZE" } ] } }, - "com.amazonaws.sagemaker#BatchStrategy": { + "com.amazonaws.sagemaker#AutoMLJobSecondaryStatus": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "MultiRecord", - "name": "MULTI_RECORD" + "value": "Starting", + "name": "STARTING" }, { - "value": "SingleRecord", - "name": "SINGLE_RECORD" - } - ] - } - }, - "com.amazonaws.sagemaker#BillableTimeInSeconds": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1 - } - } - }, - "com.amazonaws.sagemaker#Boolean": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#BooleanOperator": { - "type": "string", - "traits": { - "smithy.api#enum": [ + "value": "AnalyzingData", + "name": "ANALYZING_DATA" + }, { - "value": "And", - "name": "AND" + "value": "FeatureEngineering", + "name": "FEATURE_ENGINEERING" }, { - "value": "Or", - "name": "OR" - } - ] - } - }, - "com.amazonaws.sagemaker#Branch": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 1024 - }, - "smithy.api#pattern": "[^ ~^:?*\\[]+" - } - }, - "com.amazonaws.sagemaker#CandidateDefinitionNotebookLocation": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1 - } - } - }, - "com.amazonaws.sagemaker#CandidateName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 64 - } - } - }, - "com.amazonaws.sagemaker#CandidateSortBy": { - "type": "string", - "traits": { - "smithy.api#enum": [ + "value": "ModelTuning", + "name": "MODEL_TUNING" 
+ }, { - "value": "CreationTime", - "name": "CreationTime" + "value": "MaxCandidatesReached", + "name": "MAX_CANDIDATES_REACHED" }, { - "value": "Status", - "name": "Status" + "value": "Failed", + "name": "FAILED" }, { - "value": "FinalObjectiveMetricValue", - "name": "FinalObjectiveMetricValue" + "value": "Stopped", + "name": "STOPPED" + }, + { + "value": "MaxAutoMLJobRuntimeReached", + "name": "MAX_AUTO_ML_JOB_RUNTIME_REACHED" + }, + { + "value": "Stopping", + "name": "STOPPING" + }, + { + "value": "CandidateDefinitionsGenerated", + "name": "CANDIDATE_DEFINITIONS_GENERATED" } ] } }, - "com.amazonaws.sagemaker#CandidateStatus": { + "com.amazonaws.sagemaker#AutoMLJobStatus": { "type": "string", "traits": { "smithy.api#enum": [ @@ -1646,619 +1898,664 @@ ] } }, - "com.amazonaws.sagemaker#CandidateStepArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 256 + "com.amazonaws.sagemaker#AutoMLJobSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#AutoMLJobSummary" + } + }, + "com.amazonaws.sagemaker#AutoMLJobSummary": { + "type": "structure", + "members": { + "AutoMLJobName": { + "target": "com.amazonaws.sagemaker#AutoMLJobName", + "traits": { + "smithy.api#documentation": "

          The name of the object you are requesting.

          ", + "smithy.api#required": {} + } }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:.*/.*" + "AutoMLJobArn": { + "target": "com.amazonaws.sagemaker#AutoMLJobArn", + "traits": { + "smithy.api#documentation": "

          The ARN of the job.

          ", + "smithy.api#required": {} + } + }, + "AutoMLJobStatus": { + "target": "com.amazonaws.sagemaker#AutoMLJobStatus", + "traits": { + "smithy.api#documentation": "

          The job's status.

          ", + "smithy.api#required": {} + } + }, + "AutoMLJobSecondaryStatus": { + "target": "com.amazonaws.sagemaker#AutoMLJobSecondaryStatus", + "traits": { + "smithy.api#documentation": "

          The job's secondary status.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the job was created.

          ", + "smithy.api#required": {} + } + }, + "EndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The end time of an AutoML job.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the job was last modified.

          ", + "smithy.api#required": {} + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#AutoMLFailureReason", + "traits": { + "smithy.api#documentation": "

          The failure reason of a job.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides a summary about a job.

          " } }, - "com.amazonaws.sagemaker#CandidateStepName": { - "type": "string", + "com.amazonaws.sagemaker#AutoMLMaxResults": { + "type": "integer", "traits": { - "smithy.api#length": { + "smithy.api#range": { "min": 1, - "max": 64 + "max": 100 } } }, - "com.amazonaws.sagemaker#CandidateStepType": { + "com.amazonaws.sagemaker#AutoMLMetricEnum": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "AWS::SageMaker::TrainingJob", - "name": "TRAINING" + "value": "Accuracy", + "name": "ACCURACY" }, { - "value": "AWS::SageMaker::TransformJob", - "name": "TRANSFORM" + "value": "MSE", + "name": "MSE" }, { - "value": "AWS::SageMaker::ProcessingJob", - "name": "PROCESSING" + "value": "F1", + "name": "F1" + }, + { + "value": "F1macro", + "name": "F1_MACRO" + }, + { + "value": "AUC", + "name": "AUC" } ] } }, - "com.amazonaws.sagemaker#CandidateSteps": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#AutoMLCandidateStep" + "com.amazonaws.sagemaker#AutoMLNameContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "[a-zA-Z0-9\\-]+" } }, - "com.amazonaws.sagemaker#CaptureContentTypeHeader": { + "com.amazonaws.sagemaker#AutoMLOutputDataConfig": { "type": "structure", "members": { - "CsvContentTypes": { - "target": "com.amazonaws.sagemaker#CsvContentTypes", + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          " + "smithy.api#documentation": "

          The AWS KMS encryption key ID.

          " } }, - "JsonContentTypes": { - "target": "com.amazonaws.sagemaker#JsonContentTypes", + "S3OutputPath": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          " + "smithy.api#documentation": "

          The Amazon S3 output path. Must be 128 characters or less.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          " - } - }, - "com.amazonaws.sagemaker#CaptureMode": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Input", - "name": "INPUT" - }, - { - "value": "Output", - "name": "OUTPUT" - } - ] + "smithy.api#documentation": "

          The output data configuration.

          " } }, - "com.amazonaws.sagemaker#CaptureOption": { + "com.amazonaws.sagemaker#AutoMLS3DataSource": { "type": "structure", "members": { - "CaptureMode": { - "target": "com.amazonaws.sagemaker#CaptureMode", + "S3DataType": { + "target": "com.amazonaws.sagemaker#AutoMLS3DataType", "traits": { - "smithy.api#documentation": "

          ", + "smithy.api#documentation": "

          The data type.

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          " - } - }, - "com.amazonaws.sagemaker#CaptureOptionList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CaptureOption" + }, + "S3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The URL to the Amazon S3 data source.

          ", + "smithy.api#required": {} + } + } }, "traits": { - "smithy.api#length": { - "min": 1, - "max": 2 - } + "smithy.api#documentation": "

          The Amazon S3 data source.

          " } }, - "com.amazonaws.sagemaker#CaptureStatus": { + "com.amazonaws.sagemaker#AutoMLS3DataType": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Started", - "name": "STARTED" + "value": "ManifestFile", + "name": "MANIFEST_FILE" }, { - "value": "Stopped", - "name": "STOPPED" + "value": "S3Prefix", + "name": "S3_PREFIX" } ] } }, - "com.amazonaws.sagemaker#CategoricalParameterRange": { + "com.amazonaws.sagemaker#AutoMLSecurityConfig": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.sagemaker#ParameterKey", + "VolumeKmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          The name of the categorical hyperparameter to tune.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The key used to encrypt stored data.

          " } }, - "Values": { - "target": "com.amazonaws.sagemaker#ParameterValues", + "EnableInterContainerTrafficEncryption": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          A list of the categories\n for\n the hyperparameter.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Whether to use traffic encryption between the container layers.

          " + } + }, + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig", + "traits": { + "smithy.api#documentation": "

          VPC configuration.

          " } } }, "traits": { - "smithy.api#documentation": "

          A list of categorical hyperparameters to tune.

          " + "smithy.api#documentation": "

          Security options.

          " } }, - "com.amazonaws.sagemaker#CategoricalParameterRangeSpecification": { + "com.amazonaws.sagemaker#AutoMLSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + }, + { + "value": "Status", + "name": "STATUS" + } + ] + } + }, + "com.amazonaws.sagemaker#AutoMLSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Ascending", + "name": "ASCENDING" + }, + { + "value": "Descending", + "name": "DESCENDING" + } + ] + } + }, + "com.amazonaws.sagemaker#AutoRollbackConfig": { "type": "structure", "members": { - "Values": { - "target": "com.amazonaws.sagemaker#ParameterValues", + "Alarms": { + "target": "com.amazonaws.sagemaker#AlarmList", "traits": { - "smithy.api#documentation": "

          The allowed categories for the hyperparameter.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          " } } }, "traits": { - "smithy.api#documentation": "

          Defines the possible values for a categorical hyperparameter.

          " + "smithy.api#documentation": "

          Currently, the AutoRollbackConfig API is not supported.

          " } }, - "com.amazonaws.sagemaker#CategoricalParameterRanges": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CategoricalParameterRange" - }, + "com.amazonaws.sagemaker#AwsManagedHumanLoopRequestSource": { + "type": "string", "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 + "smithy.api#enum": [ + { + "value": "AWS/Rekognition/DetectModerationLabels/Image/V3", + "name": "REKOGNITION_DETECT_MODERATION_LABELS_IMAGE_V3" + }, + { + "value": "AWS/Textract/AnalyzeDocument/Forms/V1", + "name": "TEXTRACT_ANALYZE_DOCUMENT_FORMS_V1" + } + ] + } + }, + "com.amazonaws.sagemaker#BatchStrategy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "MultiRecord", + "name": "MULTI_RECORD" + }, + { + "value": "SingleRecord", + "name": "SINGLE_RECORD" + } + ] + } + }, + "com.amazonaws.sagemaker#Bias": { + "type": "structure", + "members": { + "Report": { + "target": "com.amazonaws.sagemaker#MetricsSource", + "traits": { + "smithy.api#documentation": "

          The bias report for a model.

          " + } } + }, + "traits": { + "smithy.api#documentation": "

          Contains bias metrics for a model.

          " } }, - "com.amazonaws.sagemaker#Cents": { + "com.amazonaws.sagemaker#BillableTimeInSeconds": { "type": "integer", "traits": { + "smithy.api#box": {}, "smithy.api#range": { - "min": 0, - "max": 99 + "min": 1 } } }, - "com.amazonaws.sagemaker#CertifyForMarketplace": { - "type": "boolean" + "com.amazonaws.sagemaker#BlockedReason": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + } + } }, - "com.amazonaws.sagemaker#Channel": { + "com.amazonaws.sagemaker#BlueGreenUpdatePolicy": { "type": "structure", "members": { - "ChannelName": { - "target": "com.amazonaws.sagemaker#ChannelName", - "traits": { - "smithy.api#documentation": "

          The name of the channel.

          ", - "smithy.api#required": {} - } - }, - "DataSource": { - "target": "com.amazonaws.sagemaker#DataSource", + "TrafficRoutingConfiguration": { + "target": "com.amazonaws.sagemaker#TrafficRoutingConfig", "traits": { - "smithy.api#documentation": "

          The location of the channel data.

          ", + "smithy.api#documentation": "

          ", "smithy.api#required": {} } }, - "ContentType": { - "target": "com.amazonaws.sagemaker#ContentType", - "traits": { - "smithy.api#documentation": "

          The MIME type of the data.

          " - } - }, - "CompressionType": { - "target": "com.amazonaws.sagemaker#CompressionType", - "traits": { - "smithy.api#documentation": "

          If training data is compressed, the compression type. The default value is\n None. CompressionType is used only in Pipe input mode. In\n File mode, leave this field unset or set it to None.

          " - } - }, - "RecordWrapperType": { - "target": "com.amazonaws.sagemaker#RecordWrapper", - "traits": { - "smithy.api#documentation": "

          \n

          Specify RecordIO as the value when input data is in raw format but the training\n algorithm requires the RecordIO format. In this case, Amazon SageMaker wraps each individual S3\n object in a RecordIO record. If the input data is already in RecordIO format, you don't\n need to set this attribute. For more information, see Create\n a Dataset Using RecordIO.

          \n

          In File mode, leave this field unset or set it to None.

          " - } - }, - "InputMode": { - "target": "com.amazonaws.sagemaker#TrainingInputMode", + "TerminationWaitInSeconds": { + "target": "com.amazonaws.sagemaker#TerminationWaitInSeconds", "traits": { - "smithy.api#documentation": "

          (Optional) The input mode to use for the data channel in a training job. If you don't\n set a value for InputMode, Amazon SageMaker uses the value set for\n TrainingInputMode. Use this parameter to override the\n TrainingInputMode setting in a AlgorithmSpecification\n request when you have a channel that needs a different input mode from the training\n job's general setting. To download the data from Amazon Simple Storage Service (Amazon S3) to the provisioned ML\n storage volume, and mount the directory to a Docker volume, use File input\n mode. To stream data directly from Amazon S3 to the container, choose Pipe input\n mode.

          \n

          To use a model for incremental training, choose File input model.

          " + "smithy.api#documentation": "

          " } }, - "ShuffleConfig": { - "target": "com.amazonaws.sagemaker#ShuffleConfig", + "MaximumExecutionTimeoutInSeconds": { + "target": "com.amazonaws.sagemaker#MaximumExecutionTimeoutInSeconds", "traits": { - "smithy.api#documentation": "

          A configuration for a shuffle option for input data in a channel. If you use\n S3Prefix for S3DataType, this shuffles the results of the\n S3 key prefix matches. If you use ManifestFile, the order of the S3 object\n references in the ManifestFile is shuffled. If you use\n AugmentedManifestFile, the order of the JSON lines in the\n AugmentedManifestFile is shuffled. The shuffling order is determined\n using the Seed value.

          \n

          For Pipe input mode, shuffling is done at the start of every epoch. With large\n datasets this ensures that the order of the training data is different for each epoch,\n it helps reduce bias and possible overfitting. In a multi-node training job when\n ShuffleConfig is combined with S3DataDistributionType of\n ShardedByS3Key, the data is shuffled across nodes so that the content\n sent to a particular node on the first epoch might be sent to a different node on the\n second epoch.

          " + "smithy.api#documentation": "

          " } } }, "traits": { - "smithy.api#documentation": "

          A channel is a named input source that training algorithms can consume.

          " + "smithy.api#documentation": "

          Currently, the BlueGreenUpdatePolicy API is not supported.

          " } }, - "com.amazonaws.sagemaker#ChannelName": { + "com.amazonaws.sagemaker#Boolean": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#BooleanOperator": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "And", + "name": "AND" + }, + { + "value": "Or", + "name": "OR" + } + ] + } + }, + "com.amazonaws.sagemaker#Branch": { "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 64 + "max": 1024 }, - "smithy.api#pattern": "[A-Za-z0-9\\.\\-_]+" + "smithy.api#pattern": "[^ ~^:?*\\[]+" } }, - "com.amazonaws.sagemaker#ChannelSpecification": { + "com.amazonaws.sagemaker#CacheHitResult": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.sagemaker#ChannelName", - "traits": { - "smithy.api#documentation": "

          The name of the channel.

          ", - "smithy.api#required": {} - } - }, - "Description": { - "target": "com.amazonaws.sagemaker#EntityDescription", - "traits": { - "smithy.api#documentation": "

          A brief description of the channel.

          " - } - }, - "IsRequired": { - "target": "com.amazonaws.sagemaker#Boolean", - "traits": { - "smithy.api#documentation": "

          Indicates whether the channel is required by the algorithm.

          " - } - }, - "SupportedContentTypes": { - "target": "com.amazonaws.sagemaker#ContentTypes", - "traits": { - "smithy.api#documentation": "

          The supported MIME types for the data.

          ", - "smithy.api#required": {} - } - }, - "SupportedCompressionTypes": { - "target": "com.amazonaws.sagemaker#CompressionTypes", - "traits": { - "smithy.api#documentation": "

          The allowed compression types, if data compression is used.

          " - } - }, - "SupportedInputModes": { - "target": "com.amazonaws.sagemaker#InputModes", + "SourcePipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          The allowed input mode, either FILE or PIPE.

          \n

          In FILE mode, Amazon SageMaker copies the data from the input source onto the local\n Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm.\n This is the most commonly used input mode.

          \n

          In PIPE mode, Amazon SageMaker streams input data from the source directly to your\n algorithm without using the EBS volume.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " } } }, "traits": { - "smithy.api#documentation": "

          Defines a named input source, called a channel, to be used by an algorithm.

          " + "smithy.api#documentation": "

          Details on the cache hit of a pipeline execution step.

          " } }, - "com.amazonaws.sagemaker#ChannelSpecifications": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ChannelSpecification" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 8 - } - } - }, - "com.amazonaws.sagemaker#CheckpointConfig": { - "type": "structure", - "members": { - "S3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", - "traits": { - "smithy.api#documentation": "

          Identifies the S3 path where you want Amazon SageMaker to store checkpoints. For example,\n s3://bucket-name/key-name-prefix.

          ", - "smithy.api#required": {} - } - }, - "LocalPath": { - "target": "com.amazonaws.sagemaker#DirectoryPath", - "traits": { - "smithy.api#documentation": "

          (Optional) The local directory where checkpoints are written. The default directory is\n /opt/ml/checkpoints/.

          " - } - } - }, - "traits": { - "smithy.api#documentation": "

          Contains information about the output location for managed spot training checkpoint\n data.

          " - } - }, - "com.amazonaws.sagemaker#Cidr": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 4, - "max": 64 - }, - "smithy.api#pattern": "(^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\\/(3[0-2]|[1-2][0-9]|[0-9]))$)|(^s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:)))(%.+)?s*(\\/(12[0-8]|1[0-1][0-9]|[1-9][0-9]|[0-9]))$)" - } - }, - "com.amazonaws.sagemaker#Cidrs": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#Cidr" - } - }, - "com.amazonaws.sagemaker#ClientId": { + "com.amazonaws.sagemaker#CandidateDefinitionNotebookLocation": { "type": "string", "traits": { "smithy.api#length": { - "min": 1, - "max": 128 - }, - "smithy.api#pattern": "[\\w+-]+" + "min": 1 + } } }, - "com.amazonaws.sagemaker#ClientSecret": { + "com.amazonaws.sagemaker#CandidateName": { "type": "string", "traits": { "smithy.api#length": { "min": 1, "max": 64 - }, - "smithy.api#pattern": "[\\w+=/-]+", - "smithy.api#sensitive": {} - } - }, - 
"com.amazonaws.sagemaker#ClientToken": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 36 - }, - "smithy.api#pattern": "^[a-zA-Z0-9-]+$" + } } }, - "com.amazonaws.sagemaker#CodeRepositoryArn": { + "com.amazonaws.sagemaker#CandidateSortBy": { "type": "string", "traits": { - "smithy.api#length": { - "min": 1, - "max": 2048 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:code-repository/.*" + "smithy.api#enum": [ + { + "value": "CreationTime", + "name": "CreationTime" + }, + { + "value": "Status", + "name": "Status" + }, + { + "value": "FinalObjectiveMetricValue", + "name": "FinalObjectiveMetricValue" + } + ] } }, - "com.amazonaws.sagemaker#CodeRepositoryContains": { + "com.amazonaws.sagemaker#CandidateStatus": { "type": "string", "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" + "smithy.api#enum": [ + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Stopped", + "name": "STOPPED" + }, + { + "value": "Stopping", + "name": "STOPPING" + } + ] } }, - "com.amazonaws.sagemaker#CodeRepositoryNameContains": { + "com.amazonaws.sagemaker#CandidateStepArn": { "type": "string", "traits": { "smithy.api#length": { - "min": 0, - "max": 63 + "min": 1, + "max": 256 }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:.*/.*" } }, - "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl": { + "com.amazonaws.sagemaker#CandidateStepName": { "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 1024 - }, - "smithy.api#pattern": "^https://([^/]+)/?(.*)$|^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "max": 64 + } } }, - "com.amazonaws.sagemaker#CodeRepositorySortBy": { + "com.amazonaws.sagemaker#CandidateStepType": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": 
"Name", - "name": "NAME" + "value": "AWS::SageMaker::TrainingJob", + "name": "TRAINING" }, { - "value": "CreationTime", - "name": "CREATION_TIME" + "value": "AWS::SageMaker::TransformJob", + "name": "TRANSFORM" }, { - "value": "LastModifiedTime", - "name": "LAST_MODIFIED_TIME" + "value": "AWS::SageMaker::ProcessingJob", + "name": "PROCESSING" } ] } }, - "com.amazonaws.sagemaker#CodeRepositorySortOrder": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Ascending", - "name": "ASCENDING" - }, - { - "value": "Descending", - "name": "DESCENDING" - } - ] + "com.amazonaws.sagemaker#CandidateSteps": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#AutoMLCandidateStep" } }, - "com.amazonaws.sagemaker#CodeRepositorySummary": { + "com.amazonaws.sagemaker#CapacitySize": { "type": "structure", "members": { - "CodeRepositoryName": { - "target": "com.amazonaws.sagemaker#EntityName", - "traits": { - "smithy.api#documentation": "

          The name of the Git repository.

          ", - "smithy.api#required": {} - } - }, - "CodeRepositoryArn": { - "target": "com.amazonaws.sagemaker#CodeRepositoryArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Git repository.

          ", - "smithy.api#required": {} - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "Type": { + "target": "com.amazonaws.sagemaker#CapacitySizeType", "traits": { - "smithy.api#documentation": "

          The date and time that the Git repository was created.

          ", + "smithy.api#documentation": "

          This API is not supported.

          ", "smithy.api#required": {} } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "Value": { + "target": "com.amazonaws.sagemaker#CapacitySizeValue", "traits": { - "smithy.api#documentation": "

          The date and time that the Git repository was last modified.

          ", + "smithy.api#documentation": "

          ", "smithy.api#required": {} } - }, - "GitConfig": { - "target": "com.amazonaws.sagemaker#GitConfig", - "traits": { - "smithy.api#documentation": "

          Configuration details for the Git repository, including the URL where it is located\n and the ARN of the AWS Secrets Manager secret that contains the credentials used to\n access the repository.

          " - } } }, "traits": { - "smithy.api#documentation": "

          Specifies summary information about a Git repository.

          " + "smithy.api#documentation": "

          Currently, the CapacitySize API is not supported.

          " } }, - "com.amazonaws.sagemaker#CodeRepositorySummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CodeRepositorySummary" + "com.amazonaws.sagemaker#CapacitySizeType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "INSTANCE_COUNT", + "name": "INSTANCE_COUNT" + }, + { + "value": "CAPACITY_PERCENT", + "name": "CAPACITY_PERCENT" + } + ] } }, - "com.amazonaws.sagemaker#CognitoConfig": { + "com.amazonaws.sagemaker#CapacitySizeValue": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#CaptureContentTypeHeader": { "type": "structure", "members": { - "UserPool": { - "target": "com.amazonaws.sagemaker#CognitoUserPool", + "CsvContentTypes": { + "target": "com.amazonaws.sagemaker#CsvContentTypes", "traits": { - "smithy.api#documentation": "

          A \n user pool is a user directory in Amazon Cognito. \n With a user pool, your users can sign in to your web or mobile app through Amazon Cognito. \n Your users can also sign in through social identity providers like \n Google, Facebook, Amazon, or Apple, and through SAML identity providers.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          " } }, - "ClientId": { - "target": "com.amazonaws.sagemaker#ClientId", + "JsonContentTypes": { + "target": "com.amazonaws.sagemaker#JsonContentTypes", "traits": { - "smithy.api#documentation": "

          The client ID for your Amazon Cognito user pool.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          " } } }, "traits": { - "smithy.api#documentation": "

          Use this parameter to configure your Amazon Cognito workforce. \n A single Cognito workforce is created using and corresponds to a single\n \n Amazon Cognito user pool.

          " + "smithy.api#documentation": "

          " } }, - "com.amazonaws.sagemaker#CognitoMemberDefinition": { + "com.amazonaws.sagemaker#CaptureMode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Input", + "name": "INPUT" + }, + { + "value": "Output", + "name": "OUTPUT" + } + ] + } + }, + "com.amazonaws.sagemaker#CaptureOption": { "type": "structure", "members": { - "UserPool": { - "target": "com.amazonaws.sagemaker#CognitoUserPool", + "CaptureMode": { + "target": "com.amazonaws.sagemaker#CaptureMode", "traits": { - "smithy.api#documentation": "

          An identifier for a user pool. The user pool must be in the same region as the service\n that you are calling.

          ", - "smithy.api#required": {} - } - }, - "UserGroup": { - "target": "com.amazonaws.sagemaker#CognitoUserGroup", - "traits": { - "smithy.api#documentation": "

          An identifier for a user group.

          ", - "smithy.api#required": {} - } - }, - "ClientId": { - "target": "com.amazonaws.sagemaker#ClientId", - "traits": { - "smithy.api#documentation": "

          An identifier for an application client. You must create the app client ID using\n Amazon Cognito.

          ", + "smithy.api#documentation": "

          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Identifies a Amazon Cognito user group. A user group can be used in on or more work\n teams.

          " + "smithy.api#documentation": "

          " } }, - "com.amazonaws.sagemaker#CognitoUserGroup": { - "type": "string", + "com.amazonaws.sagemaker#CaptureOptionList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CaptureOption" + }, "traits": { "smithy.api#length": { "min": 1, - "max": 128 - }, - "smithy.api#pattern": "[\\p{L}\\p{M}\\p{S}\\p{N}\\p{P}]+" + "max": 2 + } } }, - "com.amazonaws.sagemaker#CognitoUserPool": { + "com.amazonaws.sagemaker#CaptureStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Started", + "name": "STARTED" + }, + { + "value": "Stopped", + "name": "STOPPED" + } + ] + } + }, + "com.amazonaws.sagemaker#Catalog": { "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 55 + "max": 255 }, - "smithy.api#pattern": "[\\w-]+_[0-9a-zA-Z]+" + "smithy.api#pattern": "[\\u0020-\\uD7FF\\uE000-\\uFFFD\\uD800\\uDC00-\\uDBFF\\uDFFF\\t]*" } }, - "com.amazonaws.sagemaker#CollectionConfiguration": { + "com.amazonaws.sagemaker#CategoricalParameterRange": { "type": "structure", "members": { - "CollectionName": { - "target": "com.amazonaws.sagemaker#CollectionName", + "Name": { + "target": "com.amazonaws.sagemaker#ParameterKey", "traits": { - "smithy.api#documentation": "

          The name of the tensor collection. The name must be unique relative to other rule configuration names.

          " + "smithy.api#documentation": "

          The name of the categorical hyperparameter to tune.

          ", + "smithy.api#required": {} } }, - "CollectionParameters": { - "target": "com.amazonaws.sagemaker#CollectionParameters", + "Values": { + "target": "com.amazonaws.sagemaker#ParameterValues", "traits": { - "smithy.api#documentation": "

          Parameter values for the tensor collection. The allowed parameters are\n \"name\", \"include_regex\", \"reduction_config\",\n \"save_config\", \"tensor_names\", and\n \"save_histogram\".

          " + "smithy.api#documentation": "

          A list of the categories\n for\n the hyperparameter.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Configuration information for tensor collections.

          " + "smithy.api#documentation": "

          A list of categorical hyperparameters to tune.

          " } }, - "com.amazonaws.sagemaker#CollectionConfigurations": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CollectionConfiguration" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 + "com.amazonaws.sagemaker#CategoricalParameterRangeSpecification": { + "type": "structure", + "members": { + "Values": { + "target": "com.amazonaws.sagemaker#ParameterValues", + "traits": { + "smithy.api#documentation": "

          The allowed categories for the hyperparameter.

          ", + "smithy.api#required": {} + } } - } - }, - "com.amazonaws.sagemaker#CollectionName": { - "type": "string", + }, "traits": { - "smithy.api#length": { - "min": 1, - "max": 256 - }, - "smithy.api#pattern": ".*" + "smithy.api#documentation": "

          Defines the possible values for a categorical hyperparameter.

          " } }, - "com.amazonaws.sagemaker#CollectionParameters": { - "type": "map", - "key": { - "target": "com.amazonaws.sagemaker#ConfigKey" - }, - "value": { - "target": "com.amazonaws.sagemaker#ConfigValue" + "com.amazonaws.sagemaker#CategoricalParameterRanges": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CategoricalParameterRange" }, "traits": { "smithy.api#length": { @@ -2267,986 +2564,1031 @@ } } }, - "com.amazonaws.sagemaker#CompilationJobArn": { - "type": "string", + "com.amazonaws.sagemaker#Cents": { + "type": "integer", "traits": { - "smithy.api#length": { + "smithy.api#range": { "min": 0, - "max": 256 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:compilation-job/.*" - } - }, - "com.amazonaws.sagemaker#CompilationJobStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "INPROGRESS", - "name": "INPROGRESS" - }, - { - "value": "COMPLETED", - "name": "COMPLETED" - }, - { - "value": "FAILED", - "name": "FAILED" - }, - { - "value": "STARTING", - "name": "STARTING" - }, - { - "value": "STOPPING", - "name": "STOPPING" - }, - { - "value": "STOPPED", - "name": "STOPPED" - } - ] + "max": 99 + } } }, - "com.amazonaws.sagemaker#CompilationJobSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CompilationJobSummary" - } + "com.amazonaws.sagemaker#CertifyForMarketplace": { + "type": "boolean" }, - "com.amazonaws.sagemaker#CompilationJobSummary": { + "com.amazonaws.sagemaker#Channel": { "type": "structure", "members": { - "CompilationJobName": { - "target": "com.amazonaws.sagemaker#EntityName", + "ChannelName": { + "target": "com.amazonaws.sagemaker#ChannelName", "traits": { - "smithy.api#documentation": "

          The name of the model compilation job that you want a summary for.

          ", + "smithy.api#documentation": "

          The name of the channel.

          ", "smithy.api#required": {} } }, - "CompilationJobArn": { - "target": "com.amazonaws.sagemaker#CompilationJobArn", + "DataSource": { + "target": "com.amazonaws.sagemaker#DataSource", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model compilation job.

          ", + "smithy.api#documentation": "

          The location of the channel data.

          ", "smithy.api#required": {} } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "ContentType": { + "target": "com.amazonaws.sagemaker#ContentType", "traits": { - "smithy.api#documentation": "

          The time when the model compilation job was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The MIME type of the data.

          " } }, - "CompilationStartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "CompressionType": { + "target": "com.amazonaws.sagemaker#CompressionType", "traits": { - "smithy.api#documentation": "

          The time when the model compilation job started.

          " + "smithy.api#documentation": "

          If training data is compressed, the compression type. The default value is\n None. CompressionType is used only in Pipe input mode. In\n File mode, leave this field unset or set it to None.

          " } }, - "CompilationEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "RecordWrapperType": { + "target": "com.amazonaws.sagemaker#RecordWrapper", "traits": { - "smithy.api#documentation": "

          The time when the model compilation job completed.

          " + "smithy.api#documentation": "

          \n

          Specify RecordIO as the value when input data is in raw format but the training\n algorithm requires the RecordIO format. In this case, Amazon SageMaker wraps each individual S3\n object in a RecordIO record. If the input data is already in RecordIO format, you don't\n need to set this attribute. For more information, see Create\n a Dataset Using RecordIO.

          \n

          In File mode, leave this field unset or set it to None.

          " } }, - "CompilationTargetDevice": { - "target": "com.amazonaws.sagemaker#TargetDevice", + "InputMode": { + "target": "com.amazonaws.sagemaker#TrainingInputMode", "traits": { - "smithy.api#documentation": "

          The type of device that the model will run on after the compilation job has\n completed.

          " + "smithy.api#documentation": "

          (Optional) The input mode to use for the data channel in a training job. If you don't\n set a value for InputMode, Amazon SageMaker uses the value set for\n TrainingInputMode. Use this parameter to override the\n TrainingInputMode setting in a AlgorithmSpecification\n request when you have a channel that needs a different input mode from the training\n job's general setting. To download the data from Amazon Simple Storage Service (Amazon S3) to the provisioned ML\n storage volume, and mount the directory to a Docker volume, use File input\n mode. To stream data directly from Amazon S3 to the container, choose Pipe input\n mode.

          \n

          To use a model for incremental training, choose File input mode.

          " } }, - "CompilationTargetPlatformOs": { - "target": "com.amazonaws.sagemaker#TargetPlatformOs", + "ShuffleConfig": { + "target": "com.amazonaws.sagemaker#ShuffleConfig", "traits": { - "smithy.api#documentation": "

          The type of OS that the model will run on after the compilation job has\n completed.

          " + "smithy.api#documentation": "

          A configuration for a shuffle option for input data in a channel. If you use\n S3Prefix for S3DataType, this shuffles the results of the\n S3 key prefix matches. If you use ManifestFile, the order of the S3 object\n references in the ManifestFile is shuffled. If you use\n AugmentedManifestFile, the order of the JSON lines in the\n AugmentedManifestFile is shuffled. The shuffling order is determined\n using the Seed value.

          \n

          For Pipe input mode, shuffling is done at the start of every epoch. With large\n datasets this ensures that the order of the training data is different for each epoch,\n which helps reduce bias and possible overfitting. In a multi-node training job when\n ShuffleConfig is combined with S3DataDistributionType of\n ShardedByS3Key, the data is shuffled across nodes so that the content\n sent to a particular node on the first epoch might be sent to a different node on the\n second epoch.

          " } + } + }, + "traits": { + "smithy.api#documentation": "

          A channel is a named input source that training algorithms can consume.

          " + } + }, + "com.amazonaws.sagemaker#ChannelName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 }, - "CompilationTargetPlatformArch": { - "target": "com.amazonaws.sagemaker#TargetPlatformArch", + "smithy.api#pattern": "[A-Za-z0-9\\.\\-_]+" + } + }, + "com.amazonaws.sagemaker#ChannelSpecification": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.sagemaker#ChannelName", "traits": { - "smithy.api#documentation": "

          The type of architecture that the model will run on after the compilation job has\n completed.

          " + "smithy.api#documentation": "

          The name of the channel.

          ", + "smithy.api#required": {} } }, - "CompilationTargetPlatformAccelerator": { - "target": "com.amazonaws.sagemaker#TargetPlatformAccelerator", + "Description": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The type of accelerator that the model will run on after the compilation job has\n completed.

          " + "smithy.api#documentation": "

          A brief description of the channel.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "IsRequired": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The time when the model compilation job was last modified.

          " + "smithy.api#documentation": "

          Indicates whether the channel is required by the algorithm.

          " } }, - "CompilationJobStatus": { - "target": "com.amazonaws.sagemaker#CompilationJobStatus", + "SupportedContentTypes": { + "target": "com.amazonaws.sagemaker#ContentTypes", "traits": { - "smithy.api#documentation": "

          The status of the model compilation job.

          ", + "smithy.api#documentation": "

          The supported MIME types for the data.

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          A summary of a model compilation job.

          " - } - }, - "com.amazonaws.sagemaker#CompilerOptions": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 3, - "max": 1024 }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#CompressionType": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "None", - "name": "NONE" - }, - { - "value": "Gzip", - "name": "GZIP" + "SupportedCompressionTypes": { + "target": "com.amazonaws.sagemaker#CompressionTypes", + "traits": { + "smithy.api#documentation": "

          The allowed compression types, if data compression is used.

          " } - ] + }, + "SupportedInputModes": { + "target": "com.amazonaws.sagemaker#InputModes", + "traits": { + "smithy.api#documentation": "

          The allowed input mode, either FILE or PIPE.

          \n

          In FILE mode, Amazon SageMaker copies the data from the input source onto the local\n Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm.\n This is the most commonly used input mode.

          \n

          In PIPE mode, Amazon SageMaker streams input data from the source directly to your\n algorithm without using the EBS volume.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines a named input source, called a channel, to be used by an algorithm.

          " } }, - "com.amazonaws.sagemaker#CompressionTypes": { + "com.amazonaws.sagemaker#ChannelSpecifications": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#CompressionType" - } - }, - "com.amazonaws.sagemaker#ConfigKey": { - "type": "string", + "target": "com.amazonaws.sagemaker#ChannelSpecification" + }, "traits": { "smithy.api#length": { "min": 1, - "max": 256 - }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#ConfigValue": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": ".*" + "max": 8 + } } }, - "com.amazonaws.sagemaker#ConflictException": { + "com.amazonaws.sagemaker#CheckpointConfig": { "type": "structure", "members": { - "Message": { - "target": "com.amazonaws.sagemaker#FailureReason" + "S3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          Identifies the S3 path where you want Amazon SageMaker to store checkpoints. For example,\n s3://bucket-name/key-name-prefix.

          ", + "smithy.api#required": {} + } + }, + "LocalPath": { + "target": "com.amazonaws.sagemaker#DirectoryPath", + "traits": { + "smithy.api#documentation": "

          (Optional) The local directory where checkpoints are written. The default directory is\n /opt/ml/checkpoints/.

          " + } } }, "traits": { - "smithy.api#documentation": "

          There was a conflict when you attempted to modify an experiment, trial, or trial\n component.

          ", - "smithy.api#error": "client" + "smithy.api#documentation": "

          Contains information about the output location for managed spot training checkpoint\n data.

          " } }, - "com.amazonaws.sagemaker#ContainerArgument": { + "com.amazonaws.sagemaker#Cidr": { "type": "string", "traits": { "smithy.api#length": { - "min": 0, - "max": 256 + "min": 4, + "max": 64 }, - "smithy.api#pattern": ".*" + "smithy.api#pattern": "(^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\\/(3[0-2]|[1-2][0-9]|[0-9]))$)|(^s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]d|1dd|[1-9]?d)(.(25[0-5]|2[0-4]d|1dd|[1-9]?d)){3}))|:)))(%.+)?s*(\\/(12[0-8]|1[0-1][0-9]|[1-9][0-9]|[0-9]))$)" } }, - "com.amazonaws.sagemaker#ContainerArguments": { + "com.amazonaws.sagemaker#Cidrs": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ContainerArgument" - }, + "target": "com.amazonaws.sagemaker#Cidr" + } + }, + "com.amazonaws.sagemaker#ClientId": { + "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 100 - } + "max": 128 + }, + "smithy.api#pattern": "[\\w+-]+" } }, - "com.amazonaws.sagemaker#ContainerDefinition": { - "type": "structure", - "members": { - "ContainerHostname": { - "target": 
"com.amazonaws.sagemaker#ContainerHostname", - "traits": { - "smithy.api#documentation": "

          This parameter is ignored for models that contain only a\n PrimaryContainer.

          \n

          When a ContainerDefinition is part of an inference pipeline, the value of\n the parameter uniquely identifies the container for the purposes of logging and metrics.\n For information, see Use Logs and Metrics\n to Monitor an Inference Pipeline. If you don't specify a value for this\n parameter for a ContainerDefinition that is part of an inference pipeline,\n a unique name is automatically assigned based on the position of the\n ContainerDefinition in the pipeline. If you specify a value for the\n ContainerHostName for any ContainerDefinition that is part\n of an inference pipeline, you must specify a value for the\n ContainerHostName parameter of every ContainerDefinition\n in that pipeline.

          " - } - }, - "Image": { - "target": "com.amazonaws.sagemaker#ContainerImage", - "traits": { - "smithy.api#documentation": "

          The path where inference code is stored. This can be either in Amazon EC2 Container Registry or in a\n Docker registry that is accessible from the same VPC that you configure for your\n endpoint. If you are using your\n own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must\n meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and\n registry/repository[@digest] image path formats. For more information,\n see Using\n Your Own Algorithms with Amazon SageMaker\n

          " - } - }, - "ImageConfig": { - "target": "com.amazonaws.sagemaker#ImageConfig", - "traits": { - "smithy.api#documentation": "

          Specifies whether the model container is in Amazon ECR or a private Docker registry\n accessible from your\n Amazon Virtual Private Cloud (VPC). For information about storing containers in a private Docker registry,\n see Use a Private Docker\n Registry for Real-Time Inference Containers\n

          " - } - }, - "Mode": { - "target": "com.amazonaws.sagemaker#ContainerMode", - "traits": { - "smithy.api#documentation": "

          Whether the container hosts a single model or multiple models.

          " - } - }, - "ModelDataUrl": { - "target": "com.amazonaws.sagemaker#Url", - "traits": { - "smithy.api#documentation": "

          The S3 path where the model artifacts, which result from model training, are stored.\n This path must point to a single gzip compressed tar archive (.tar.gz suffix). The S3\n path is required for Amazon SageMaker built-in algorithms, but not if you use your own algorithms.\n For more information on built-in algorithms, see Common\n Parameters.

          \n \n

          The model artifacts must be in an S3 bucket that is in the same region as the\n model or endpoint you are creating.

          \n
          \n

          If you provide a value for this parameter, Amazon SageMaker uses AWS Security Token Service to\n download model artifacts from the S3 path you provide. AWS STS is activated in your\n IAM user account by default. If you previously deactivated AWS STS for a region, you\n need to reactivate AWS STS for that region. For more information, see Activating and\n Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User\n Guide.

          \n \n

          If you use a built-in algorithm to create a model, Amazon SageMaker requires that you provide\n a S3 path to the model artifacts in ModelDataUrl.

          \n
          " - } - }, - "Environment": { - "target": "com.amazonaws.sagemaker#EnvironmentMap", - "traits": { - "smithy.api#documentation": "

          The environment variables to set in the Docker container. Each key and value in the\n Environment string to string map can have length of up to 1024. We\n support up to 16 entries in the map.

          " - } - }, - "ModelPackageName": { - "target": "com.amazonaws.sagemaker#VersionedArnOrName", - "traits": { - "smithy.api#documentation": "

          The name or Amazon Resource Name (ARN) of the model package to use to create the\n model.

          " - } - } - }, + "com.amazonaws.sagemaker#ClientSecret": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Describes the container, as part of model definition.

          " + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "[\\w+=/-]+", + "smithy.api#sensitive": {} } }, - "com.amazonaws.sagemaker#ContainerDefinitionList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ContainerDefinition" - }, + "com.amazonaws.sagemaker#ClientToken": { + "type": "string", "traits": { "smithy.api#length": { - "min": 0, - "max": 5 - } + "min": 1, + "max": 36 + }, + "smithy.api#pattern": "^[a-zA-Z0-9-]+$" } }, - "com.amazonaws.sagemaker#ContainerEntrypoint": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ContainerEntrypointString" - }, + "com.amazonaws.sagemaker#CodeRepositoryArn": { + "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 100 - } + "max": 2048 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:code-repository/.*" } }, - "com.amazonaws.sagemaker#ContainerEntrypointString": { + "com.amazonaws.sagemaker#CodeRepositoryContains": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 256 + "max": 1024 }, - "smithy.api#pattern": ".*" + "smithy.api#pattern": "[a-zA-Z0-9-]+" } }, - "com.amazonaws.sagemaker#ContainerHostname": { + "com.amazonaws.sagemaker#CodeRepositoryNameContains": { "type": "string", "traits": { "smithy.api#length": { "min": 0, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "[a-zA-Z0-9-]+" } }, - "com.amazonaws.sagemaker#ContainerImage": { + "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl": { "type": "string", "traits": { "smithy.api#length": { - "min": 0, - "max": 255 + "min": 1, + "max": 1024 }, - "smithy.api#pattern": "[\\S]+" + "smithy.api#pattern": "^https://([^/]+)/?(.*)$|^[a-zA-Z0-9](-*[a-zA-Z0-9])*" } }, - "com.amazonaws.sagemaker#ContainerMode": { + "com.amazonaws.sagemaker#CodeRepositorySortBy": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "SingleModel", - "name": "SINGLE_MODEL" + "value": "Name", + 
"name": "NAME" }, { - "value": "MultiModel", - "name": "MULTI_MODEL" + "value": "CreationTime", + "name": "CREATION_TIME" + }, + { + "value": "LastModifiedTime", + "name": "LAST_MODIFIED_TIME" } ] } }, - "com.amazonaws.sagemaker#ContentClassifier": { + "com.amazonaws.sagemaker#CodeRepositorySortOrder": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "FreeOfPersonallyIdentifiableInformation", - "name": "FREE_OF_PERSONALLY_IDENTIFIABLE_INFORMATION" + "value": "Ascending", + "name": "ASCENDING" }, { - "value": "FreeOfAdultContent", - "name": "FREE_OF_ADULT_CONTENT" + "value": "Descending", + "name": "DESCENDING" } ] } }, - "com.amazonaws.sagemaker#ContentClassifiers": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ContentClassifier" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - } - } - }, - "com.amazonaws.sagemaker#ContentType": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#ContentTypes": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ContentType" - } - }, - "com.amazonaws.sagemaker#ContinuousParameterRange": { + "com.amazonaws.sagemaker#CodeRepositorySummary": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.sagemaker#ParameterKey", + "CodeRepositoryName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the continuous hyperparameter to tune.

          ", + "smithy.api#documentation": "

          The name of the Git repository.

          ", "smithy.api#required": {} } }, - "MinValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "CodeRepositoryArn": { + "target": "com.amazonaws.sagemaker#CodeRepositoryArn", "traits": { - "smithy.api#documentation": "

          The minimum value for the hyperparameter.\n The\n tuning job uses floating-point values between this value and MaxValue for\n tuning.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Git repository.

          ", "smithy.api#required": {} } }, - "MaxValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The maximum value for the hyperparameter. The tuning job uses floating-point values\n between MinValue value and this value for tuning.

          ", + "smithy.api#documentation": "

          The date and time that the Git repository was created.

          ", "smithy.api#required": {} } }, - "ScalingType": { - "target": "com.amazonaws.sagemaker#HyperParameterScalingType", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          The scale that hyperparameter tuning uses to search the hyperparameter range. For\n information about choosing a hyperparameter scale, see Hyperparameter Scaling. One of the following values:

          \n
          \n
          Auto
          \n
          \n

          Amazon SageMaker hyperparameter tuning chooses the best scale for the\n hyperparameter.

          \n
          \n
          Linear
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a linear scale.

          \n
          \n
          Logarithmic
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a logarithmic scale.

          \n

          Logarithmic scaling works only for ranges that have only values greater\n than 0.

          \n
          \n
          ReverseLogarithmic
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a reverse logarithmic scale.

          \n

          Reverse logarithmic scaling works only for ranges that are entirely within\n the range 0<=x<1.0.

          \n
          \n
          " + "smithy.api#documentation": "

          The date and time that the Git repository was last modified.

          ", + "smithy.api#required": {} + } + }, + "GitConfig": { + "target": "com.amazonaws.sagemaker#GitConfig", + "traits": { + "smithy.api#documentation": "

          Configuration details for the Git repository, including the URL where it is located\n and the ARN of the AWS Secrets Manager secret that contains the credentials used to\n access the repository.

          " } } }, "traits": { - "smithy.api#documentation": "

          A list of continuous hyperparameters to tune.

          " + "smithy.api#documentation": "

          Specifies summary information about a Git repository.

          " } }, - "com.amazonaws.sagemaker#ContinuousParameterRangeSpecification": { + "com.amazonaws.sagemaker#CodeRepositorySummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CodeRepositorySummary" + } + }, + "com.amazonaws.sagemaker#CognitoConfig": { "type": "structure", "members": { - "MinValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "UserPool": { + "target": "com.amazonaws.sagemaker#CognitoUserPool", "traits": { - "smithy.api#documentation": "

          The minimum floating-point value allowed.

          ", + "smithy.api#documentation": "

          A \n user pool is a user directory in Amazon Cognito. \n With a user pool, your users can sign in to your web or mobile app through Amazon Cognito. \n Your users can also sign in through social identity providers like \n Google, Facebook, Amazon, or Apple, and through SAML identity providers.

          ", "smithy.api#required": {} } }, - "MaxValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "ClientId": { + "target": "com.amazonaws.sagemaker#ClientId", "traits": { - "smithy.api#documentation": "

          The maximum floating-point value allowed.

          ", + "smithy.api#documentation": "

          The client ID for your Amazon Cognito user pool.

          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Defines the possible values for a continuous hyperparameter.

          " - } - }, - "com.amazonaws.sagemaker#ContinuousParameterRanges": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ContinuousParameterRange" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#CreateAlgorithm": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateAlgorithmInput" - }, - "output": { - "target": "com.amazonaws.sagemaker#CreateAlgorithmOutput" - }, - "traits": { - "smithy.api#documentation": "

          Create a machine learning algorithm that you can use in Amazon SageMaker and list in the AWS\n Marketplace.

          " + "smithy.api#documentation": "

          Use this parameter to configure your Amazon Cognito workforce. \n A single Cognito workforce is created using and corresponds to a single\n \n Amazon Cognito user pool.

          " } }, - "com.amazonaws.sagemaker#CreateAlgorithmInput": { + "com.amazonaws.sagemaker#CognitoMemberDefinition": { "type": "structure", "members": { - "AlgorithmName": { - "target": "com.amazonaws.sagemaker#EntityName", + "UserPool": { + "target": "com.amazonaws.sagemaker#CognitoUserPool", "traits": { - "smithy.api#documentation": "

          The name of the algorithm.

          ", + "smithy.api#documentation": "

          An identifier for a user pool. The user pool must be in the same region as the service\n that you are calling.

          ", "smithy.api#required": {} } }, - "AlgorithmDescription": { - "target": "com.amazonaws.sagemaker#EntityDescription", - "traits": { - "smithy.api#documentation": "

          A description of the algorithm.

          " - } - }, - "TrainingSpecification": { - "target": "com.amazonaws.sagemaker#TrainingSpecification", + "UserGroup": { + "target": "com.amazonaws.sagemaker#CognitoUserGroup", "traits": { - "smithy.api#documentation": "

          Specifies details about training jobs run by this algorithm, including the\n following:

          \n
            \n
          • \n

            The Amazon ECR path of the container and the version digest of the\n algorithm.

            \n
          • \n
          • \n

            The hyperparameters that the algorithm supports.

            \n
          • \n
          • \n

            The instance types that the algorithm supports for training.

            \n
          • \n
          • \n

            Whether the algorithm supports distributed training.

            \n
          • \n
          • \n

            The metrics that the algorithm emits to Amazon CloudWatch.

            \n
          • \n
          • \n

            Which metrics that the algorithm emits can be used as the objective metric for\n hyperparameter tuning jobs.

            \n
          • \n
          • \n

            The input channels that the algorithm supports for training data. For example,\n an algorithm might support train, validation, and\n test channels.

            \n
          • \n
          ", + "smithy.api#documentation": "

          An identifier for a user group.

          ", "smithy.api#required": {} } }, - "InferenceSpecification": { - "target": "com.amazonaws.sagemaker#InferenceSpecification", + "ClientId": { + "target": "com.amazonaws.sagemaker#ClientId", "traits": { - "smithy.api#documentation": "

          Specifies details about inference jobs that the algorithm runs, including the\n following:

          \n
            \n
          • \n

            The Amazon ECR paths of containers that contain the inference code and model\n artifacts.

            \n
          • \n
          • \n

            The instance types that the algorithm supports for transform jobs and\n real-time endpoints used for inference.

            \n
          • \n
          • \n

            The input and output content formats that the algorithm supports for\n inference.

            \n
          • \n
          " + "smithy.api#documentation": "

          An identifier for an application client. You must create the app client ID using\n Amazon Cognito.

          ", + "smithy.api#required": {} } + } + }, + "traits": { + "smithy.api#documentation": "

          Identifies an Amazon Cognito user group. A user group can be used in one or more work\n teams.

          " + } + }, + "com.amazonaws.sagemaker#CognitoUserGroup": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 128 }, - "ValidationSpecification": { - "target": "com.amazonaws.sagemaker#AlgorithmValidationSpecification", - "traits": { - "smithy.api#documentation": "

          Specifies configurations for one or more training jobs and that Amazon SageMaker runs to test the\n algorithm's training code and, optionally, one or more batch transform jobs that Amazon SageMaker\n runs to test the algorithm's inference code.

          " - } + "smithy.api#pattern": "[\\p{L}\\p{M}\\p{S}\\p{N}\\p{P}]+" + } + }, + "com.amazonaws.sagemaker#CognitoUserPool": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 55 }, - "CertifyForMarketplace": { - "target": "com.amazonaws.sagemaker#CertifyForMarketplace", - "traits": { - "smithy.api#documentation": "

          Whether to certify the algorithm so that it can be listed in AWS Marketplace.

          " - } - } + "smithy.api#pattern": "[\\w-]+_[0-9a-zA-Z]+" } }, - "com.amazonaws.sagemaker#CreateAlgorithmOutput": { + "com.amazonaws.sagemaker#CollectionConfiguration": { "type": "structure", "members": { - "AlgorithmArn": { - "target": "com.amazonaws.sagemaker#AlgorithmArn", + "CollectionName": { + "target": "com.amazonaws.sagemaker#CollectionName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the new algorithm.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the tensor collection. The name must be unique relative to other rule configuration names.

          " + } + }, + "CollectionParameters": { + "target": "com.amazonaws.sagemaker#CollectionParameters", + "traits": { + "smithy.api#documentation": "

          Parameter values for the tensor collection. The allowed parameters are\n \"name\", \"include_regex\", \"reduction_config\",\n \"save_config\", \"tensor_names\", and\n \"save_histogram\".

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Configuration information for tensor collections.

          " } }, - "com.amazonaws.sagemaker#CreateApp": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateAppRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#CreateAppResponse" + "com.amazonaws.sagemaker#CollectionConfigurations": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CollectionConfiguration" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 } - ], + } + }, + "com.amazonaws.sagemaker#CollectionName": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Creates a running App for the specified UserProfile. Supported Apps are JupyterServer\n \n and KernelGateway. This operation is automatically invoked by Amazon SageMaker Studio\n upon access to the associated Domain, and when new kernel configurations are selected by the user.\n \n A user may have multiple Apps active simultaneously.

          " + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#CreateAppImageConfig": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateAppImageConfigRequest" + "com.amazonaws.sagemaker#CollectionParameters": { + "type": "map", + "key": { + "target": "com.amazonaws.sagemaker#ConfigKey" }, - "output": { - "target": "com.amazonaws.sagemaker#CreateAppImageConfigResponse" + "value": { + "target": "com.amazonaws.sagemaker#ConfigValue" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 } - ], + } + }, + "com.amazonaws.sagemaker#CompilationJobArn": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Creates a configuration for running a SageMaker image as a KernelGateway app. The\n configuration specifies the Amazon Elastic File System (EFS) storage volume on the image, and a list of the\n kernels in the image.

          " + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:compilation-job/.*" } }, - "com.amazonaws.sagemaker#CreateAppImageConfigRequest": { + "com.amazonaws.sagemaker#CompilationJobStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "INPROGRESS", + "name": "INPROGRESS" + }, + { + "value": "COMPLETED", + "name": "COMPLETED" + }, + { + "value": "FAILED", + "name": "FAILED" + }, + { + "value": "STARTING", + "name": "STARTING" + }, + { + "value": "STOPPING", + "name": "STOPPING" + }, + { + "value": "STOPPED", + "name": "STOPPED" + } + ] + } + }, + "com.amazonaws.sagemaker#CompilationJobSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CompilationJobSummary" + } + }, + "com.amazonaws.sagemaker#CompilationJobSummary": { "type": "structure", "members": { - "AppImageConfigName": { - "target": "com.amazonaws.sagemaker#AppImageConfigName", + "CompilationJobName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the AppImageConfig. Must be unique to your account.

          ", + "smithy.api#documentation": "

          The name of the model compilation job that you want a summary for.

          ", "smithy.api#required": {} } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "CompilationJobArn": { + "target": "com.amazonaws.sagemaker#CompilationJobArn", "traits": { - "smithy.api#documentation": "

          A list of tags to apply to the AppImageConfig.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model compilation job.

          ", + "smithy.api#required": {} } }, - "KernelGatewayImageConfig": { - "target": "com.amazonaws.sagemaker#KernelGatewayImageConfig", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The KernelGatewayImageConfig.

          " + "smithy.api#documentation": "

          The time when the model compilation job was created.

          ", + "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#CreateAppImageConfigResponse": { - "type": "structure", - "members": { - "AppImageConfigArn": { - "target": "com.amazonaws.sagemaker#AppImageConfigArn", + }, + "CompilationStartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AppImageConfig.

          " + "smithy.api#documentation": "

          The time when the model compilation job started.

          " } - } - } - }, - "com.amazonaws.sagemaker#CreateAppRequest": { - "type": "structure", - "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + }, + "CompilationEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time when the model compilation job completed.

          " } }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "CompilationTargetDevice": { + "target": "com.amazonaws.sagemaker#TargetDevice", "traits": { - "smithy.api#documentation": "

          The user profile name.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of device that the model will run on after the compilation job has\n completed.

          " } }, - "AppType": { - "target": "com.amazonaws.sagemaker#AppType", + "CompilationTargetPlatformOs": { + "target": "com.amazonaws.sagemaker#TargetPlatformOs", "traits": { - "smithy.api#documentation": "

          The type of app.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of OS that the model will run on after the compilation job has\n completed.

          " } }, - "AppName": { - "target": "com.amazonaws.sagemaker#AppName", + "CompilationTargetPlatformArch": { + "target": "com.amazonaws.sagemaker#TargetPlatformArch", "traits": { - "smithy.api#documentation": "

          The name of the app.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of architecture that the model will run on after the compilation job has\n completed.

          " } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "CompilationTargetPlatformAccelerator": { + "target": "com.amazonaws.sagemaker#TargetPlatformAccelerator", "traits": { - "smithy.api#documentation": "

          Each tag consists of a key and an optional value.\n Tag keys must be unique per resource.

          " + "smithy.api#documentation": "

          The type of accelerator that the model will run on after the compilation job has\n completed.

          " } }, - "ResourceSpec": { - "target": "com.amazonaws.sagemaker#ResourceSpec", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance.

          " + "smithy.api#documentation": "

          The time when the model compilation job was last modified.

          " + } + }, + "CompilationJobStatus": { + "target": "com.amazonaws.sagemaker#CompilationJobStatus", + "traits": { + "smithy.api#documentation": "

          The status of the model compilation job.

          ", + "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          A summary of a model compilation job.

          " } }, - "com.amazonaws.sagemaker#CreateAppResponse": { + "com.amazonaws.sagemaker#CompilerOptions": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 3, + "max": 1024 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#CompressionType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "None", + "name": "NONE" + }, + { + "value": "Gzip", + "name": "GZIP" + } + ] + } + }, + "com.amazonaws.sagemaker#CompressionTypes": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CompressionType" + } + }, + "com.amazonaws.sagemaker#ConditionOutcome": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "True", + "name": "TRUE" + }, + { + "value": "False", + "name": "FALSE" + } + ] + } + }, + "com.amazonaws.sagemaker#ConditionStepMetadata": { "type": "structure", "members": { - "AppArn": { - "target": "com.amazonaws.sagemaker#AppArn", + "Outcome": { + "target": "com.amazonaws.sagemaker#ConditionOutcome", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the app.

          " + "smithy.api#documentation": "

          The outcome of the Condition step evaluation.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Metadata for a Condition step.

          " } }, - "com.amazonaws.sagemaker#CreateAutoMLJob": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateAutoMLJobRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#CreateAutoMLJobResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" + "com.amazonaws.sagemaker#ConfigKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 }, - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ConfigValue": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Creates an Autopilot job.

          \n

          Find the best performing model after you run an Autopilot job by calling . Deploy that model by following the steps described in\n Step 6.1:\n Deploy the Model to Amazon SageMaker Hosting Services.

          \n

          For information about how to use Autopilot, see Automate Model\n Development with Amazon SageMaker Autopilot.

          " + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#CreateAutoMLJobRequest": { + "com.amazonaws.sagemaker#ConflictException": { "type": "structure", "members": { - "AutoMLJobName": { - "target": "com.amazonaws.sagemaker#AutoMLJobName", - "traits": { - "smithy.api#documentation": "

          Identifies an Autopilot job. Must be unique to your account and is case-insensitive.

          ", - "smithy.api#required": {} - } - }, - "InputDataConfig": { - "target": "com.amazonaws.sagemaker#AutoMLInputDataConfig", - "traits": { - "smithy.api#documentation": "

          Similar to InputDataConfig supported by Tuning. Format(s) supported: CSV. Minimum of 500\n rows.

          ", - "smithy.api#required": {} - } + "Message": { + "target": "com.amazonaws.sagemaker#FailureReason" + } + }, + "traits": { + "smithy.api#documentation": "

          There was a conflict when you attempted to modify an experiment, trial, or trial\n component.

          ", + "smithy.api#error": "client" + } + }, + "com.amazonaws.sagemaker#ContainerArgument": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 }, - "OutputDataConfig": { - "target": "com.amazonaws.sagemaker#AutoMLOutputDataConfig", + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ContainerArguments": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContainerArgument" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#ContainerDefinition": { + "type": "structure", + "members": { + "ContainerHostname": { + "target": "com.amazonaws.sagemaker#ContainerHostname", "traits": { - "smithy.api#documentation": "

          Similar to OutputDataConfig supported by Tuning. Format(s) supported: CSV.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          This parameter is ignored for models that contain only a\n PrimaryContainer.

          \n

          When a ContainerDefinition is part of an inference pipeline, the value of\n the parameter uniquely identifies the container for the purposes of logging and metrics.\n For information, see Use Logs and Metrics\n to Monitor an Inference Pipeline. If you don't specify a value for this\n parameter for a ContainerDefinition that is part of an inference pipeline,\n a unique name is automatically assigned based on the position of the\n ContainerDefinition in the pipeline. If you specify a value for the\n ContainerHostName for any ContainerDefinition that is part\n of an inference pipeline, you must specify a value for the\n ContainerHostName parameter of every ContainerDefinition\n in that pipeline.

          " } }, - "ProblemType": { - "target": "com.amazonaws.sagemaker#ProblemType", + "Image": { + "target": "com.amazonaws.sagemaker#ContainerImage", "traits": { - "smithy.api#documentation": "

          Defines the kind of preprocessing and algorithms intended for the candidates. Options\n include: BinaryClassification, MulticlassClassification, and Regression.

          " + "smithy.api#documentation": "

          The path where inference code is stored. This can be either in Amazon EC2 Container Registry or in a\n Docker registry that is accessible from the same VPC that you configure for your\n endpoint. If you are using your\n own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must\n meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and\n registry/repository[@digest] image path formats. For more information,\n see Using\n Your Own Algorithms with Amazon SageMaker\n

          " } }, - "AutoMLJobObjective": { - "target": "com.amazonaws.sagemaker#AutoMLJobObjective", + "ImageConfig": { + "target": "com.amazonaws.sagemaker#ImageConfig", "traits": { - "smithy.api#documentation": "

          Defines the objective of an AutoML job. You provide an AutoMLJobObjective$MetricName and Autopilot infers whether to minimize or\n maximize it. If a metric is not specified, the most commonly used ObjectiveMetric for\n problem type is automatically selected.

          " + "smithy.api#documentation": "

          Specifies whether the model container is in Amazon ECR or a private Docker registry\n accessible from your\n Amazon Virtual Private Cloud (VPC). For information about storing containers in a private Docker registry,\n see Use a Private Docker\n Registry for Real-Time Inference Containers\n

          " } }, - "AutoMLJobConfig": { - "target": "com.amazonaws.sagemaker#AutoMLJobConfig", + "Mode": { + "target": "com.amazonaws.sagemaker#ContainerMode", "traits": { - "smithy.api#documentation": "

          Contains CompletionCriteria and SecurityConfig.

          " + "smithy.api#documentation": "

          Whether the container hosts a single model or multiple models.

          " } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "ModelDataUrl": { + "target": "com.amazonaws.sagemaker#Url", "traits": { - "smithy.api#documentation": "

          The ARN of the role that is used to access the data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The S3 path where the model artifacts, which result from model training, are stored.\n This path must point to a single gzip compressed tar archive (.tar.gz suffix). The S3\n path is required for Amazon SageMaker built-in algorithms, but not if you use your own algorithms.\n For more information on built-in algorithms, see Common\n Parameters.

          \n \n

          The model artifacts must be in an S3 bucket that is in the same region as the\n model or endpoint you are creating.

          \n
          \n

          If you provide a value for this parameter, Amazon SageMaker uses AWS Security Token Service to\n download model artifacts from the S3 path you provide. AWS STS is activated in your\n IAM user account by default. If you previously deactivated AWS STS for a region, you\n need to reactivate AWS STS for that region. For more information, see Activating and\n Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User\n Guide.

          \n \n

          If you use a built-in algorithm to create a model, Amazon SageMaker requires that you provide\n a S3 path to the model artifacts in ModelDataUrl.

          \n
          " } }, - "GenerateCandidateDefinitionsOnly": { - "target": "com.amazonaws.sagemaker#GenerateCandidateDefinitionsOnly", + "Environment": { + "target": "com.amazonaws.sagemaker#EnvironmentMap", "traits": { - "smithy.api#documentation": "

          Generates possible candidates without training a model. A candidate is a combination of\n data preprocessors, algorithms, and algorithm parameter settings.

          " + "smithy.api#documentation": "

          The environment variables to set in the Docker container. Each key and value in the\n Environment string to string map can have length of up to 1024. We\n support up to 16 entries in the map.

          " } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#VersionedArnOrName", "traits": { - "smithy.api#documentation": "

          Each tag consists of a key and an optional value. Tag keys must be unique per\n resource.

          " + "smithy.api#documentation": "

          The name or Amazon Resource Name (ARN) of the model package to use to create the\n model.

          " } } - } + }, + "traits": { + "smithy.api#documentation": "

          Describes the container, as part of model definition.

          " + } }, - "com.amazonaws.sagemaker#CreateAutoMLJobResponse": { - "type": "structure", - "members": { - "AutoMLJobArn": { - "target": "com.amazonaws.sagemaker#AutoMLJobArn", - "traits": { - "smithy.api#documentation": "

          When a job is created, it is assigned a unique ARN.

          ", - "smithy.api#required": {} - } + "com.amazonaws.sagemaker#ContainerDefinitionList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContainerDefinition" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 5 } } }, - "com.amazonaws.sagemaker#CreateCodeRepository": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateCodeRepositoryInput" + "com.amazonaws.sagemaker#ContainerEntrypoint": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContainerEntrypointString" }, - "output": { - "target": "com.amazonaws.sagemaker#CreateCodeRepositoryOutput" + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#ContainerEntrypointString": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ContainerHostname": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" + } + }, + "com.amazonaws.sagemaker#ContainerImage": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 255 + }, + "smithy.api#pattern": "[\\S]+" + } + }, + "com.amazonaws.sagemaker#ContainerMode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "SingleModel", + "name": "SINGLE_MODEL" + }, + { + "value": "MultiModel", + "name": "MULTI_MODEL" + } + ] + } + }, + "com.amazonaws.sagemaker#ContentClassifier": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "FreeOfPersonallyIdentifiableInformation", + "name": "FREE_OF_PERSONALLY_IDENTIFIABLE_INFORMATION" + }, + { + "value": "FreeOfAdultContent", + "name": "FREE_OF_ADULT_CONTENT" + } + ] + } + }, + "com.amazonaws.sagemaker#ContentClassifiers": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContentClassifier" }, "traits": { - 
"smithy.api#documentation": "

          Creates a Git repository as a resource in your Amazon SageMaker account. You can associate the\n repository with notebook instances so that you can use Git source control for the\n notebooks you create. The Git repository is a resource in your Amazon SageMaker account, so it can\n be associated with more than one notebook instance, and it persists independently from\n the lifecycle of any notebook instances it is associated with.

          \n

          The repository can be hosted either in AWS CodeCommit or in any\n other Git repository.

          " + "smithy.api#length": { + "min": 0, + "max": 256 + } } }, - "com.amazonaws.sagemaker#CreateCodeRepositoryInput": { + "com.amazonaws.sagemaker#ContentDigest": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 72 + }, + "smithy.api#pattern": "^[Ss][Hh][Aa]256:[0-9a-fA-F]{64}$" + } + }, + "com.amazonaws.sagemaker#ContentType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ContentTypes": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContentType" + } + }, + "com.amazonaws.sagemaker#ContextArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:context/.*" + } + }, + "com.amazonaws.sagemaker#ContextSource": { "type": "structure", "members": { - "CodeRepositoryName": { - "target": "com.amazonaws.sagemaker#EntityName", + "SourceUri": { + "target": "com.amazonaws.sagemaker#String2048", "traits": { - "smithy.api#documentation": "

          The name of the Git repository. The name must have 1 to 63 characters. Valid\n characters are a-z, A-Z, 0-9, and - (hyphen).

          ", + "smithy.api#documentation": "

          The URI of the source.

          ", "smithy.api#required": {} } }, - "GitConfig": { - "target": "com.amazonaws.sagemaker#GitConfig", + "SourceType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          Specifies details about the repository, including the URL where the repository is\n located, the default branch, and credentials to use to access the repository.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of the source.

          " } - } - } - }, - "com.amazonaws.sagemaker#CreateCodeRepositoryOutput": { - "type": "structure", - "members": { - "CodeRepositoryArn": { - "target": "com.amazonaws.sagemaker#CodeRepositoryArn", + }, + "SourceId": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the new repository.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The ID of the source.

          " } } - } - }, - "com.amazonaws.sagemaker#CreateCompilationJob": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateCompilationJobRequest" }, - "output": { - "target": "com.amazonaws.sagemaker#CreateCompilationJobResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], "traits": { - "smithy.api#documentation": "

          Starts a model compilation job. After the model has been compiled, Amazon SageMaker saves the\n resulting model artifacts to an Amazon Simple Storage Service (Amazon S3) bucket that you specify.

          \n

          If\n you choose to host your model using Amazon SageMaker hosting services, you can use the resulting\n model artifacts as part of the model. You can also use the artifacts with\n AWS\n IoT Greengrass. In that case, deploy them as an ML\n resource.

          \n

          In the request body, you provide the following:

          \n
            \n
          • \n

            A name for the compilation job

            \n
          • \n
          • \n

            Information about the input model artifacts

            \n
          • \n
          • \n

            The output location for the compiled model and the device (target) that the\n model runs on

            \n
          • \n
          • \n

            The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker assumes to perform\n the model compilation job.

            \n
          • \n
          \n

          You can also provide a Tag to track the model compilation job's resource\n use and costs. The response body contains the\n CompilationJobArn\n for the compiled job.

          \n

          To stop a model compilation job, use StopCompilationJob. To get\n information about a particular model compilation job, use DescribeCompilationJob. To get information about multiple model\n compilation jobs, use ListCompilationJobs.

          " + "smithy.api#documentation": "

          A structure describing the source of a context.

          " } }, - "com.amazonaws.sagemaker#CreateCompilationJobRequest": { + "com.amazonaws.sagemaker#ContextSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContextSummary" + } + }, + "com.amazonaws.sagemaker#ContextSummary": { "type": "structure", "members": { - "CompilationJobName": { - "target": "com.amazonaws.sagemaker#EntityName", + "ContextArn": { + "target": "com.amazonaws.sagemaker#ContextArn", "traits": { - "smithy.api#documentation": "

          A name for the model compilation job. The name must be unique within the AWS Region\n and within your AWS account.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the context.

          " } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "ContextName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on\n your behalf.

          \n

          During model compilation, Amazon SageMaker needs your permission to:

          \n
            \n
          • \n

            Read input data from an S3 bucket

            \n
          • \n
          • \n

            Write model artifacts to an S3 bucket

            \n
          • \n
          • \n

            Write logs to Amazon CloudWatch Logs

            \n
          • \n
          • \n

            Publish metrics to Amazon CloudWatch

            \n
          • \n
          \n

          You grant permissions for all of these tasks to an IAM role. To pass this role to\n Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For\n more information, see Amazon SageMaker\n Roles.\n

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the context.

          " } }, - "InputConfig": { - "target": "com.amazonaws.sagemaker#InputConfig", + "Source": { + "target": "com.amazonaws.sagemaker#ContextSource", "traits": { - "smithy.api#documentation": "

          Provides information about the location of input model artifacts, the name and shape\n of the expected data inputs, and the framework in which the model was trained.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The source of the context.

          " } }, - "OutputConfig": { - "target": "com.amazonaws.sagemaker#OutputConfig", + "ContextType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          Provides information about the output location for the compiled model and the target\n device the model runs on.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of the context.

          " } }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#StoppingCondition", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Specifies a limit to how long a model compilation job can run. When the job reaches\n the time limit, Amazon SageMaker ends the compilation job. Use this API to cap model training\n costs.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the context was created.

          " } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs that you want to use to organize and track your AWS\n resource costs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " + "smithy.api#documentation": "

          When the context was last modified.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Lists a summary of the properties of a context. A context provides a logical grouping\n of other entities.

          " } }, - "com.amazonaws.sagemaker#CreateCompilationJobResponse": { + "com.amazonaws.sagemaker#ContinuousParameterRange": { "type": "structure", "members": { - "CompilationJobArn": { - "target": "com.amazonaws.sagemaker#CompilationJobArn", + "Name": { + "target": "com.amazonaws.sagemaker#ParameterKey", "traits": { - "smithy.api#documentation": "

          If the action is successful, the service sends back an HTTP 200 response. Amazon SageMaker returns\n the following data in JSON format:

          \n
            \n
          • \n

            \n CompilationJobArn: The Amazon Resource Name (ARN) of the compiled\n job.

            \n
          • \n
          ", + "smithy.api#documentation": "

          The name of the continuous hyperparameter to tune.

          ", "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#CreateDomain": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateDomainRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#CreateDomainResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" }, - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], - "traits": { - "smithy.api#documentation": "

          Creates a Domain used by Amazon SageMaker Studio. A domain consists of an associated\n Amazon Elastic File System (EFS) volume, a list of authorized users, and a variety of security, application,\n policy, and Amazon Virtual Private Cloud (VPC) configurations. An AWS account is limited to one domain per region.\n Users within a domain can share notebook files and other artifacts with each other.

          \n

          When a domain is created, an EFS volume is created for use by all of the users within the\n domain. Each user receives a private home directory within the EFS volume for notebooks,\n Git repositories, and data files.

          \n

          \n VPC configuration\n

          \n

          All SageMaker Studio traffic between the domain and the EFS volume is through the specified\n VPC and subnets. For other Studio traffic, you can specify the AppNetworkAccessType\n parameter. AppNetworkAccessType corresponds to the network access type that you\n choose when you onboard to Studio. The following options are available:

          \n
            \n
          • \n

            \n PublicInternetOnly - Non-EFS traffic goes through a VPC managed by\n Amazon SageMaker, which allows internet access. This is the default value.

            \n
          • \n
          • \n

            \n VpcOnly - All Studio traffic is through the specified VPC and subnets.\n Internet access is disabled by default. To allow internet access, you must specify a\n NAT gateway.

            \n

            When internet access is disabled, you won't be able to run a Studio notebook or to\n train or host models unless your VPC has an interface endpoint to the SageMaker API and runtime\n or a NAT gateway and your security groups allow outbound connections.

            \n
          • \n
          \n

          For more information, see\n Connect\n SageMaker Studio Notebooks to Resources in a VPC.

          " - } - }, - "com.amazonaws.sagemaker#CreateDomainRequest": { - "type": "structure", - "members": { - "DomainName": { - "target": "com.amazonaws.sagemaker#DomainName", + "MinValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", "traits": { - "smithy.api#documentation": "

          A name for the domain.

          ", + "smithy.api#documentation": "

          The minimum value for the hyperparameter.\n The\n tuning job uses floating-point values between this value and MaxValue for\n tuning.

          ", "smithy.api#required": {} } }, - "AuthMode": { - "target": "com.amazonaws.sagemaker#AuthMode", - "traits": { - "smithy.api#documentation": "

          The mode of authentication that members use to access the domain.

          ", - "smithy.api#required": {} - } - }, - "DefaultUserSettings": { - "target": "com.amazonaws.sagemaker#UserSettings", - "traits": { - "smithy.api#documentation": "

          The default user settings.

          ", - "smithy.api#required": {} - } - }, - "SubnetIds": { - "target": "com.amazonaws.sagemaker#Subnets", - "traits": { - "smithy.api#documentation": "

          The VPC subnets that Studio uses for communication.

          ", - "smithy.api#required": {} - } - }, - "VpcId": { - "target": "com.amazonaws.sagemaker#VpcId", + "MaxValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", "traits": { - "smithy.api#documentation": "

          The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.

          ", + "smithy.api#documentation": "

          The maximum value for the hyperparameter. The tuning job uses floating-point values\n between MinValue value and this value for tuning.

          ", "smithy.api#required": {} } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", - "traits": { - "smithy.api#documentation": "

          Tags to associate with the Domain. Each tag consists of a key and an optional value.\n Tag keys must be unique per resource. Tags are searchable using the\n Search API.

          " - } - }, - "AppNetworkAccessType": { - "target": "com.amazonaws.sagemaker#AppNetworkAccessType", - "traits": { - "smithy.api#documentation": "

          Specifies the VPC used for non-EFS traffic. The default value is\n PublicInternetOnly.

          \n
            \n
          • \n

            \n PublicInternetOnly - Non-EFS traffic is through a VPC managed by\n Amazon SageMaker, which allows direct internet access

            \n
          • \n
          • \n

            \n VpcOnly - All Studio traffic is through the specified VPC and subnets

            \n
          • \n
          " - } - }, - "HomeEfsFileSystemKmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", - "traits": { - "smithy.api#deprecated": { - "message": "This property is deprecated, use KmsKeyId instead." - }, - "smithy.api#documentation": "

          This member is deprecated and replaced with KmsKeyId.

          " - } - }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "ScalingType": { + "target": "com.amazonaws.sagemaker#HyperParameterScalingType", "traits": { - "smithy.api#documentation": "

          SageMaker uses AWS KMS to encrypt the EFS volume attached to the domain with an AWS managed\n customer master key (CMK) by default. For more control, specify a customer managed CMK.

          " + "smithy.api#documentation": "

          The scale that hyperparameter tuning uses to search the hyperparameter range. For\n information about choosing a hyperparameter scale, see Hyperparameter Scaling. One of the following values:

          \n
          \n
          Auto
          \n
          \n

          Amazon SageMaker hyperparameter tuning chooses the best scale for the\n hyperparameter.

          \n
          \n
          Linear
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a linear scale.

          \n
          \n
          Logarithmic
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a logarithmic scale.

          \n

          Logarithmic scaling works only for ranges that have only values greater\n than 0.

          \n
          \n
          ReverseLogarithmic
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a reverse logarithmic scale.

          \n

          Reverse logarithmic scaling works only for ranges that are entirely within\n the range 0<=x<1.0.

          \n
          \n
          " } } + }, + "traits": { + "smithy.api#documentation": "

          A list of continuous hyperparameters to tune.

          " } }, - "com.amazonaws.sagemaker#CreateDomainResponse": { + "com.amazonaws.sagemaker#ContinuousParameterRangeSpecification": { "type": "structure", "members": { - "DomainArn": { - "target": "com.amazonaws.sagemaker#DomainArn", + "MinValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the created domain.

          " + "smithy.api#documentation": "

          The minimum floating-point value allowed.

          ", + "smithy.api#required": {} } }, - "Url": { - "target": "com.amazonaws.sagemaker#String1024", + "MaxValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", "traits": { - "smithy.api#documentation": "

          The URL to the created domain.

          " + "smithy.api#documentation": "

          The maximum floating-point value allowed.

          ", + "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          Defines the possible values for a continuous hyperparameter.

          " } }, - "com.amazonaws.sagemaker#CreateEndpoint": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateEndpointInput" - }, - "output": { - "target": "com.amazonaws.sagemaker#CreateEndpointOutput" + "com.amazonaws.sagemaker#ContinuousParameterRanges": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContinuousParameterRange" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], "traits": { - "smithy.api#documentation": "

          Creates an endpoint using the endpoint configuration specified in the request. Amazon SageMaker\n uses the endpoint to provision resources and deploy models. You create the endpoint\n configuration with the CreateEndpointConfig API.

          \n

          Use this API to deploy models using Amazon SageMaker hosting services.

          \n

          For an example that calls this method when deploying a model to Amazon SageMaker hosting services,\n see Deploy the\n Model to Amazon SageMaker Hosting Services (AWS SDK for Python (Boto\n 3)).\n

          \n \n

          You must not delete an EndpointConfig that is in use by an endpoint\n that is live or while the UpdateEndpoint or CreateEndpoint\n operations are being performed on the endpoint. To update an endpoint, you must\n create a new EndpointConfig.

          \n
          \n

          The endpoint name must be unique within an AWS Region in your AWS account.

          \n

          When it receives the request, Amazon SageMaker creates the endpoint, launches the resources (ML\n compute instances), and deploys the model(s) on them.

          \n \n \n

          When you call CreateEndpoint, a load call is made to DynamoDB to\n verify that your endpoint configuration exists. When you read data from a DynamoDB\n table supporting \n Eventually Consistent Reads\n , the response might not\n reflect the results of a recently completed write operation. The response might\n include some stale data. If the dependent entities are not yet in DynamoDB, this\n causes a validation error. If you repeat your read request after a short time, the\n response should return the latest data. So retry logic is recommended to handle\n these possible issues. We also recommend that customers call DescribeEndpointConfig before calling CreateEndpoint to minimize the potential impact of a DynamoDB eventually consistent read.

          \n
          \n

          When Amazon SageMaker receives the request, it sets the endpoint status to\n Creating. After it creates the endpoint, it sets the status to\n InService. Amazon SageMaker can then process incoming requests for inferences. To\n check the status of an endpoint, use the DescribeEndpoint\n API.

          \n

          If any of the models hosted at this endpoint get model data from an Amazon S3 location,\n Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you\n provided. AWS STS is activated in your IAM user account by default. If you previously\n deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For\n more information, see Activating and\n Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User\n Guide.

          \n \n

          \n To add the IAM role policies for using this API operation, \n go to the IAM console, and\n choose Roles in the left navigation pane. Search the IAM role that you want to grant access to use\n the CreateEndpoint and CreateEndpointConfig API operations, add the following policies to the role.\n

          \n
            \n
          • \n

            Option 1: For a full Amazon SageMaker access, search and attach the AmazonSageMakerFullAccess policy.

            \n
          • \n
          • \n

            Option 2: For granting a limited access to an IAM role, \n paste the following Action elements manually into the JSON file of the IAM role:

            \n

            \n \"Action\": [\"sagemaker:CreateEndpoint\", \"sagemaker:CreateEndpointConfig\"]\n

            \n

            \n \"Resource\": [\n

            \n

            \n \"arn:aws:sagemaker:region:account-id:endpoint/endpointName\"\n

            \n

            \n \"arn:aws:sagemaker:region:account-id:endpoint-config/endpointConfigName\"\n

            \n

            \n ]\n

            \n

            For more information, see Amazon SageMaker API Permissions: Actions, Permissions, and Resources Reference.

            \n
          • \n
          \n \n
          " + "smithy.api#length": { + "min": 0, + "max": 20 + } } }, - "com.amazonaws.sagemaker#CreateEndpointConfig": { + "com.amazonaws.sagemaker#CreateAction": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateEndpointConfigInput" + "target": "com.amazonaws.sagemaker#CreateActionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateEndpointConfigOutput" + "target": "com.amazonaws.sagemaker#CreateActionResponse" }, "errors": [ { @@ -3254,157 +3596,153 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates an endpoint configuration that Amazon SageMaker hosting services uses to deploy models. In\n the configuration, you identify one or more models, created using the\n CreateModel API, to deploy and the resources that you want Amazon SageMaker to\n provision. Then you call the CreateEndpoint API.

          \n \n

          Use this API if you want to use Amazon SageMaker hosting services to deploy models into\n production.

          \n
          \n

          In the request, you define a ProductionVariant, for each model that you\n want to deploy. Each ProductionVariant parameter also describes the\n resources that you want Amazon SageMaker to provision. This includes the number and type of ML\n compute instances to deploy.

          \n

          If you are hosting multiple models, you also assign a VariantWeight to\n specify how much traffic you want to allocate to each model. For example, suppose that\n you want to host two models, A and B, and you assign traffic weight 2 for model A and 1\n for model B. Amazon SageMaker distributes two-thirds of the traffic to Model A, and one-third to\n model B.

          \n

          For an example that calls this method when deploying a model to Amazon SageMaker hosting services,\n see Deploy the\n Model to Amazon SageMaker Hosting Services (AWS SDK for Python (Boto\n 3)).\n

          \n \n

          When you call CreateEndpoint, a load call is made to DynamoDB to\n verify that your endpoint configuration exists. When you read data from a DynamoDB\n table supporting \n Eventually Consistent Reads\n , the response might not\n reflect the results of a recently completed write operation. The response might\n include some stale data. If the dependent entities are not yet in DynamoDB, this\n causes a validation error. If you repeat your read request after a short time, the\n response should return the latest data. So retry logic is recommended to handle\n these possible issues. We also recommend that customers call DescribeEndpointConfig before calling CreateEndpoint to minimize the potential impact of a DynamoDB eventually consistent read.

          \n
          " + "smithy.api#documentation": "

          Creates an action. An action is a lineage tracking entity that\n represents an action or activity. For example, a model deployment or an HPO job.\n Generally, an action involves at least one input or output artifact. For more information, see\n Amazon SageMaker\n ML Lineage Tracking.

          " } }, - "com.amazonaws.sagemaker#CreateEndpointConfigInput": { + "com.amazonaws.sagemaker#CreateActionRequest": { "type": "structure", "members": { - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", + "ActionName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the endpoint configuration. You specify this name in a CreateEndpoint request.

          ", + "smithy.api#documentation": "

          The name of the action. Must be unique to your account in an AWS Region.

          ", "smithy.api#required": {} } }, - "ProductionVariants": { - "target": "com.amazonaws.sagemaker#ProductionVariantList", + "Source": { + "target": "com.amazonaws.sagemaker#ActionSource", "traits": { - "smithy.api#documentation": "

          A list of ProductionVariant objects, one for each model that you want\n to host at this endpoint.

          ", + "smithy.api#documentation": "

          The source type, ID, and URI.

          ", "smithy.api#required": {} } }, - "DataCaptureConfig": { - "target": "com.amazonaws.sagemaker#DataCaptureConfig" - }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "ActionType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          A list of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + "smithy.api#documentation": "

          The action type.

          ", + "smithy.api#required": {} } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a AWS Key Management Service key that Amazon SageMaker uses to encrypt data on\n the storage volume attached to the ML compute instance that hosts the endpoint.

          \n

          The KmsKeyId can be any of the following formats:

          \n
            \n
          • \n

            Key ID: 1234abcd-12ab-34cd-56ef-1234567890ab\n

            \n
          • \n
          • \n

            Key ARN:\n arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\n

            \n
          • \n
          • \n

            Alias name: alias/ExampleAlias\n

            \n
          • \n
          • \n

            Alias name ARN:\n arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\n

            \n
          • \n
          \n

          The KMS key policy must grant permission to the IAM role that you specify in your\n CreateEndpoint, UpdateEndpoint requests. For more\n information, refer to the AWS Key Management Service section Using Key\n Policies in AWS KMS \n

          \n \n

          Certain Nitro-based instances include local storage, dependent on the instance\n type. Local storage volumes are encrypted using a hardware module on the instance.\n You can't request a KmsKeyId when using an instance type with local\n storage. If any of the models that you specify in the\n ProductionVariants parameter use nitro-based instances with local\n storage, do not specify a value for the KmsKeyId parameter. If you\n specify a value for KmsKeyId when using any nitro-based instances with\n local storage, the call to CreateEndpointConfig fails.

          \n

          For a list of instance types that support local instance storage, see Instance Store Volumes.

          \n

          For more information about local instance storage encryption, see SSD\n Instance Store Volumes.

          \n
          " - } - } - } - }, - "com.amazonaws.sagemaker#CreateEndpointConfigOutput": { - "type": "structure", - "members": { - "EndpointConfigArn": { - "target": "com.amazonaws.sagemaker#EndpointConfigArn", + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint configuration.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The description of the action.

          " } - } - } - }, - "com.amazonaws.sagemaker#CreateEndpointInput": { - "type": "structure", - "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + }, + "Status": { + "target": "com.amazonaws.sagemaker#ActionStatus", "traits": { - "smithy.api#documentation": "

          The name of the endpoint. The name must be unique within an AWS Region in your AWS account. The name is case-insensitive in CreateEndpoint, \n but the case is preserved and must be matched in .

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status of the action.

          " } }, - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", "traits": { - "smithy.api#documentation": "

          The name of an endpoint configuration. For more information, see\n CreateEndpointConfig.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of properties to add to the action.

          " } }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.\n \n

          " + "smithy.api#documentation": "

          A list of tags to apply to the action.

          " } } } }, - "com.amazonaws.sagemaker#CreateEndpointOutput": { + "com.amazonaws.sagemaker#CreateActionResponse": { "type": "structure", "members": { - "EndpointArn": { - "target": "com.amazonaws.sagemaker#EndpointArn", + "ActionArn": { + "target": "com.amazonaws.sagemaker#ActionArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the action.

          " } } } }, - "com.amazonaws.sagemaker#CreateExperiment": { + "com.amazonaws.sagemaker#CreateAlgorithm": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateExperimentRequest" + "target": "com.amazonaws.sagemaker#CreateAlgorithmInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateExperimentResponse" + "target": "com.amazonaws.sagemaker#CreateAlgorithmOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], "traits": { - "smithy.api#documentation": "

          Creates a SageMaker experiment. An experiment is a collection of\n trials that are observed, compared and evaluated as a group. A trial is\n a set of steps, called trial components, that produce a machine learning\n model.

          \n

          The goal of an experiment is to determine the components that produce the best model.\n Multiple trials are performed, each one isolating and measuring the impact of a change to one\n or more inputs, while keeping the remaining inputs constant.

          \n

          When you use Amazon SageMaker Studio or the Amazon SageMaker Python SDK, all experiments, trials, and trial\n components are automatically tracked, logged, and indexed. When you use the AWS SDK for Python (Boto), you\n must use the logging APIs provided by the SDK.

          \n

          You can add tags to experiments, trials, trial components and then use the Search API to search for the tags.

          \n

          To add a description to an experiment, specify the optional Description\n parameter. To add a description later, or to change the description, call the UpdateExperiment API.

          \n

          To get a list of all your experiments, call the ListExperiments API. To\n view an experiment's properties, call the DescribeExperiment API. To get a\n list of all the trials associated with an experiment, call the ListTrials\n API. To create a trial call the CreateTrial API.

          " + "smithy.api#documentation": "

          Create a machine learning algorithm that you can use in Amazon SageMaker and list in the AWS\n Marketplace.

          " } }, - "com.amazonaws.sagemaker#CreateExperimentRequest": { + "com.amazonaws.sagemaker#CreateAlgorithmInput": { "type": "structure", "members": { - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "AlgorithmName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the experiment. The name must be unique in your AWS account and is not\n case-sensitive.

          ", + "smithy.api#documentation": "

          The name of the algorithm.

          ", "smithy.api#required": {} } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "AlgorithmDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The name of the experiment as displayed. The name doesn't need to be unique. If you don't\n specify DisplayName, the value in ExperimentName is\n displayed.

          " + "smithy.api#documentation": "

          A description of the algorithm.

          " } }, - "Description": { - "target": "com.amazonaws.sagemaker#ExperimentDescription", + "TrainingSpecification": { + "target": "com.amazonaws.sagemaker#TrainingSpecification", "traits": { - "smithy.api#documentation": "

          The description of the experiment.

          " + "smithy.api#documentation": "

          Specifies details about training jobs run by this algorithm, including the\n following:

          \n
            \n
          • \n

            The Amazon ECR path of the container and the version digest of the\n algorithm.

            \n
          • \n
          • \n

            The hyperparameters that the algorithm supports.

            \n
          • \n
          • \n

            The instance types that the algorithm supports for training.

            \n
          • \n
          • \n

            Whether the algorithm supports distributed training.

            \n
          • \n
          • \n

            The metrics that the algorithm emits to Amazon CloudWatch.

            \n
          • \n
          • \n

            Which metrics that the algorithm emits can be used as the objective metric for\n hyperparameter tuning jobs.

            \n
          • \n
          • \n

            The input channels that the algorithm supports for training data. For example,\n an algorithm might support train, validation, and\n test channels.

            \n
          • \n
          ", + "smithy.api#required": {} + } + }, + "InferenceSpecification": { + "target": "com.amazonaws.sagemaker#InferenceSpecification", + "traits": { + "smithy.api#documentation": "

          Specifies details about inference jobs that the algorithm runs, including the\n following:

          \n
            \n
          • \n

            The Amazon ECR paths of containers that contain the inference code and model\n artifacts.

            \n
          • \n
          • \n

            The instance types that the algorithm supports for transform jobs and\n real-time endpoints used for inference.

            \n
          • \n
          • \n

            The input and output content formats that the algorithm supports for\n inference.

            \n
          • \n
          " + } + }, + "ValidationSpecification": { + "target": "com.amazonaws.sagemaker#AlgorithmValidationSpecification", + "traits": { + "smithy.api#documentation": "

          Specifies configurations for one or more training jobs and that Amazon SageMaker runs to test the\n algorithm's training code and, optionally, one or more batch transform jobs that Amazon SageMaker\n runs to test the algorithm's inference code.

          " + } + }, + "CertifyForMarketplace": { + "target": "com.amazonaws.sagemaker#CertifyForMarketplace", + "traits": { + "smithy.api#documentation": "

          Whether to certify the algorithm so that it can be listed in AWS Marketplace.

          " } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          A list of tags to associate with the experiment. You can use Search API\n to search on the tags.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } } } }, - "com.amazonaws.sagemaker#CreateExperimentResponse": { + "com.amazonaws.sagemaker#CreateAlgorithmOutput": { "type": "structure", "members": { - "ExperimentArn": { - "target": "com.amazonaws.sagemaker#ExperimentArn", + "AlgorithmArn": { + "target": "com.amazonaws.sagemaker#AlgorithmArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the new algorithm.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateFlowDefinition": { + "com.amazonaws.sagemaker#CreateApp": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateFlowDefinitionRequest" + "target": "com.amazonaws.sagemaker#CreateAppRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateFlowDefinitionResponse" + "target": "com.amazonaws.sagemaker#CreateAppResponse" }, "errors": [ { @@ -3415,135 +3753,192 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a flow definition.

          " + "smithy.api#documentation": "

          Creates a running App for the specified UserProfile. Supported Apps are JupyterServer\n \n and KernelGateway. This operation is automatically invoked by Amazon SageMaker Studio\n upon access to the associated Domain, and when new kernel configurations are selected by the user.\n \n A user may have multiple Apps active simultaneously.

          " } }, - "com.amazonaws.sagemaker#CreateFlowDefinitionRequest": { + "com.amazonaws.sagemaker#CreateAppImageConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateAppImageConfigRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateAppImageConfigResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a configuration for running a SageMaker image as a KernelGateway app. The\n configuration specifies the Amazon Elastic File System (EFS) storage volume on the image, and a list of the\n kernels in the image.

          " + } + }, + "com.amazonaws.sagemaker#CreateAppImageConfigRequest": { "type": "structure", "members": { - "FlowDefinitionName": { - "target": "com.amazonaws.sagemaker#FlowDefinitionName", + "AppImageConfigName": { + "target": "com.amazonaws.sagemaker#AppImageConfigName", "traits": { - "smithy.api#documentation": "

          The name of your flow definition.

          ", + "smithy.api#documentation": "

          The name of the AppImageConfig. Must be unique to your account.

          ", "smithy.api#required": {} } }, - "HumanLoopRequestSource": { - "target": "com.amazonaws.sagemaker#HumanLoopRequestSource", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          Container for configuring the source of human task requests. Use to specify if\n Amazon Rekognition or Amazon Textract is used as an integration source.

          " + "smithy.api#documentation": "

          A list of tags to apply to the AppImageConfig.

          " } }, - "HumanLoopActivationConfig": { - "target": "com.amazonaws.sagemaker#HumanLoopActivationConfig", - "traits": { - "smithy.api#documentation": "

          An object containing information about the events that trigger a human workflow.

          " + "KernelGatewayImageConfig": { + "target": "com.amazonaws.sagemaker#KernelGatewayImageConfig", + "traits": { + "smithy.api#documentation": "

          The KernelGatewayImageConfig.

          " + } + } + } + }, + "com.amazonaws.sagemaker#CreateAppImageConfigResponse": { + "type": "structure", + "members": { + "AppImageConfigArn": { + "target": "com.amazonaws.sagemaker#AppImageConfigArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AppImageConfig.

          " + } + } + } + }, + "com.amazonaws.sagemaker#CreateAppRequest": { + "type": "structure", + "members": { + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", + "traits": { + "smithy.api#documentation": "

          The domain ID.

          ", + "smithy.api#required": {} } }, - "HumanLoopConfig": { - "target": "com.amazonaws.sagemaker#HumanLoopConfig", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          An object containing information about the tasks the human reviewers will perform.

          ", + "smithy.api#documentation": "

          The user profile name.

          ", "smithy.api#required": {} } }, - "OutputConfig": { - "target": "com.amazonaws.sagemaker#FlowDefinitionOutputConfig", + "AppType": { + "target": "com.amazonaws.sagemaker#AppType", "traits": { - "smithy.api#documentation": "

          An object containing information about where the human review results will be uploaded.

          ", + "smithy.api#documentation": "

          The type of app.

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "AppName": { + "target": "com.amazonaws.sagemaker#AppName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the role needed to call other services on your behalf. For example, arn:aws:iam::1234567890:role/service-role/AmazonSageMaker-ExecutionRole-20180111T151298.

          ", + "smithy.api#documentation": "

          The name of the app.

          ", "smithy.api#required": {} } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs that contain metadata to help you categorize and organize a flow definition. Each tag consists of a key and a value, both of which you define.

          " + "smithy.api#documentation": "

          Each tag consists of a key and an optional value.\n Tag keys must be unique per resource.

          " + } + }, + "ResourceSpec": { + "target": "com.amazonaws.sagemaker#ResourceSpec", + "traits": { + "smithy.api#documentation": "

          The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance.

          " } } } }, - "com.amazonaws.sagemaker#CreateFlowDefinitionResponse": { + "com.amazonaws.sagemaker#CreateAppResponse": { "type": "structure", "members": { - "FlowDefinitionArn": { - "target": "com.amazonaws.sagemaker#FlowDefinitionArn", + "AppArn": { + "target": "com.amazonaws.sagemaker#AppArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition you create.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the app.

          " } } } }, - "com.amazonaws.sagemaker#CreateHumanTaskUi": { + "com.amazonaws.sagemaker#CreateArtifact": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateHumanTaskUiRequest" + "target": "com.amazonaws.sagemaker#CreateArtifactRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateHumanTaskUiResponse" + "target": "com.amazonaws.sagemaker#CreateArtifactResponse" }, "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Defines the settings you will use for the human review workflow user interface. Reviewers will see a three-panel interface with an instruction area, the item to review, and an input area.

          " + "smithy.api#documentation": "

          Creates an artifact. An artifact is a lineage tracking entity that\n represents a URI addressable object or data. Some examples are the S3 URI of a dataset and\n the ECR registry path of an image. For more information, see\n Amazon SageMaker\n ML Lineage Tracking.

          " } }, - "com.amazonaws.sagemaker#CreateHumanTaskUiRequest": { + "com.amazonaws.sagemaker#CreateArtifactRequest": { "type": "structure", "members": { - "HumanTaskUiName": { - "target": "com.amazonaws.sagemaker#HumanTaskUiName", + "ArtifactName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the user interface you are creating.

          ", + "smithy.api#documentation": "

          The name of the artifact. Must be unique to your account in an AWS Region.

          " + } + }, + "Source": { + "target": "com.amazonaws.sagemaker#ArtifactSource", + "traits": { + "smithy.api#documentation": "

          The ID, ID type, and URI of the source.

          ", "smithy.api#required": {} } }, - "UiTemplate": { - "target": "com.amazonaws.sagemaker#UiTemplate", + "ArtifactType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { + "smithy.api#documentation": "

          The artifact type.

          ", "smithy.api#required": {} } }, + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", + "traits": { + "smithy.api#documentation": "

          A list of properties to add to the artifact.

          " + } + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs that contain metadata to help you categorize and organize a human review workflow user interface. Each tag consists of a key and a value, both of which you define.

          " + "smithy.api#documentation": "

          A list of tags to apply to the artifact.

          " } } } }, - "com.amazonaws.sagemaker#CreateHumanTaskUiResponse": { + "com.amazonaws.sagemaker#CreateArtifactResponse": { "type": "structure", "members": { - "HumanTaskUiArn": { - "target": "com.amazonaws.sagemaker#HumanTaskUiArn", + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human review workflow user interface you create.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact.

          " } } } }, - "com.amazonaws.sagemaker#CreateHyperParameterTuningJob": { + "com.amazonaws.sagemaker#CreateAutoMLJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateHyperParameterTuningJobRequest" + "target": "com.amazonaws.sagemaker#CreateAutoMLJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateHyperParameterTuningJobResponse" + "target": "com.amazonaws.sagemaker#CreateAutoMLJobResponse" }, "errors": [ { @@ -3554,139 +3949,140 @@ } ], "traits": { - "smithy.api#documentation": "

          Starts a hyperparameter tuning job. A hyperparameter tuning job finds the best version\n of a model by running many training jobs on your dataset using the algorithm you choose\n and values for hyperparameters within ranges that you specify. It then chooses the\n hyperparameter values that result in a model that performs the best, as measured by an\n objective metric that you choose.

          " + "smithy.api#documentation": "

          Creates an Autopilot job.

          \n

          Find the best performing model after you run an Autopilot job by calling . Deploy that model by following the steps described in\n Step 6.1:\n Deploy the Model to Amazon SageMaker Hosting Services.

          \n

          For information about how to use Autopilot, see Automate Model\n Development with Amazon SageMaker Autopilot.

          " } }, - "com.amazonaws.sagemaker#CreateHyperParameterTuningJobRequest": { + "com.amazonaws.sagemaker#CreateAutoMLJobRequest": { "type": "structure", "members": { - "HyperParameterTuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "AutoMLJobName": { + "target": "com.amazonaws.sagemaker#AutoMLJobName", "traits": { - "smithy.api#documentation": "

          The name of the tuning job. This name is the prefix for the names of all training jobs\n that this tuning job launches. The name must be unique within the same AWS account and\n AWS Region. The name must have 1 to 32 characters. Valid characters are a-z, A-Z,\n 0-9, and : + = @ _ % - (hyphen). The name is not case sensitive.

          ", + "smithy.api#documentation": "

          Identifies an Autopilot job. Must be unique to your account and is case-insensitive.

          ", "smithy.api#required": {} } }, - "HyperParameterTuningJobConfig": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobConfig", + "InputDataConfig": { + "target": "com.amazonaws.sagemaker#AutoMLInputDataConfig", "traits": { - "smithy.api#documentation": "

          The HyperParameterTuningJobConfig object that describes the tuning\n job, including the search strategy, the objective metric used to evaluate training jobs,\n ranges of parameters to search, and resource limits for the tuning job. For more\n information, see How\n Hyperparameter Tuning Works.

          ", + "smithy.api#documentation": "

          Similar to InputDataConfig supported by Tuning. Format(s) supported: CSV. Minimum of 500\n rows.

          ", "smithy.api#required": {} } }, - "TrainingJobDefinition": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition", + "OutputDataConfig": { + "target": "com.amazonaws.sagemaker#AutoMLOutputDataConfig", "traits": { - "smithy.api#documentation": "

          The HyperParameterTrainingJobDefinition object that describes the\n training jobs that this tuning job launches,\n including\n static hyperparameters, input data configuration, output data configuration, resource\n configuration, and stopping condition.

          " + "smithy.api#documentation": "

          Similar to OutputDataConfig supported by Tuning. Format(s) supported: CSV.

          ", + "smithy.api#required": {} } }, - "TrainingJobDefinitions": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitions", + "ProblemType": { + "target": "com.amazonaws.sagemaker#ProblemType", "traits": { - "smithy.api#documentation": "

          A list of the HyperParameterTrainingJobDefinition objects launched\n for this tuning job.

          " + "smithy.api#documentation": "

          Defines the kind of preprocessing and algorithms intended for the candidates. Options\n include: BinaryClassification, MulticlassClassification, and Regression.

          " } }, - "WarmStartConfig": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartConfig", + "AutoMLJobObjective": { + "target": "com.amazonaws.sagemaker#AutoMLJobObjective", "traits": { - "smithy.api#documentation": "

          Specifies the configuration for starting the hyperparameter tuning job using one or\n more previous tuning jobs as a starting point. The results of previous tuning jobs are\n used to inform which combinations of hyperparameters to search over in the new tuning\n job.

          \n

          All training jobs launched by the new hyperparameter tuning job are evaluated by using\n the objective metric. If you specify IDENTICAL_DATA_AND_ALGORITHM as the\n WarmStartType value for the warm start configuration, the training job\n that performs the best in the new tuning job is compared to the best training jobs from\n the parent tuning jobs. From these, the training job that performs the best as measured\n by the objective metric is returned as the overall best training job.

          \n \n

          All training jobs launched by parent hyperparameter tuning jobs and the new\n hyperparameter tuning jobs count against the limit of training jobs for the tuning\n job.

          \n
          " + "smithy.api#documentation": "

          Defines the objective of an AutoML job. You provide an AutoMLJobObjective$MetricName and Autopilot infers whether to minimize or\n maximize it. If a metric is not specified, the most commonly used ObjectiveMetric for the\n problem type is automatically selected.

          " + } + }, + "AutoMLJobConfig": { + "target": "com.amazonaws.sagemaker#AutoMLJobConfig", + "traits": { + "smithy.api#documentation": "

          Contains CompletionCriteria and SecurityConfig.

          " + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The ARN of the role that is used to access the data.

          ", + "smithy.api#required": {} + } + }, + "GenerateCandidateDefinitionsOnly": { + "target": "com.amazonaws.sagemaker#GenerateCandidateDefinitionsOnly", + "traits": { + "smithy.api#documentation": "

          Generates possible candidates without training a model. A candidate is a combination of\n data preprocessors, algorithms, and algorithm parameter settings.

          " } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see AWS\n Tagging Strategies.

          \n

          Tags that you specify for the tuning job are also added to all training jobs that the\n tuning job launches.

          " + "smithy.api#documentation": "

          Each tag consists of a key and an optional value. Tag keys must be unique per\n resource.

          " } } } }, - "com.amazonaws.sagemaker#CreateHyperParameterTuningJobResponse": { + "com.amazonaws.sagemaker#CreateAutoMLJobResponse": { "type": "structure", "members": { - "HyperParameterTuningJobArn": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", + "AutoMLJobArn": { + "target": "com.amazonaws.sagemaker#AutoMLJobArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the tuning job. Amazon SageMaker assigns an ARN to a\n hyperparameter tuning job when you create it.

          ", + "smithy.api#documentation": "

          When a job is created, it is assigned a unique ARN.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateImage": { + "com.amazonaws.sagemaker#CreateCodeRepository": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateImageRequest" + "target": "com.amazonaws.sagemaker#CreateCodeRepositoryInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateImageResponse" + "target": "com.amazonaws.sagemaker#CreateCodeRepositoryOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], "traits": { - "smithy.api#documentation": "

          Creates a custom SageMaker image. A SageMaker image is a set of image versions. Each image\n version represents a container image stored in Amazon Container Registry (ECR). For more information, see\n Bring your own SageMaker image.

          " + "smithy.api#documentation": "

          Creates a Git repository as a resource in your Amazon SageMaker account. You can associate the\n repository with notebook instances so that you can use Git source control for the\n notebooks you create. The Git repository is a resource in your Amazon SageMaker account, so it can\n be associated with more than one notebook instance, and it persists independently from\n the lifecycle of any notebook instances it is associated with.

          \n

          The repository can be hosted either in AWS CodeCommit or in any\n other Git repository.

          " } }, - "com.amazonaws.sagemaker#CreateImageRequest": { + "com.amazonaws.sagemaker#CreateCodeRepositoryInput": { "type": "structure", "members": { - "Description": { - "target": "com.amazonaws.sagemaker#ImageDescription", - "traits": { - "smithy.api#documentation": "

          The description of the image.

          " - } - }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ImageDisplayName", - "traits": { - "smithy.api#documentation": "

          The display name of the image. If not provided, ImageName is displayed.

          " - } - }, - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "CodeRepositoryName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the image. Must be unique to your account.

          ", + "smithy.api#documentation": "

          The name of the Git repository. The name must have 1 to 63 characters. Valid\n characters are a-z, A-Z, 0-9, and - (hyphen).

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "GitConfig": { + "target": "com.amazonaws.sagemaker#GitConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

          ", + "smithy.api#documentation": "

          Specifies details about the repository, including the URL where the repository is\n located, the default branch, and credentials to use to access the repository.

          ", "smithy.api#required": {} } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          A list of tags to apply to the image.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } } } }, - "com.amazonaws.sagemaker#CreateImageResponse": { + "com.amazonaws.sagemaker#CreateCodeRepositoryOutput": { "type": "structure", "members": { - "ImageArn": { - "target": "com.amazonaws.sagemaker#ImageArn", + "CodeRepositoryArn": { + "target": "com.amazonaws.sagemaker#CodeRepositoryArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the new repository.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateImageVersion": { + "com.amazonaws.sagemaker#CreateCompilationJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateImageVersionRequest" + "target": "com.amazonaws.sagemaker#CreateCompilationJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateImageVersionResponse" + "target": "com.amazonaws.sagemaker#CreateCompilationJobResponse" }, "errors": [ { @@ -3694,371 +4090,370 @@ }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - }, - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Creates a version of the SageMaker image specified by ImageName. The version\n represents the Amazon Container Registry (ECR) container image specified by BaseImage.

          " + "smithy.api#documentation": "

          Starts a model compilation job. After the model has been compiled, Amazon SageMaker saves the\n resulting model artifacts to an Amazon Simple Storage Service (Amazon S3) bucket that you specify.

          \n

          If\n you choose to host your model using Amazon SageMaker hosting services, you can use the resulting\n model artifacts as part of the model. You can also use the artifacts with\n AWS\n IoT Greengrass. In that case, deploy them as an ML\n resource.

          \n

          In the request body, you provide the following:

          \n
            \n
          • \n

            A name for the compilation job

            \n
          • \n
          • \n

            Information about the input model artifacts

            \n
          • \n
          • \n

            The output location for the compiled model and the device (target) that the\n model runs on

            \n
          • \n
          • \n

            The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker assumes to perform\n the model compilation job.

            \n
          • \n
          \n

          You can also provide a Tag to track the model compilation job's resource\n use and costs. The response body contains the\n CompilationJobArn\n for the compiled job.

          \n

          To stop a model compilation job, use StopCompilationJob. To get\n information about a particular model compilation job, use DescribeCompilationJob. To get information about multiple model\n compilation jobs, use ListCompilationJobs.

          " } }, - "com.amazonaws.sagemaker#CreateImageVersionRequest": { + "com.amazonaws.sagemaker#CreateCompilationJobRequest": { "type": "structure", "members": { - "BaseImage": { - "target": "com.amazonaws.sagemaker#ImageBaseImage", + "CompilationJobName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The registry path of the container image to use as the starting point for this\n version. The path is an Amazon Container Registry (ECR) URI in the following format:

          \n

          \n .dkr.ecr..amazonaws.com/\n

          ", + "smithy.api#documentation": "

          A name for the model compilation job. The name must be unique within the AWS Region\n and within your AWS account.

          ", "smithy.api#required": {} } }, - "ClientToken": { - "target": "com.amazonaws.sagemaker#ClientToken", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          A unique ID. If not specified, the AWS CLI and AWS SDKs, such as the SDK for Python\n (Boto3), add a unique value to the call.

          ", - "smithy.api#idempotencyToken": {}, + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on\n your behalf.

          \n

          During model compilation, Amazon SageMaker needs your permission to:

          \n
            \n
          • \n

            Read input data from an S3 bucket

            \n
          • \n
          • \n

            Write model artifacts to an S3 bucket

            \n
          • \n
          • \n

            Write logs to Amazon CloudWatch Logs

            \n
          • \n
          • \n

            Publish metrics to Amazon CloudWatch

            \n
          • \n
          \n

          You grant permissions for all of these tasks to an IAM role. To pass this role to\n Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For\n more information, see Amazon SageMaker\n Roles.\n

          ", "smithy.api#required": {} } }, - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "InputConfig": { + "target": "com.amazonaws.sagemaker#InputConfig", "traits": { - "smithy.api#documentation": "

          The ImageName of the Image to create a version of.

          ", + "smithy.api#documentation": "

          Provides information about the location of input model artifacts, the name and shape\n of the expected data inputs, and the framework in which the model was trained.

          ", "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#CreateImageVersionResponse": { - "type": "structure", - "members": { - "ImageVersionArn": { - "target": "com.amazonaws.sagemaker#ImageVersionArn", + }, + "OutputConfig": { + "target": "com.amazonaws.sagemaker#OutputConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image version.

          " + "smithy.api#documentation": "

          Provides information about the output location for the compiled model and the target\n device the model runs on.

          ", + "smithy.api#required": {} + } + }, + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#StoppingCondition", + "traits": { + "smithy.api#documentation": "

          Specifies a limit to how long a model compilation job can run. When the job reaches\n the time limit, Amazon SageMaker ends the compilation job. Use this API to cap model training\n costs.

          ", + "smithy.api#required": {} + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } } } }, - "com.amazonaws.sagemaker#CreateLabelingJob": { + "com.amazonaws.sagemaker#CreateCompilationJobResponse": { + "type": "structure", + "members": { + "CompilationJobArn": { + "target": "com.amazonaws.sagemaker#CompilationJobArn", + "traits": { + "smithy.api#documentation": "

          If the action is successful, the service sends back an HTTP 200 response. Amazon SageMaker returns\n the following data in JSON format:

          \n
            \n
          • \n

            \n CompilationJobArn: The Amazon Resource Name (ARN) of the compiled\n job.

            \n
          • \n
          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#CreateContext": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateLabelingJobRequest" + "target": "com.amazonaws.sagemaker#CreateContextRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateLabelingJobResponse" + "target": "com.amazonaws.sagemaker#CreateContextResponse" }, "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Creates a job that uses workers to label the data objects in your input dataset. You\n can use the labeled data to train machine learning models.

          \n

          You can select your workforce from one of three providers:

          \n
            \n
          • \n

            A private workforce that you create. It can include employees, contractors,\n and outside experts. Use a private workforce when want the data to stay within\n your organization or when a specific set of skills is required.

            \n
          • \n
          • \n

            One or more vendors that you select from the AWS Marketplace. Vendors provide\n expertise in specific areas.

            \n
          • \n
          • \n

            The Amazon Mechanical Turk workforce. This is the largest workforce, but it\n should only be used for public data or data that has been stripped of any\n personally identifiable information.

            \n
          • \n
          \n

          You can also use automated data labeling to reduce the number of\n data objects that need to be labeled by a human. Automated data labeling uses\n active learning to determine if a data object can be labeled by\n machine or if it needs to be sent to a human worker. For more information, see Using\n Automated Data Labeling.

          \n

          The data objects to be labeled are contained in an Amazon S3 bucket. You create a\n manifest file that describes the location of each object. For\n more information, see Using Input and Output Data.

          \n

          The output can be used as the manifest file for another labeling job or as training\n data for your machine learning models.

          " + "smithy.api#documentation": "

          Creates a context. A context is a lineage tracking entity that\n represents a logical grouping of other tracking or experiment entities. Some examples are\n an endpoint and a model package. For more information, see\n Amazon SageMaker\n ML Lineage Tracking.

          " } }, - "com.amazonaws.sagemaker#CreateLabelingJobRequest": { + "com.amazonaws.sagemaker#CreateContextRequest": { "type": "structure", "members": { - "LabelingJobName": { - "target": "com.amazonaws.sagemaker#LabelingJobName", - "traits": { - "smithy.api#documentation": "

          The name of the labeling job. This name is used to identify the job in a list of\n labeling jobs.

          ", - "smithy.api#required": {} - } - }, - "LabelAttributeName": { - "target": "com.amazonaws.sagemaker#LabelAttributeName", - "traits": { - "smithy.api#documentation": "

          The attribute name to use for the label in the output manifest file. This is the key\n for the key/value pair formed with the label that a worker assigns to the object. The\n name can't end with \"-metadata\". If you are running a semantic segmentation labeling\n job, the attribute name must end with \"-ref\". If you are running any other kind of\n labeling job, the attribute name must not end with \"-ref\".

          ", - "smithy.api#required": {} - } - }, - "InputConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobInputConfig", + "ContextName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          Input data for the labeling job, such as the Amazon S3 location of the data objects and the\n location of the manifest file that describes the data objects.

          ", + "smithy.api#documentation": "

          The name of the context. Must be unique to your account in an AWS Region.

          ", "smithy.api#required": {} } }, - "OutputConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobOutputConfig", + "Source": { + "target": "com.amazonaws.sagemaker#ContextSource", "traits": { - "smithy.api#documentation": "

          The location of the output data and the AWS Key Management Service key ID for the key used to encrypt\n the output data, if any.

          ", + "smithy.api#documentation": "

          The source type, ID, and URI.

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "ContextType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Number (ARN) that Amazon SageMaker assumes to perform tasks on your behalf\n during data labeling. You must grant this role the necessary permissions so that Amazon SageMaker\n can successfully complete data labeling.

          ", + "smithy.api#documentation": "

          The context type.

          ", "smithy.api#required": {} } }, - "LabelCategoryConfigS3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", - "traits": { - "smithy.api#documentation": "

          The S3 URI of the file that defines the categories used to label the data\n objects.

          \n

          For 3D point cloud task types, see Create a\n Labeling Category Configuration File for 3D Point Cloud Labeling Jobs.

          \n

          For all other built-in task types and custom\n tasks, your label category configuration file must be a JSON file in the\n following format. Identify the labels you want to use by replacing label_1,\n label_2,...,label_n with your label\n categories.

          \n

          \n {\n

          \n

          \n \"document-version\": \"2018-11-28\"\n

          \n

          \n \"labels\": [\n

          \n

          \n {\n

          \n

          \n \"label\": \"label_1\"\n

          \n

          \n },\n

          \n

          \n {\n

          \n

          \n \"label\": \"label_2\"\n

          \n

          \n },\n

          \n

          \n ...\n

          \n

          \n {\n

          \n

          \n \"label\": \"label_n\"\n

          \n

          \n }\n

          \n

          \n ]\n

          \n

          \n }\n

          " - } - }, - "StoppingConditions": { - "target": "com.amazonaws.sagemaker#LabelingJobStoppingConditions", - "traits": { - "smithy.api#documentation": "

          A set of conditions for stopping the labeling job. If any of the conditions are met,\n the job is automatically stopped. You can use these conditions to control the cost of\n data labeling.

          " - } - }, - "LabelingJobAlgorithmsConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobAlgorithmsConfig", + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", "traits": { - "smithy.api#documentation": "

          Configures the information required to perform automated data labeling.

          " + "smithy.api#documentation": "

          The description of the context.

          " } }, - "HumanTaskConfig": { - "target": "com.amazonaws.sagemaker#HumanTaskConfig", + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", "traits": { - "smithy.api#documentation": "

          Configures the labeling task and how it is presented to workers; including, but not limited to price, keywords, and batch size (task count).

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of properties to add to the context.

          " } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " + "smithy.api#documentation": "

          A list of tags to apply to the context.

          " } } } }, - "com.amazonaws.sagemaker#CreateLabelingJobResponse": { + "com.amazonaws.sagemaker#CreateContextResponse": { "type": "structure", "members": { - "LabelingJobArn": { - "target": "com.amazonaws.sagemaker#LabelingJobArn", + "ContextArn": { + "target": "com.amazonaws.sagemaker#ContextArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the labeling job. You use this ARN to identify the\n labeling job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the context.

          " } } } }, - "com.amazonaws.sagemaker#CreateModel": { + "com.amazonaws.sagemaker#CreateDomain": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateModelInput" + "target": "com.amazonaws.sagemaker#CreateDomainRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateModelOutput" + "target": "com.amazonaws.sagemaker#CreateDomainResponse" }, "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Creates a model in Amazon SageMaker. In the request, you name the model and describe a primary\n container. For the primary container,\n you specify the Docker image that contains inference code, artifacts (from prior\n training), and a custom environment map that the inference code uses when you deploy the\n model for predictions.

          \n

          Use this API to create a model if you want to use Amazon SageMaker hosting services or run a batch\n transform job.

          \n

          To host your model, you create an endpoint configuration with the\n CreateEndpointConfig API, and then create an endpoint with the\n CreateEndpoint API. Amazon SageMaker then deploys all of the containers that you\n defined for the model in the hosting environment.

          \n

          For an example that calls this method when deploying a model to Amazon SageMaker hosting services,\n see Deploy the\n Model to Amazon SageMaker Hosting Services (AWS SDK for Python (Boto\n 3)).\n

          \n

          To run a batch transform using your model, you start a job with the\n CreateTransformJob API. Amazon SageMaker uses your model and your dataset to get\n inferences which are then saved to a specified S3 location.

          \n

          In the CreateModel request, you must define a container with the\n PrimaryContainer parameter.

          \n

          In the request, you also provide an IAM role that Amazon SageMaker can assume to access model\n artifacts and docker image for deployment on ML compute hosting instances or for batch\n transform jobs. In addition, you also use the IAM role to manage permissions the\n inference code needs. For example, if the inference code access any other AWS resources,\n you grant necessary permissions via this role.

          " + "smithy.api#documentation": "

          Creates a Domain used by Amazon SageMaker Studio. A domain consists of an associated\n Amazon Elastic File System (EFS) volume, a list of authorized users, and a variety of security, application,\n policy, and Amazon Virtual Private Cloud (VPC) configurations. An AWS account is limited to one domain per region.\n Users within a domain can share notebook files and other artifacts with each other.

          \n\n

          \n EFS storage\n

          \n

          When a domain is created, an EFS volume is created for use by all of the users within the\n domain. Each user receives a private home directory within the EFS volume for notebooks,\n Git repositories, and data files.

          \n

          SageMaker uses the AWS Key Management Service (AWS KMS) to encrypt the EFS volume attached to the domain with\n an AWS managed customer master key (CMK) by default. For more control, you can specify a\n customer managed CMK. For more information, see\n Protect Data at\n Rest Using Encryption.

          \n\n

          \n VPC configuration\n

          \n

          All SageMaker Studio traffic between the domain and the EFS volume is through the specified\n VPC and subnets. For other Studio traffic, you can specify the AppNetworkAccessType\n parameter. AppNetworkAccessType corresponds to the network access type that you\n choose when you onboard to Studio. The following options are available:

          \n
            \n
          • \n

            \n PublicInternetOnly - Non-EFS traffic goes through a VPC managed by\n Amazon SageMaker, which allows internet access. This is the default value.

            \n
          • \n
          • \n

            \n VpcOnly - All Studio traffic is through the specified VPC and subnets.\n Internet access is disabled by default. To allow internet access, you must specify a\n NAT gateway.

            \n

            When internet access is disabled, you won't be able to run a Studio notebook or to\n train or host models unless your VPC has an interface endpoint to the SageMaker API and runtime\n or a NAT gateway and your security groups allow outbound connections.

            \n
          • \n
          \n

          For more information, see\n Connect\n SageMaker Studio Notebooks to Resources in a VPC.

          " } }, - "com.amazonaws.sagemaker#CreateModelInput": { + "com.amazonaws.sagemaker#CreateDomainRequest": { "type": "structure", "members": { - "ModelName": { - "target": "com.amazonaws.sagemaker#ModelName", + "DomainName": { + "target": "com.amazonaws.sagemaker#DomainName", "traits": { - "smithy.api#documentation": "

          The name of the new model.

          ", + "smithy.api#documentation": "

          A name for the domain.

          ", "smithy.api#required": {} } }, - "PrimaryContainer": { - "target": "com.amazonaws.sagemaker#ContainerDefinition", + "AuthMode": { + "target": "com.amazonaws.sagemaker#AuthMode", "traits": { - "smithy.api#documentation": "

          The location of the primary docker image containing inference code, associated\n artifacts, and custom environment map that the inference code uses when the model is\n deployed for predictions.

          " + "smithy.api#documentation": "

          The mode of authentication that members use to access the domain.

          ", + "smithy.api#required": {} } }, - "Containers": { - "target": "com.amazonaws.sagemaker#ContainerDefinitionList", + "DefaultUserSettings": { + "target": "com.amazonaws.sagemaker#UserSettings", "traits": { - "smithy.api#documentation": "

          Specifies the containers in the inference pipeline.

          " + "smithy.api#documentation": "

          The default user settings.

          ", + "smithy.api#required": {} } }, - "ExecutionRoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "SubnetIds": { + "target": "com.amazonaws.sagemaker#Subnets", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access model\n artifacts and docker image for deployment on ML compute instances or for batch transform\n jobs. Deploying on ML compute instances is part of model hosting. For more information,\n see Amazon SageMaker\n Roles.

          \n \n

          To be able to pass this role to Amazon SageMaker, the caller of this API must have the\n iam:PassRole permission.

          \n
          ", + "smithy.api#documentation": "

          The VPC subnets that Studio uses for communication.

          ", + "smithy.api#required": {} + } + }, + "VpcId": { + "target": "com.amazonaws.sagemaker#VpcId", + "traits": { + "smithy.api#documentation": "

          The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.

          ", "smithy.api#required": {} } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.\n

          " + "smithy.api#documentation": "

          Tags to associated with the Domain. Each tag consists of a key and an optional value.\n Tag keys must be unique per resource. Tags are searchable using the\n Search API.

          " } }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig", + "AppNetworkAccessType": { + "target": "com.amazonaws.sagemaker#AppNetworkAccessType", "traits": { - "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that you want your model\n to connect to. Control access to and from your model container by configuring the VPC.\n VpcConfig is used in hosting services and in batch transform. For more\n information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Data in Batch\n Transform Jobs by Using an Amazon Virtual Private Cloud.

          " + "smithy.api#documentation": "

          Specifies the VPC used for non-EFS traffic. The default value is\n PublicInternetOnly.

          \n
            \n
          • \n

            \n PublicInternetOnly - Non-EFS traffic is through a VPC managed by\n Amazon SageMaker, which allows direct internet access

            \n
          • \n
          • \n

            \n VpcOnly - All Studio traffic is through the specified VPC and subnets

            \n
          • \n
          " } }, - "EnableNetworkIsolation": { - "target": "com.amazonaws.sagemaker#Boolean", + "HomeEfsFileSystemKmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          Isolates the model container. No inbound or outbound network calls can be made to or\n from the model container.

          " + "smithy.api#deprecated": { + "message": "This property is deprecated, use KmsKeyId instead." + }, + "smithy.api#documentation": "

          This member is deprecated and replaced with KmsKeyId.

          " + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          SageMaker uses AWS KMS to encrypt the EFS volume attached to the domain with an AWS managed\n customer master key (CMK) by default. For more control, specify a customer managed CMK.

          " } } } }, - "com.amazonaws.sagemaker#CreateModelOutput": { + "com.amazonaws.sagemaker#CreateDomainResponse": { "type": "structure", "members": { - "ModelArn": { - "target": "com.amazonaws.sagemaker#ModelArn", + "DomainArn": { + "target": "com.amazonaws.sagemaker#DomainArn", "traits": { - "smithy.api#documentation": "

          The ARN of the model created in Amazon SageMaker.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the created domain.

          " + } + }, + "Url": { + "target": "com.amazonaws.sagemaker#String1024", + "traits": { + "smithy.api#documentation": "

          The URL to the created domain.

          " } } } }, - "com.amazonaws.sagemaker#CreateModelPackage": { + "com.amazonaws.sagemaker#CreateEndpoint": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateModelPackageInput" + "target": "com.amazonaws.sagemaker#CreateEndpointInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateModelPackageOutput" + "target": "com.amazonaws.sagemaker#CreateEndpointOutput" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ConflictException" - }, + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates an endpoint using the endpoint configuration specified in the request. Amazon SageMaker\n uses the endpoint to provision resources and deploy models. You create the endpoint\n configuration with the CreateEndpointConfig API.

          \n

          Use this API to deploy models using Amazon SageMaker hosting services.

          \n

          For an example that calls this method when deploying a model to Amazon SageMaker hosting services,\n see Deploy the\n Model to Amazon SageMaker Hosting Services (AWS SDK for Python (Boto\n 3)).\n

          \n \n

          You must not delete an EndpointConfig that is in use by an endpoint\n that is live or while the UpdateEndpoint or CreateEndpoint\n operations are being performed on the endpoint. To update an endpoint, you must\n create a new EndpointConfig.

          \n
          \n

          The endpoint name must be unique within an AWS Region in your AWS account.

          \n

          When it receives the request, Amazon SageMaker creates the endpoint, launches the resources (ML\n compute instances), and deploys the model(s) on them.

          \n \n \n

          When you call CreateEndpoint, a load call is made to DynamoDB to\n verify that your endpoint configuration exists. When you read data from a DynamoDB\n table supporting \n Eventually Consistent Reads\n , the response might not\n reflect the results of a recently completed write operation. The response might\n include some stale data. If the dependent entities are not yet in DynamoDB, this\n causes a validation error. If you repeat your read request after a short time, the\n response should return the latest data. So retry logic is recommended to handle\n these possible issues. We also recommend that customers call DescribeEndpointConfig before calling CreateEndpoint to minimize the potential impact of a DynamoDB eventually consistent read.

          \n
          \n

          When Amazon SageMaker receives the request, it sets the endpoint status to\n Creating. After it creates the endpoint, it sets the status to\n InService. Amazon SageMaker can then process incoming requests for inferences. To\n check the status of an endpoint, use the DescribeEndpoint\n API.

          \n

          If any of the models hosted at this endpoint get model data from an Amazon S3 location,\n Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you\n provided. AWS STS is activated in your IAM user account by default. If you previously\n deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For\n more information, see Activating and\n Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User\n Guide.

          \n \n

          \n To add the IAM role policies for using this API operation,\n go to the IAM console, and\n choose Roles in the left navigation pane. Search the IAM role that you want to grant access to use\n the CreateEndpoint and CreateEndpointConfig API operations, add the following policies to the role.\n

          \n
            \n
          • \n

            Option 1: For a full Amazon SageMaker access, search and attach the AmazonSageMakerFullAccess policy.

            \n
          • \n
          • \n

            Option 2: For granting a limited access to an IAM role,\n paste the following Action elements manually into the JSON file of the IAM role:

            \n

            \n \"Action\": [\"sagemaker:CreateEndpoint\", \"sagemaker:CreateEndpointConfig\"]\n

            \n

            \n \"Resource\": [\n

            \n

            \n \"arn:aws:sagemaker:region:account-id:endpoint/endpointName\"\n

            \n

            \n \"arn:aws:sagemaker:region:account-id:endpoint-config/endpointConfigName\"\n

            \n

            \n ]\n

            \n

            For more information, see Amazon SageMaker API Permissions: Actions, Permissions, and Resources Reference.

            \n
          • \n
          \n\n
          " + } + }, + "com.amazonaws.sagemaker#CreateEndpointConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateEndpointConfigInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateEndpointConfigOutput" + }, + "errors": [ { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Creates a model package that you can use to create Amazon SageMaker models or list on AWS\n Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create\n models in Amazon SageMaker.

          \n

          To create a model package by specifying a Docker container that contains your\n inference code and the Amazon S3 location of your model artifacts, provide values for\n InferenceSpecification. To create a model from an algorithm resource\n that you created or subscribed to in AWS Marketplace, provide a value for\n SourceAlgorithmSpecification.

          " + "smithy.api#documentation": "

          Creates an endpoint configuration that Amazon SageMaker hosting services uses to deploy models. In\n the configuration, you identify one or more models, created using the\n CreateModel API, to deploy and the resources that you want Amazon SageMaker to\n provision. Then you call the CreateEndpoint API.

          \n \n

          Use this API if you want to use Amazon SageMaker hosting services to deploy models into\n production.

          \n
          \n

          In the request, you define a ProductionVariant, for each model that you\n want to deploy. Each ProductionVariant parameter also describes the\n resources that you want Amazon SageMaker to provision. This includes the number and type of ML\n compute instances to deploy.

          \n

          If you are hosting multiple models, you also assign a VariantWeight to\n specify how much traffic you want to allocate to each model. For example, suppose that\n you want to host two models, A and B, and you assign traffic weight 2 for model A and 1\n for model B. Amazon SageMaker distributes two-thirds of the traffic to Model A, and one-third to\n model B.

          \n

          For an example that calls this method when deploying a model to Amazon SageMaker hosting services,\n see Deploy the\n Model to Amazon SageMaker Hosting Services (AWS SDK for Python (Boto\n 3)).\n

          \n \n

          When you call CreateEndpoint, a load call is made to DynamoDB to\n verify that your endpoint configuration exists. When you read data from a DynamoDB\n table supporting \n Eventually Consistent Reads\n , the response might not\n reflect the results of a recently completed write operation. The response might\n include some stale data. If the dependent entities are not yet in DynamoDB, this\n causes a validation error. If you repeat your read request after a short time, the\n response should return the latest data. So retry logic is recommended to handle\n these possible issues. We also recommend that customers call DescribeEndpointConfig before calling CreateEndpoint to minimize the potential impact of a DynamoDB eventually consistent read.

          \n
          " } }, - "com.amazonaws.sagemaker#CreateModelPackageInput": { + "com.amazonaws.sagemaker#CreateEndpointConfigInput": { "type": "structure", "members": { - "ModelPackageName": { - "target": "com.amazonaws.sagemaker#EntityName", - "traits": { - "smithy.api#documentation": "

          The name of the model package. The name must have 1 to 63 characters. Valid characters\n are a-z, A-Z, 0-9, and - (hyphen).

          " - } - }, - "ModelPackageDescription": { - "target": "com.amazonaws.sagemaker#EntityDescription", + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          A description of the model package.

          " + "smithy.api#documentation": "

          The name of the endpoint configuration. You specify this name in a CreateEndpoint request.

          ", + "smithy.api#required": {} } }, - "InferenceSpecification": { - "target": "com.amazonaws.sagemaker#InferenceSpecification", + "ProductionVariants": { + "target": "com.amazonaws.sagemaker#ProductionVariantList", "traits": { - "smithy.api#documentation": "

          Specifies details about inference jobs that can be run with models based on this model\n package, including the following:

          \n
            \n
          • \n

            The Amazon ECR paths of containers that contain the inference code and model\n artifacts.

            \n
          • \n
          • \n

            The instance types that the model package supports for transform jobs and\n real-time endpoints used for inference.

            \n
          • \n
          • \n

            The input and output content formats that the model package supports for\n inference.

            \n
          • \n
          " + "smithy.api#documentation": "

          A list of ProductionVariant objects, one for each model that you want\n to host at this endpoint.

          ", + "smithy.api#required": {} } }, - "ValidationSpecification": { - "target": "com.amazonaws.sagemaker#ModelPackageValidationSpecification", - "traits": { - "smithy.api#documentation": "

          Specifies configurations for one or more transform jobs that Amazon SageMaker runs to test the\n model package.

          " - } + "DataCaptureConfig": { + "target": "com.amazonaws.sagemaker#DataCaptureConfig" }, - "SourceAlgorithmSpecification": { - "target": "com.amazonaws.sagemaker#SourceAlgorithmSpecification", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          Details about the algorithm that was used to create the model package.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } }, - "CertifyForMarketplace": { - "target": "com.amazonaws.sagemaker#CertifyForMarketplace", + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          Whether to certify the model package for listing on AWS Marketplace.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a AWS Key Management Service key that Amazon SageMaker uses to encrypt data on\n the storage volume attached to the ML compute instance that hosts the endpoint.

          \n

          The KmsKeyId can be any of the following formats:

          \n
            \n
          • \n

            Key ID: 1234abcd-12ab-34cd-56ef-1234567890ab\n

            \n
          • \n
          • \n

            Key ARN:\n arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\n

            \n
          • \n
          • \n

            Alias name: alias/ExampleAlias\n

            \n
          • \n
          • \n

            Alias name ARN:\n arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\n

            \n
          • \n
          \n

          The KMS key policy must grant permission to the IAM role that you specify in your\n CreateEndpoint, UpdateEndpoint requests. For more\n information, refer to the AWS Key Management Service section Using Key\n Policies in AWS KMS \n

          \n \n

          Certain Nitro-based instances include local storage, dependent on the instance\n type. Local storage volumes are encrypted using a hardware module on the instance.\n You can't request a KmsKeyId when using an instance type with local\n storage. If any of the models that you specify in the\n ProductionVariants parameter use nitro-based instances with local\n storage, do not specify a value for the KmsKeyId parameter. If you\n specify a value for KmsKeyId when using any nitro-based instances with\n local storage, the call to CreateEndpointConfig fails.

          \n

          For a list of instance types that support local instance storage, see Instance Store Volumes.

          \n

          For more information about local instance storage encryption, see SSD\n Instance Store Volumes.

          \n
          " } } } }, - "com.amazonaws.sagemaker#CreateModelPackageOutput": { + "com.amazonaws.sagemaker#CreateEndpointConfigOutput": { "type": "structure", "members": { - "ModelPackageArn": { - "target": "com.amazonaws.sagemaker#ModelPackageArn", + "EndpointConfigArn": { + "target": "com.amazonaws.sagemaker#EndpointConfigArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the new model package.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint configuration.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateMonitoringSchedule": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#CreateMonitoringScheduleRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#CreateMonitoringScheduleResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, - { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - } - ], - "traits": { - "smithy.api#documentation": "

          Creates a schedule that regularly starts Amazon SageMaker Processing Jobs to monitor the data\n captured for an Amazon SageMaker Endpoint.

          " - } - }, - "com.amazonaws.sagemaker#CreateMonitoringScheduleRequest": { + "com.amazonaws.sagemaker#CreateEndpointInput": { "type": "structure", "members": { - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", "traits": { - "smithy.api#documentation": "

          The name of the monitoring schedule. The name must be unique within an AWS Region within\n an AWS account.

          ", + "smithy.api#documentation": "

          The name of the endpoint. The name must be unique within an AWS Region in your AWS account. The name is case-insensitive in CreateEndpoint,\n but the case is preserved and must be matched in .

          ", "smithy.api#required": {} } }, - "MonitoringScheduleConfig": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleConfig", + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          The configuration object that specifies the monitoring schedule and defines the\n monitoring job.

          ", + "smithy.api#documentation": "

          The name of an endpoint configuration. For more information, see\n CreateEndpointConfig.

          ", "smithy.api#required": {} } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          (Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } } } }, - "com.amazonaws.sagemaker#CreateMonitoringScheduleResponse": { + "com.amazonaws.sagemaker#CreateEndpointOutput": { "type": "structure", "members": { - "MonitoringScheduleArn": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", + "EndpointArn": { + "target": "com.amazonaws.sagemaker#EndpointArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateNotebookInstance": { + "com.amazonaws.sagemaker#CreateExperiment": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateNotebookInstanceInput" + "target": "com.amazonaws.sagemaker#CreateExperimentRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateNotebookInstanceOutput" + "target": "com.amazonaws.sagemaker#CreateExperimentResponse" }, "errors": [ { @@ -4066,265 +4461,366 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates an Amazon SageMaker notebook instance. A notebook instance is a machine learning (ML)\n compute instance running on a Jupyter notebook.

          \n

          In a CreateNotebookInstance request, specify the type of ML compute\n instance that you want to run. Amazon SageMaker launches the instance, installs common libraries\n that you can use to explore datasets for model training, and attaches an ML storage\n volume to the notebook instance.

          \n

          Amazon SageMaker also provides a set of example notebooks. Each notebook demonstrates how to\n use Amazon SageMaker with a specific algorithm or with a machine learning framework.

          \n

          After receiving the request, Amazon SageMaker does the following:

          \n
            \n
          1. \n

            Creates a network interface in the Amazon SageMaker VPC.

            \n
          2. \n
          3. \n

            (Option) If you specified SubnetId, Amazon SageMaker creates a network\n interface in your own VPC, which is inferred from the subnet ID that you provide\n in the input. When creating this network interface, Amazon SageMaker attaches the security\n group that you specified in the request to the network interface that it creates\n in your VPC.

            \n \n
          4. \n
          5. \n

            Launches an EC2 instance of the type specified in the request in the Amazon SageMaker\n VPC. If you specified SubnetId of your VPC, Amazon SageMaker specifies both\n network interfaces when launching this instance. This enables inbound traffic\n from your own VPC to the notebook instance, assuming that the security groups\n allow it.

            \n
          6. \n
          \n \n

          After creating the notebook instance, Amazon SageMaker returns its Amazon Resource Name (ARN).\n You can't change the name of a notebook instance after you create it.

          \n

          After Amazon SageMaker creates the notebook instance, you can connect to the Jupyter server and\n work in Jupyter notebooks. For example, you can write code to explore a dataset that you\n can use for model training, train a model, host models by creating Amazon SageMaker endpoints, and\n validate hosted models.

          \n

          For more information, see How It Works.

          " + "smithy.api#documentation": "

          Creates an SageMaker experiment. An experiment is a collection of\n trials that are observed, compared and evaluated as a group. A trial is\n a set of steps, called trial components, that produce a machine learning\n model.

          \n

          The goal of an experiment is to determine the components that produce the best model.\n Multiple trials are performed, each one isolating and measuring the impact of a change to one\n or more inputs, while keeping the remaining inputs constant.

          \n

          When you use Amazon SageMaker Studio or the Amazon SageMaker Python SDK, all experiments, trials, and trial\n components are automatically tracked, logged, and indexed. When you use the AWS SDK for Python (Boto), you\n must use the logging APIs provided by the SDK.

          \n

          You can add tags to experiments, trials, trial components and then use the Search API to search for the tags.

          \n

          To add a description to an experiment, specify the optional Description\n parameter. To add a description later, or to change the description, call the UpdateExperiment API.

          \n

          To get a list of all your experiments, call the ListExperiments API. To\n view an experiment's properties, call the DescribeExperiment API. To get a\n list of all the trials associated with an experiment, call the ListTrials\n API. To create a trial call the CreateTrial API.

          " } }, - "com.amazonaws.sagemaker#CreateNotebookInstanceInput": { + "com.amazonaws.sagemaker#CreateExperimentRequest": { "type": "structure", "members": { - "NotebookInstanceName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceName", + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the new notebook instance.

          ", + "smithy.api#documentation": "

          The name of the experiment. The name must be unique in your AWS account and is not\n case-sensitive.

          ", "smithy.api#required": {} } }, - "InstanceType": { - "target": "com.amazonaws.sagemaker#InstanceType", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The type of ML compute instance to launch for the notebook instance.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the experiment as displayed. The name doesn't need to be unique. If you don't\n specify DisplayName, the value in ExperimentName is\n displayed.

          " } }, - "SubnetId": { - "target": "com.amazonaws.sagemaker#SubnetId", + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", "traits": { - "smithy.api#documentation": "

          The ID of the subnet in a VPC to which you would like to have a connectivity from\n your ML compute instance.

          " + "smithy.api#documentation": "

          The description of the experiment.

          " } }, - "SecurityGroupIds": { - "target": "com.amazonaws.sagemaker#SecurityGroupIds", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The VPC security group IDs, in the form sg-xxxxxxxx. The security groups must be\n for the same VPC as specified in the subnet.

          " + "smithy.api#documentation": "

          A list of tags to associate with the experiment. You can use Search API\n to search on the tags.

          " + } + } + } + }, + "com.amazonaws.sagemaker#CreateExperimentResponse": { + "type": "structure", + "members": { + "ExperimentArn": { + "target": "com.amazonaws.sagemaker#ExperimentArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " } + } + } + }, + "com.amazonaws.sagemaker#CreateFeatureGroup": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateFeatureGroupRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateFeatureGroupResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], + "traits": { + "smithy.api#documentation": "

          Create a new FeatureGroup. A FeatureGroup is a group of\n Features defined in the FeatureStore to describe a\n Record.

          \n

          The FeatureGroup defines the schema and features contained in the\n FeatureGroup. A FeatureGroup definition is composed of a list of\n Features, a RecordIdentifierFeatureName, an\n EventTimeFeatureName and configurations for its OnlineStore\n and OfflineStore. Check AWS service quotas to see\n the FeatureGroups quota for your AWS account.

          \n \n

          You must include at least one of OnlineStoreConfig and\n OfflineStoreConfig to create a FeatureGroup.

          \n
          " + } + }, + "com.amazonaws.sagemaker#CreateFeatureGroupRequest": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", "traits": { - "smithy.api#documentation": "

          When you send any requests to AWS resources from the notebook instance, Amazon SageMaker\n assumes this role to perform tasks on your behalf. You must grant this role necessary\n permissions so Amazon SageMaker can perform these tasks. The policy must allow the Amazon SageMaker service\n principal (sagemaker.amazonaws.com) permissions to assume this role. For more\n information, see Amazon SageMaker Roles.

          \n \n

          To be able to pass this role to Amazon SageMaker, the caller of this API must have the\n iam:PassRole permission.

          \n
          ", + "smithy.api#documentation": "

          The name of the FeatureGroup. The name must be unique within an AWS Region\n in an AWS account. The name:

          \n
            \n
          • \n

            Must start and end with an alphanumeric character.

            \n
          • \n
          • \n

            Can only contain alphanumeric character and hyphens. Spaces are not allowed.\n

            \n
          • \n
          ", "smithy.api#required": {} } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "RecordIdentifierFeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a AWS Key Management Service key that Amazon SageMaker uses to encrypt data on\n the storage volume attached to your notebook instance. The KMS key you provide must be\n enabled. For information, see Enabling and Disabling\n Keys in the AWS Key Management Service Developer Guide.

          " + "smithy.api#documentation": "

          The name of the Feature whose value uniquely identifies a\n Record defined in the FeatureStore. Only the latest record per\n identifier value will be stored in the OnlineStore.\n RecordIdentifierFeatureName must be one of feature definitions'\n names.

          \n

          You use the RecordIdentifierFeatureName to access data in a\n FeatureStore.

          \n

          This name:

          \n
            \n
          • \n

            Must start and end with an alphanumeric character.

            \n
          • \n
          • \n

            Can only contain alphanumeric characters, hyphens, underscores. Spaces are not\n allowed.

            \n
          • \n
          ", + "smithy.api#required": {} } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "EventTimeFeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", "traits": { - "smithy.api#documentation": "

          A list of tags to associate with the notebook instance. You can add tags later by\n using the CreateTags API.

          " + "smithy.api#documentation": "

          The name of the feature that stores the EventTime of a Record\n in a FeatureGroup.

          \n

          An EventTime is a point in time when a new event occurs that corresponds to\n the creation or update of a Record in a FeatureGroup. All\n Records in the FeatureGroup must have a corresponding\n EventTime.

          \n

          An EventTime can be a String or Fractional.

          \n
            \n
          • \n

            \n Fractional: EventTime feature values must be a Unix\n timestamp in seconds.

            \n
          • \n
          • \n

            \n String: EventTime feature values must be an ISO-8601\n string in the format. The following formats are supported\n yyyy-MM-dd'T'HH:mm:ssZ and yyyy-MM-dd'T'HH:mm:ss.SSSZ\n where yyyy, MM, and dd represent the year,\n month, and day respectively and HH, mm, ss,\n and if applicable, SSS represent the hour, minute, second and\n milliseconds respectively. 'T' and Z are constants.

            \n
          • \n
          ", + "smithy.api#required": {} } }, - "LifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "FeatureDefinitions": { + "target": "com.amazonaws.sagemaker#FeatureDefinitions", "traits": { - "smithy.api#documentation": "

          The name of a lifecycle configuration to associate with the notebook instance. For\n information about lifecycle configurations, see Step 2.1: (Optional)\n Customize a Notebook Instance.

          " + "smithy.api#documentation": "

          A list of Feature names and types. Name and Type\n is compulsory per Feature.

          \n

          Valid feature FeatureTypes are Integral,\n Fractional and String.

          \n

          \n FeatureNames cannot be any of the following: is_deleted,\n write_time, api_invocation_time\n

          \n

          You can create up to 2,500 FeatureDefinitions per\n FeatureGroup.

          ", + "smithy.api#required": {} } }, - "DirectInternetAccess": { - "target": "com.amazonaws.sagemaker#DirectInternetAccess", + "OnlineStoreConfig": { + "target": "com.amazonaws.sagemaker#OnlineStoreConfig", "traits": { - "smithy.api#documentation": "

          Sets whether Amazon SageMaker provides internet access to the notebook instance. If you set this\n to Disabled this notebook instance will be able to access resources only in\n your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless\n you configure a NAT Gateway in your VPC.

          \n

          For more information, see Notebook Instances Are Internet-Enabled by Default. You can set the value\n of this parameter to Disabled only if you set a value for the\n SubnetId parameter.

          " + "smithy.api#documentation": "

          You can turn the OnlineStore on or off by specifying True for\n the EnableOnlineStore flag in OnlineStoreConfig; the default\n value is False.

          \n

          You can also include an AWS KMS key ID (KMSKeyId) for at-rest encryption of\n the OnlineStore.

          " } }, - "VolumeSizeInGB": { - "target": "com.amazonaws.sagemaker#NotebookInstanceVolumeSizeInGB", + "OfflineStoreConfig": { + "target": "com.amazonaws.sagemaker#OfflineStoreConfig", "traits": { - "smithy.api#documentation": "

          The size, in GB, of the ML storage volume to attach to the notebook instance. The\n default value is 5 GB.

          " + "smithy.api#documentation": "

          Use this to configure an OfflineFeatureStore. This parameter allows you to\n specify:

          \n
            \n
          • \n

            The Amazon Simple Storage Service (Amazon S3) location of an\n OfflineStore.

            \n
          • \n
          • \n

            A configuration for an AWS Glue or AWS Hive data catalogue.

            \n
          • \n
          • \n

            An KMS encryption key to encrypt the Amazon S3 location used for\n OfflineStore.

            \n
          • \n
          \n

          To learn more about this parameter, see OfflineStoreConfig.

          " } }, - "AcceleratorTypes": { - "target": "com.amazonaws.sagemaker#NotebookInstanceAcceleratorTypes", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          A list of Elastic Inference (EI) instance types to associate with this notebook\n instance. Currently, only one instance type can be associated with a notebook instance.\n For more information, see Using Elastic Inference in Amazon SageMaker.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM execution role used to persist data into the\n OfflineStore if an OfflineStoreConfig is provided.

          " } }, - "DefaultCodeRepository": { - "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "Description": { + "target": "com.amazonaws.sagemaker#Description", "traits": { - "smithy.api#documentation": "

          A Git repository to associate with the notebook instance as its default code\n repository. This can be either the name of a Git repository stored as a resource in your\n account, or the URL of a Git repository in AWS CodeCommit or in any\n other Git repository. When you open a notebook instance, it opens in the directory that\n contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker\n Notebook Instances.

          " + "smithy.api#documentation": "

          A free-form description of a FeatureGroup.

          " } }, - "AdditionalCodeRepositories": { - "target": "com.amazonaws.sagemaker#AdditionalCodeRepositoryNamesOrUrls", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of up to three Git repositories to associate with the notebook instance.\n These can be either the names of Git repositories stored as resources in your account,\n or the URL of Git repositories in AWS CodeCommit or in any\n other Git repository. These repositories are cloned at the same level as the default\n repository of your notebook instance. For more information, see Associating Git\n Repositories with Amazon SageMaker Notebook Instances.

          " + "smithy.api#documentation": "

          Tags used to identify Features in each FeatureGroup.

          " } - }, - "RootAccess": { - "target": "com.amazonaws.sagemaker#RootAccess", + } + } + }, + "com.amazonaws.sagemaker#CreateFeatureGroupResponse": { + "type": "structure", + "members": { + "FeatureGroupArn": { + "target": "com.amazonaws.sagemaker#FeatureGroupArn", "traits": { - "smithy.api#documentation": "

          Whether root access is enabled or disabled for users of the notebook instance. The\n default value is Enabled.

          \n \n

          Lifecycle configurations need root access to be able to set up a notebook\n instance. Because of this, lifecycle configurations associated with a notebook\n instance always run with root access even if you disable root access for\n users.

          \n
          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the FeatureGroup. This is a unique\n identifier for the feature group.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfig": { + "com.amazonaws.sagemaker#CreateFlowDefinition": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigInput" + "target": "com.amazonaws.sagemaker#CreateFlowDefinitionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigOutput" + "target": "com.amazonaws.sagemaker#CreateFlowDefinitionResponse" }, "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Creates a lifecycle configuration that you can associate with a notebook instance. A\n lifecycle configuration is a collection of shell scripts that\n run when you create or start a notebook instance.

          \n

          Each lifecycle configuration script has a limit of 16384 characters.

          \n

          The value of the $PATH environment variable that is available to both\n scripts is /sbin:bin:/usr/sbin:/usr/bin.

          \n

          View CloudWatch Logs for notebook instance lifecycle configurations in log group\n /aws/sagemaker/NotebookInstances in log stream\n [notebook-instance-name]/[LifecycleConfigHook].

          \n

          Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs\n for longer than 5 minutes, it fails and the notebook instance is not created or\n started.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " + "smithy.api#documentation": "

          Creates a flow definition.

          " } }, - "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigInput": { + "com.amazonaws.sagemaker#CreateFlowDefinitionRequest": { "type": "structure", "members": { - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "FlowDefinitionName": { + "target": "com.amazonaws.sagemaker#FlowDefinitionName", "traits": { - "smithy.api#documentation": "

          The name of the lifecycle configuration.

          ", + "smithy.api#documentation": "

          The name of your flow definition.

          ", "smithy.api#required": {} } }, - "OnCreate": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "HumanLoopRequestSource": { + "target": "com.amazonaws.sagemaker#HumanLoopRequestSource", "traits": { - "smithy.api#documentation": "

          A shell script that runs only once, when you create a notebook instance. The shell\n script must be a base64-encoded string.

          " + "smithy.api#documentation": "

          Container for configuring the source of human task requests. Use to specify if\n Amazon Rekognition or Amazon Textract is used as an integration source.

          " } }, - "OnStart": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "HumanLoopActivationConfig": { + "target": "com.amazonaws.sagemaker#HumanLoopActivationConfig", "traits": { - "smithy.api#documentation": "

          A shell script that runs every time you start a notebook instance, including when you\n create the notebook instance. The shell script must be a base64-encoded string.

          " + "smithy.api#documentation": "

          An object containing information about the events that trigger a human workflow.

          " } - } - } - }, - "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigOutput": { - "type": "structure", - "members": { - "NotebookInstanceLifecycleConfigArn": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn", + }, + "HumanLoopConfig": { + "target": "com.amazonaws.sagemaker#HumanLoopConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          " + "smithy.api#documentation": "

          An object containing information about the tasks the human reviewers will perform.

          ", + "smithy.api#required": {} + } + }, + "OutputConfig": { + "target": "com.amazonaws.sagemaker#FlowDefinitionOutputConfig", + "traits": { + "smithy.api#documentation": "

          An object containing information about where the human review results will be uploaded.

          ", + "smithy.api#required": {} + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the role needed to call other services on your behalf. For example, arn:aws:iam::1234567890:role/service-role/AmazonSageMaker-ExecutionRole-20180111T151298.

          ", + "smithy.api#required": {} + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          An array of key-value pairs that contain metadata to help you categorize and organize a flow definition. Each tag consists of a key and a value, both of which you define.

          " } } } }, - "com.amazonaws.sagemaker#CreateNotebookInstanceOutput": { + "com.amazonaws.sagemaker#CreateFlowDefinitionResponse": { "type": "structure", "members": { - "NotebookInstanceArn": { - "target": "com.amazonaws.sagemaker#NotebookInstanceArn", + "FlowDefinitionArn": { + "target": "com.amazonaws.sagemaker#FlowDefinitionArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the notebook instance.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition you create.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreatePresignedDomainUrl": { + "com.amazonaws.sagemaker#CreateHumanTaskUi": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreatePresignedDomainUrlRequest" + "target": "com.amazonaws.sagemaker#CreateHumanTaskUiRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreatePresignedDomainUrlResponse" + "target": "com.amazonaws.sagemaker#CreateHumanTaskUiResponse" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ResourceNotFound" + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Creates a URL for a specified UserProfile in a Domain. When accessed in a web browser,\n the user will be automatically signed in to Amazon SageMaker Studio, and granted access to all of\n the Apps and files associated with the Domain's Amazon Elastic File System (EFS) volume.\n This operation can only be called when the authentication mode equals IAM.\n

          \n \n

          The URL that you get from a call to CreatePresignedDomainUrl is valid\n only for 5 minutes. If you try to use the URL after the 5-minute limit expires, you\n are directed to the AWS console sign-in page.

          \n
          " + "smithy.api#documentation": "

          Defines the settings you will use for the human review workflow user interface. Reviewers will see a three-panel interface with an instruction area, the item to review, and an input area.

          " } }, - "com.amazonaws.sagemaker#CreatePresignedDomainUrlRequest": { + "com.amazonaws.sagemaker#CreateHumanTaskUiRequest": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + "HumanTaskUiName": { + "target": "com.amazonaws.sagemaker#HumanTaskUiName", "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", + "smithy.api#documentation": "

          The name of the user interface you are creating.

          ", "smithy.api#required": {} } }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "UiTemplate": { + "target": "com.amazonaws.sagemaker#UiTemplate", "traits": { - "smithy.api#documentation": "

          The name of the UserProfile to sign-in as.

          ", "smithy.api#required": {} } }, - "SessionExpirationDurationInSeconds": { - "target": "com.amazonaws.sagemaker#SessionExpirationDurationInSeconds", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The session expiration duration in seconds.

          " + "smithy.api#documentation": "

          An array of key-value pairs that contain metadata to help you categorize and organize a human review workflow user interface. Each tag consists of a key and a value, both of which you define.

          " } } } }, - "com.amazonaws.sagemaker#CreatePresignedDomainUrlResponse": { + "com.amazonaws.sagemaker#CreateHumanTaskUiResponse": { "type": "structure", "members": { - "AuthorizedUrl": { - "target": "com.amazonaws.sagemaker#PresignedDomainUrl", + "HumanTaskUiArn": { + "target": "com.amazonaws.sagemaker#HumanTaskUiArn", "traits": { - "smithy.api#documentation": "

          The presigned URL.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human review workflow user interface you create.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrl": { + "com.amazonaws.sagemaker#CreateHyperParameterTuningJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlInput" + "target": "com.amazonaws.sagemaker#CreateHyperParameterTuningJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlOutput" + "target": "com.amazonaws.sagemaker#CreateHyperParameterTuningJobResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], "traits": { - "smithy.api#documentation": "

          Returns a URL that you can use to connect to the Jupyter server from a notebook\n instance. In the Amazon SageMaker console, when you choose Open next to a notebook\n instance, Amazon SageMaker opens a new tab showing the Jupyter server home page from the notebook\n instance. The console uses this API to get the URL and show the page.

          \n

          The IAM role or user used to call this API defines the permissions to access the\n notebook instance. Once the presigned URL is created, no additional permission is\n required to access this URL. IAM authorization policies for this API are also enforced\n for every HTTP request and WebSocket frame that attempts to connect to the notebook\n instance.

          \n

          You can restrict access to this API and to the URL that it returns to a list of IP\n addresses that you specify. Use the NotIpAddress condition operator and the\n aws:SourceIP condition context key to specify the list of IP addresses\n that you want to have access to the notebook instance. For more information, see Limit Access to a Notebook Instance by IP Address.

          \n \n

          The URL that you get from a call to CreatePresignedNotebookInstanceUrl is valid only for 5 minutes. If\n you try to use the URL after the 5-minute limit expires, you are directed to the\n AWS console sign-in page.

          \n
          " + "smithy.api#documentation": "

          Starts a hyperparameter tuning job. A hyperparameter tuning job finds the best version\n of a model by running many training jobs on your dataset using the algorithm you choose\n and values for hyperparameters within ranges that you specify. It then chooses the\n hyperparameter values that result in a model that performs the best, as measured by an\n objective metric that you choose.

          " } }, - "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlInput": { + "com.amazonaws.sagemaker#CreateHyperParameterTuningJobRequest": { "type": "structure", "members": { - "NotebookInstanceName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceName", + "HyperParameterTuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", "traits": { - "smithy.api#documentation": "

          The name of the notebook instance.

          ", + "smithy.api#documentation": "

          The name of the tuning job. This name is the prefix for the names of all training jobs\n that this tuning job launches. The name must be unique within the same AWS account and\n AWS Region. The name must have 1 to 32 characters. Valid characters are a-z, A-Z,\n 0-9, and : + = @ _ % - (hyphen). The name is not case sensitive.

          ", "smithy.api#required": {} } }, - "SessionExpirationDurationInSeconds": { - "target": "com.amazonaws.sagemaker#SessionExpirationDurationInSeconds", + "HyperParameterTuningJobConfig": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobConfig", "traits": { - "smithy.api#documentation": "

          The duration of the session, in seconds. The default is 12 hours.

          " + "smithy.api#documentation": "

          The HyperParameterTuningJobConfig object that describes the tuning\n job, including the search strategy, the objective metric used to evaluate training jobs,\n ranges of parameters to search, and resource limits for the tuning job. For more\n information, see How\n Hyperparameter Tuning Works.

          ", + "smithy.api#required": {} + } + }, + "TrainingJobDefinition": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition", + "traits": { + "smithy.api#documentation": "

          The HyperParameterTrainingJobDefinition object that describes the\n training jobs that this tuning job launches,\n including\n static hyperparameters, input data configuration, output data configuration, resource\n configuration, and stopping condition.

          " + } + }, + "TrainingJobDefinitions": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitions", + "traits": { + "smithy.api#documentation": "

          A list of the HyperParameterTrainingJobDefinition objects launched\n for this tuning job.

          " + } + }, + "WarmStartConfig": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartConfig", + "traits": { + "smithy.api#documentation": "

          Specifies the configuration for starting the hyperparameter tuning job using one or\n more previous tuning jobs as a starting point. The results of previous tuning jobs are\n used to inform which combinations of hyperparameters to search over in the new tuning\n job.

          \n

          All training jobs launched by the new hyperparameter tuning job are evaluated by using\n the objective metric. If you specify IDENTICAL_DATA_AND_ALGORITHM as the\n WarmStartType value for the warm start configuration, the training job\n that performs the best in the new tuning job is compared to the best training jobs from\n the parent tuning jobs. From these, the training job that performs the best as measured\n by the objective metric is returned as the overall best training job.

          \n \n

          All training jobs launched by parent hyperparameter tuning jobs and the new\n hyperparameter tuning jobs count against the limit of training jobs for the tuning\n job.

          \n
          " + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          \n

          Tags that you specify for the tuning job are also added to all training jobs that the\n tuning job launches.

          " } } } }, - "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlOutput": { + "com.amazonaws.sagemaker#CreateHyperParameterTuningJobResponse": { "type": "structure", "members": { - "AuthorizedUrl": { - "target": "com.amazonaws.sagemaker#NotebookInstanceUrl", + "HyperParameterTuningJobArn": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", "traits": { - "smithy.api#documentation": "

          A JSON object that contains the URL string.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the tuning job. Amazon SageMaker assigns an ARN to a\n hyperparameter tuning job when you create it.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateProcessingJob": { + "com.amazonaws.sagemaker#CreateImage": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateProcessingJobRequest" + "target": "com.amazonaws.sagemaker#CreateImageRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateProcessingJobResponse" + "target": "com.amazonaws.sagemaker#CreateImageResponse" }, "errors": [ { @@ -4332,106 +4828,128 @@ }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - }, - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Creates a processing job.

          " + "smithy.api#documentation": "

          Creates a custom SageMaker image. A SageMaker image is a set of image versions. Each image\n version represents a container image stored in Amazon Container Registry (ECR). For more information, see\n Bring your own SageMaker image.

          " } }, - "com.amazonaws.sagemaker#CreateProcessingJobRequest": { + "com.amazonaws.sagemaker#CreateImageRequest": { "type": "structure", "members": { - "ProcessingInputs": { - "target": "com.amazonaws.sagemaker#ProcessingInputs", + "Description": { + "target": "com.amazonaws.sagemaker#ImageDescription", "traits": { - "smithy.api#documentation": "

          For each input, data is downloaded from S3 into the processing container before the\n processing job begins running if \"S3InputMode\" is set to File.

          " + "smithy.api#documentation": "

          The description of the image.

          " } }, - "ProcessingOutputConfig": { - "target": "com.amazonaws.sagemaker#ProcessingOutputConfig", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ImageDisplayName", "traits": { - "smithy.api#documentation": "

          Output configuration for the processing job.

          " + "smithy.api#documentation": "

          The display name of the image. If not provided, ImageName is displayed.

          " } }, - "ProcessingJobName": { - "target": "com.amazonaws.sagemaker#ProcessingJobName", + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          The name of the processing job. The name must be unique within an AWS Region in the\n AWS account.

          ", + "smithy.api#documentation": "

          The name of the image. Must be unique to your account.

          ", "smithy.api#required": {} } }, - "ProcessingResources": { - "target": "com.amazonaws.sagemaker#ProcessingResources", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a\n processing job. In distributed training, you specify more than one instance.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

          ", "smithy.api#required": {} } }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#ProcessingStoppingCondition", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The time limit for how long the processing job is allowed to run.

          " + "smithy.api#documentation": "

          A list of tags to apply to the image.

          " } - }, - "AppSpecification": { - "target": "com.amazonaws.sagemaker#AppSpecification", + } + } + }, + "com.amazonaws.sagemaker#CreateImageResponse": { + "type": "structure", + "members": { + "ImageArn": { + "target": "com.amazonaws.sagemaker#ImageArn", "traits": { - "smithy.api#documentation": "

          Configures the processing job to run a specified Docker container image.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image.

          " } + } + } + }, + "com.amazonaws.sagemaker#CreateImageVersion": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateImageVersionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateImageVersionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "Environment": { - "target": "com.amazonaws.sagemaker#ProcessingEnvironmentMap", - "traits": { - "smithy.api#documentation": "

          Sets the environment variables in the Docker container.

          " - } + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" }, - "NetworkConfig": { - "target": "com.amazonaws.sagemaker#NetworkConfig", + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a version of the SageMaker image specified by ImageName. The version\n represents the Amazon Container Registry (ECR) container image specified by BaseImage.

          " + } + }, + "com.amazonaws.sagemaker#CreateImageVersionRequest": { + "type": "structure", + "members": { + "BaseImage": { + "target": "com.amazonaws.sagemaker#ImageBaseImage", "traits": { - "smithy.api#documentation": "

          Networking options for a processing job.

          " + "smithy.api#documentation": "

          The registry path of the container image to use as the starting point for this\n version. The path is an Amazon Container Registry (ECR) URI in the following format:

          \n

          \n .dkr.ecr..amazonaws.com/\n

          ", + "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "ClientToken": { + "target": "com.amazonaws.sagemaker#ClientToken", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on\n your behalf.

          ", + "smithy.api#documentation": "

          A unique ID. If not specified, the AWS CLI and AWS SDKs, such as the SDK for Python\n (Boto3), add a unique value to the call.

          ", + "smithy.api#idempotencyToken": {}, "smithy.api#required": {} } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          (Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " + "smithy.api#documentation": "

          The ImageName of the Image to create a version of.

          ", + "smithy.api#required": {} } - }, - "ExperimentConfig": { - "target": "com.amazonaws.sagemaker#ExperimentConfig" } } }, - "com.amazonaws.sagemaker#CreateProcessingJobResponse": { + "com.amazonaws.sagemaker#CreateImageVersionResponse": { "type": "structure", "members": { - "ProcessingJobArn": { - "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "ImageVersionArn": { + "target": "com.amazonaws.sagemaker#ImageVersionArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the processing job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image version.

          " } } } }, - "com.amazonaws.sagemaker#CreateTrainingJob": { + "com.amazonaws.sagemaker#CreateLabelingJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateTrainingJobRequest" + "target": "com.amazonaws.sagemaker#CreateLabelingJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateTrainingJobResponse" + "target": "com.amazonaws.sagemaker#CreateLabelingJobResponse" }, "errors": [ { @@ -4439,284 +4957,200 @@ }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - }, - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Starts a model training job. After training completes, Amazon SageMaker saves the resulting\n model artifacts to an Amazon S3 location that you specify.

          \n

          If you choose to host your model using Amazon SageMaker hosting services, you can use the\n resulting model artifacts as part of the model. You can also use the artifacts in a\n machine learning service other than Amazon SageMaker, provided that you know how to use them for\n inferences.\n \n

          \n

          In the request body, you provide the following:

          \n
            \n
          • \n

            \n AlgorithmSpecification - Identifies the training algorithm to\n use.\n

            \n
          • \n
          • \n

            \n HyperParameters - Specify these algorithm-specific parameters to\n enable the estimation of model parameters during training. Hyperparameters can\n be tuned to optimize this learning process. For a list of hyperparameters for\n each training algorithm provided by Amazon SageMaker, see Algorithms.

            \n
          • \n
          • \n

            \n InputDataConfig - Describes the training dataset and the Amazon S3,\n EFS, or FSx location where it is stored.

            \n
          • \n
          • \n

            \n OutputDataConfig - Identifies the Amazon S3 bucket where you want\n Amazon SageMaker to save the results of model training.

            \n

            \n
          • \n
          • \n

            \n ResourceConfig - Identifies the resources, ML compute\n instances, and ML storage volumes to deploy for model training. In distributed\n training, you specify more than one instance.

            \n \n
          • \n
          • \n

            \n EnableManagedSpotTraining - Optimize the cost of training machine\n learning models by up to 80% by using Amazon EC2 Spot instances. For more\n information, see Managed Spot\n Training.

            \n
          • \n
          • \n

            \n RoleARN - The Amazon Resource Number (ARN) that Amazon SageMaker assumes\n to perform tasks on your behalf during model training.\n \n You must grant this role the necessary permissions so that Amazon SageMaker can successfully\n complete model training.

            \n
          • \n
          • \n

            \n StoppingCondition - To help cap training costs, use\n MaxRuntimeInSeconds to set a time limit for training. Use\n MaxWaitTimeInSeconds to specify how long you are willing to\n wait for a managed spot training job to complete.

            \n
          • \n
          \n

          For more information about Amazon SageMaker, see How It Works.

          " + "smithy.api#documentation": "

          Creates a job that uses workers to label the data objects in your input dataset. You\n can use the labeled data to train machine learning models.

          \n

          You can select your workforce from one of three providers:

          \n
            \n
          • \n

            A private workforce that you create. It can include employees, contractors,\n and outside experts. Use a private workforce when want the data to stay within\n your organization or when a specific set of skills is required.

            \n
          • \n
          • \n

            One or more vendors that you select from the AWS Marketplace. Vendors provide\n expertise in specific areas.

            \n
          • \n
          • \n

            The Amazon Mechanical Turk workforce. This is the largest workforce, but it\n should only be used for public data or data that has been stripped of any\n personally identifiable information.

            \n
          • \n
          \n

          You can also use automated data labeling to reduce the number of\n data objects that need to be labeled by a human. Automated data labeling uses\n active learning to determine if a data object can be labeled by\n machine or if it needs to be sent to a human worker. For more information, see Using\n Automated Data Labeling.

          \n

          The data objects to be labeled are contained in an Amazon S3 bucket. You create a\n manifest file that describes the location of each object. For\n more information, see Using Input and Output Data.

          \n

          The output can be used as the manifest file for another labeling job or as training\n data for your machine learning models.

          " } }, - "com.amazonaws.sagemaker#CreateTrainingJobRequest": { + "com.amazonaws.sagemaker#CreateLabelingJobRequest": { "type": "structure", "members": { - "TrainingJobName": { - "target": "com.amazonaws.sagemaker#TrainingJobName", + "LabelingJobName": { + "target": "com.amazonaws.sagemaker#LabelingJobName", "traits": { - "smithy.api#documentation": "

          The name of the training job. The name must be unique within an AWS Region in an\n AWS account.

          ", + "smithy.api#documentation": "

          The name of the labeling job. This name is used to identify the job in a list of\n labeling jobs.

          ", "smithy.api#required": {} } }, - "HyperParameters": { - "target": "com.amazonaws.sagemaker#HyperParameters", + "LabelAttributeName": { + "target": "com.amazonaws.sagemaker#LabelAttributeName", "traits": { - "smithy.api#documentation": "

          Algorithm-specific parameters that influence the quality of the model. You set\n hyperparameters before you start the learning process. For a list of hyperparameters for\n each training algorithm provided by Amazon SageMaker, see Algorithms.

          \n

          You can specify a maximum of 100 hyperparameters. Each hyperparameter is a\n key-value pair. Each key and value is limited to 256 characters, as specified by the\n Length Constraint.

          " + "smithy.api#documentation": "

          The attribute name to use for the label in the output manifest file. This is the key\n for the key/value pair formed with the label that a worker assigns to the object. The\n name can't end with \"-metadata\". If you are running a semantic segmentation labeling\n job, the attribute name must end with \"-ref\". If you are running any other kind of\n labeling job, the attribute name must not end with \"-ref\".

          ", + "smithy.api#required": {} } }, - "AlgorithmSpecification": { - "target": "com.amazonaws.sagemaker#AlgorithmSpecification", + "InputConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobInputConfig", "traits": { - "smithy.api#documentation": "

          The registry path of the Docker image that contains the training algorithm and\n algorithm-specific metadata, including the input mode. For more information about\n algorithms provided by Amazon SageMaker, see Algorithms. For information about\n providing your own algorithms, see Using Your Own Algorithms with Amazon\n SageMaker.

          ", + "smithy.api#documentation": "

          Input data for the labeling job, such as the Amazon S3 location of the data objects and the\n location of the manifest file that describes the data objects.

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "OutputConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobOutputConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform\n tasks on your behalf.

          \n

          During model training, Amazon SageMaker needs your permission to read input data from an S3\n bucket, download a Docker image that contains training code, write model artifacts to an\n S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant\n permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker\n Roles.

          \n \n

          To be able to pass this role to Amazon SageMaker, the caller of this API must have the\n iam:PassRole permission.

          \n
          ", + "smithy.api#documentation": "

          The location of the output data and the AWS Key Management Service key ID for the key used to encrypt\n the output data, if any.

          ", "smithy.api#required": {} } }, - "InputDataConfig": { - "target": "com.amazonaws.sagemaker#InputDataConfig", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          An array of Channel objects. Each channel is a named input source.\n InputDataConfig\n \n describes the input data and its location.

          \n

          Algorithms can accept input data from one or more channels. For example, an\n algorithm might have two channels of input data, training_data and\n validation_data. The configuration for each channel provides the S3,\n EFS, or FSx location where the input data is stored. It also provides information about\n the stored data: the MIME type, compression method, and whether the data is wrapped in\n RecordIO format.

          \n

          Depending on the input mode that the algorithm supports, Amazon SageMaker either copies input\n data files from an S3 bucket to a local directory in the Docker container, or makes it\n available as input streams. For example, if you specify an EFS location, input data\n files will be made available as input streams. They do not need to be\n downloaded.

          " + "smithy.api#documentation": "

          The Amazon Resource Number (ARN) that Amazon SageMaker assumes to perform tasks on your behalf\n during data labeling. You must grant this role the necessary permissions so that Amazon SageMaker\n can successfully complete data labeling.

          ", + "smithy.api#required": {} } }, - "OutputDataConfig": { - "target": "com.amazonaws.sagemaker#OutputDataConfig", + "LabelCategoryConfigS3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          Specifies the path to the S3 location where you want to store model artifacts. Amazon SageMaker\n creates subfolders for the artifacts.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The S3 URI of the file that defines the categories used to label the data\n objects.

          \n

          For 3D point cloud task types, see Create a\n Labeling Category Configuration File for 3D Point Cloud Labeling Jobs.

          \n

          For all other built-in task types and custom\n tasks, your label category configuration file must be a JSON file in the\n following format. Identify the labels you want to use by replacing label_1,\n label_2,...,label_n with your label\n categories.

          \n

          \n {\n

          \n

          \n \"document-version\": \"2018-11-28\"\n

          \n

          \n \"labels\": [\n

          \n

          \n {\n

          \n

          \n \"label\": \"label_1\"\n

          \n

          \n },\n

          \n

          \n {\n

          \n

          \n \"label\": \"label_2\"\n

          \n

          \n },\n

          \n

          \n ...\n

          \n

          \n {\n

          \n

          \n \"label\": \"label_n\"\n

          \n

          \n }\n

          \n

          \n ]\n

          \n

          \n }\n

          " } }, - "ResourceConfig": { - "target": "com.amazonaws.sagemaker#ResourceConfig", + "StoppingConditions": { + "target": "com.amazonaws.sagemaker#LabelingJobStoppingConditions", "traits": { - "smithy.api#documentation": "

          The resources, including the ML compute instances and ML storage volumes, to use\n for model training.

          \n

          ML storage volumes store model artifacts and incremental states. Training\n algorithms might also use ML storage volumes for scratch space. If you want Amazon SageMaker to use\n the ML storage volume to store the training data, choose File as the\n TrainingInputMode in the algorithm specification. For distributed\n training algorithms, specify an instance count greater than 1.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A set of conditions for stopping the labeling job. If any of the conditions are met,\n the job is automatically stopped. You can use these conditions to control the cost of\n data labeling.

          " } }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig", + "LabelingJobAlgorithmsConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobAlgorithmsConfig", "traits": { - "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that you want your\n training job to connect to. Control access to and from your training container by\n configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon\n Virtual Private Cloud.

          " + "smithy.api#documentation": "

          Configures the information required to perform automated data labeling.

          " } }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#StoppingCondition", + "HumanTaskConfig": { + "target": "com.amazonaws.sagemaker#HumanTaskConfig", "traits": { - "smithy.api#documentation": "

          Specifies a limit to how long a model training job can run. When the job reaches the\n time limit, Amazon SageMaker ends the training job. Use this API to cap model training costs.

          \n

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays\n job termination for 120 seconds. Algorithms can use this 120-second window to save the\n model artifacts, so the results of training are not lost.

          ", + "smithy.api#documentation": "

          Configures the labeling task and how it is presented to workers; including, but not limited to price, keywords, and batch size (task count).

          ", "smithy.api#required": {} } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.\n \n

          " - } - }, - "EnableNetworkIsolation": { - "target": "com.amazonaws.sagemaker#Boolean", - "traits": { - "smithy.api#documentation": "

          Isolates the training container. No inbound or outbound network calls can be made,\n except for calls between peers within a training cluster for distributed training. If\n you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker\n downloads and uploads customer data and model artifacts through the specified VPC, but\n the training container does not have network access.

          " + "smithy.api#documentation": "

          An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " } - }, - "EnableInterContainerTrafficEncryption": { - "target": "com.amazonaws.sagemaker#Boolean", - "traits": { - "smithy.api#documentation": "

          To encrypt all communications between ML compute instances in distributed training,\n choose True. Encryption provides greater security for distributed training,\n but training might take longer. How long it takes depends on the amount of communication\n between compute instances, especially if you use a deep learning algorithm in\n distributed training. For more information, see Protect Communications Between ML\n Compute Instances in a Distributed Training Job.

          " - } - }, - "EnableManagedSpotTraining": { - "target": "com.amazonaws.sagemaker#Boolean", - "traits": { - "smithy.api#documentation": "

          To train models using managed spot training, choose True. Managed spot\n training provides a fully managed and scalable infrastructure for training machine\n learning models. this option is useful when training jobs can be interrupted and when\n there is flexibility when the training job is run.

          \n

          The complete and intermediate results of jobs are stored in an Amazon S3 bucket, and can be\n used as a starting point to train models incrementally. Amazon SageMaker provides metrics and\n logs in CloudWatch. They can be used to see when managed spot training jobs are running,\n interrupted, resumed, or completed.

          " - } - }, - "CheckpointConfig": { - "target": "com.amazonaws.sagemaker#CheckpointConfig", - "traits": { - "smithy.api#documentation": "

          Contains information about the output location for managed spot training checkpoint\n data.

          " - } - }, - "DebugHookConfig": { - "target": "com.amazonaws.sagemaker#DebugHookConfig" - }, - "DebugRuleConfigurations": { - "target": "com.amazonaws.sagemaker#DebugRuleConfigurations", - "traits": { - "smithy.api#documentation": "

          Configuration information for debugging rules.

          " - } - }, - "TensorBoardOutputConfig": { - "target": "com.amazonaws.sagemaker#TensorBoardOutputConfig" - }, - "ExperimentConfig": { - "target": "com.amazonaws.sagemaker#ExperimentConfig" } } }, - "com.amazonaws.sagemaker#CreateTrainingJobResponse": { + "com.amazonaws.sagemaker#CreateLabelingJobResponse": { "type": "structure", "members": { - "TrainingJobArn": { - "target": "com.amazonaws.sagemaker#TrainingJobArn", + "LabelingJobArn": { + "target": "com.amazonaws.sagemaker#LabelingJobArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the training job.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the labeling job. You use this ARN to identify the\n labeling job.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateTransformJob": { + "com.amazonaws.sagemaker#CreateModel": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateTransformJobRequest" + "target": "com.amazonaws.sagemaker#CreateModelInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateTransformJobResponse" + "target": "com.amazonaws.sagemaker#CreateModelOutput" }, "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, { "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" - }, - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Starts a transform job. A transform job uses a trained model to get inferences on a\n dataset and saves these results to an Amazon S3 location that you specify.

          \n

          To perform batch transformations, you create a transform job and use the data that you\n have readily available.

          \n

          In the request body, you provide the following:

          \n
            \n
          • \n

            \n TransformJobName - Identifies the transform job. The name must be\n unique within an AWS Region in an AWS account.

            \n
          • \n
          • \n

            \n ModelName - Identifies the model to use. ModelName\n must be the name of an existing Amazon SageMaker model in the same AWS Region and AWS\n account. For information on creating a model, see CreateModel.

            \n
          • \n
          • \n

            \n TransformInput - Describes the dataset to be transformed and the\n Amazon S3 location where it is stored.

            \n
          • \n
          • \n

            \n TransformOutput - Identifies the Amazon S3 location where you want\n Amazon SageMaker to save the results from the transform job.

            \n
          • \n
          • \n

            \n TransformResources - Identifies the ML compute instances for the\n transform job.

            \n
          • \n
          \n

          For more information about how batch transformation works, see Batch\n Transform.

          " + "smithy.api#documentation": "

          Creates a model in Amazon SageMaker. In the request, you name the model and describe a primary\n container. For the primary container,\n you specify the Docker image that contains inference code, artifacts (from prior\n training), and a custom environment map that the inference code uses when you deploy the\n model for predictions.

          \n

          Use this API to create a model if you want to use Amazon SageMaker hosting services or run a batch\n transform job.

          \n

          To host your model, you create an endpoint configuration with the\n CreateEndpointConfig API, and then create an endpoint with the\n CreateEndpoint API. Amazon SageMaker then deploys all of the containers that you\n defined for the model in the hosting environment.

          \n

          For an example that calls this method when deploying a model to Amazon SageMaker hosting services,\n see Deploy the\n Model to Amazon SageMaker Hosting Services (AWS SDK for Python (Boto\n 3)).\n

          \n

          To run a batch transform using your model, you start a job with the\n CreateTransformJob API. Amazon SageMaker uses your model and your dataset to get\n inferences which are then saved to a specified S3 location.

          \n

          In the CreateModel request, you must define a container with the\n PrimaryContainer parameter.

          \n

          In the request, you also provide an IAM role that Amazon SageMaker can assume to access model\n artifacts and docker image for deployment on ML compute hosting instances or for batch\n transform jobs. In addition, you also use the IAM role to manage permissions the\n inference code needs. For example, if the inference code access any other AWS resources,\n you grant necessary permissions via this role.

          " } }, - "com.amazonaws.sagemaker#CreateTransformJobRequest": { + "com.amazonaws.sagemaker#CreateModelInput": { "type": "structure", "members": { - "TransformJobName": { - "target": "com.amazonaws.sagemaker#TransformJobName", - "traits": { - "smithy.api#documentation": "

          The name of the transform job. The name must be unique within an AWS Region in an\n AWS account.

          ", - "smithy.api#required": {} - } - }, "ModelName": { "target": "com.amazonaws.sagemaker#ModelName", "traits": { - "smithy.api#documentation": "

          The name of the model that you want to use for the transform job.\n ModelName must be the name of an existing Amazon SageMaker model within an AWS\n Region in an AWS account.

          ", + "smithy.api#documentation": "

          The name of the new model.

          ", "smithy.api#required": {} } }, - "MaxConcurrentTransforms": { - "target": "com.amazonaws.sagemaker#MaxConcurrentTransforms", - "traits": { - "smithy.api#documentation": "

          The maximum number of parallel requests that can be sent to each instance in a\n transform job. If MaxConcurrentTransforms is set to 0 or left\n unset, Amazon SageMaker checks the optional execution-parameters to determine the settings for your\n chosen algorithm. If the execution-parameters endpoint is not enabled, the default value\n is 1. For more information on execution-parameters, see How Containers Serve Requests. For built-in algorithms, you don't need to\n set a value for MaxConcurrentTransforms.

          " - } - }, - "ModelClientConfig": { - "target": "com.amazonaws.sagemaker#ModelClientConfig", - "traits": { - "smithy.api#documentation": "

          Configures the timeout and maximum number of retries for processing a transform job\n invocation.

          " - } - }, - "MaxPayloadInMB": { - "target": "com.amazonaws.sagemaker#MaxPayloadInMB", - "traits": { - "smithy.api#documentation": "

          The maximum allowed size of the payload, in MB. A payload is the\n data portion of a record (without metadata). The value in MaxPayloadInMB\n must be greater than, or equal to, the size of a single record. To estimate the size of\n a record in MB, divide the size of your dataset by the number of records. To ensure that\n the records fit within the maximum payload size, we recommend using a slightly larger\n value. The default value is 6 MB.\n

          \n

          For cases where the payload might be arbitrarily large and is transmitted using HTTP\n chunked encoding, set the value to 0.\n This\n feature works only in supported algorithms. Currently, Amazon SageMaker built-in\n algorithms do not support HTTP chunked encoding.

          " - } - }, - "BatchStrategy": { - "target": "com.amazonaws.sagemaker#BatchStrategy", - "traits": { - "smithy.api#documentation": "

          Specifies the number of records to include in a mini-batch for an HTTP inference\n request. A record\n is a single unit of input data that\n inference can be made on. For example, a single line in a CSV file is a record.

          \n

          To enable the batch strategy, you must set the SplitType property to\n Line, RecordIO, or TFRecord.

          \n

          To use only one record when making an HTTP invocation request to a container, set\n BatchStrategy to SingleRecord and SplitType\n to Line.

          \n

          To fit as many records in a mini-batch as can fit within the\n MaxPayloadInMB limit, set BatchStrategy to\n MultiRecord and SplitType to Line.

          " - } - }, - "Environment": { - "target": "com.amazonaws.sagemaker#TransformEnvironmentMap", + "PrimaryContainer": { + "target": "com.amazonaws.sagemaker#ContainerDefinition", "traits": { - "smithy.api#documentation": "

          The environment variables to set in the Docker container. We support up to 16 key and\n values entries in the map.

          " + "smithy.api#documentation": "

          The location of the primary docker image containing inference code, associated\n artifacts, and custom environment map that the inference code uses when the model is\n deployed for predictions.

          " } }, - "TransformInput": { - "target": "com.amazonaws.sagemaker#TransformInput", + "Containers": { + "target": "com.amazonaws.sagemaker#ContainerDefinitionList", "traits": { - "smithy.api#documentation": "

          Describes the input source and\n the\n way the transform job consumes it.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Specifies the containers in the inference pipeline.

          " } }, - "TransformOutput": { - "target": "com.amazonaws.sagemaker#TransformOutput", + "ExecutionRoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          Describes the results of the transform job.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access model\n artifacts and docker image for deployment on ML compute instances or for batch transform\n jobs. Deploying on ML compute instances is part of model hosting. For more information,\n see Amazon SageMaker\n Roles.

          \n \n

          To be able to pass this role to Amazon SageMaker, the caller of this API must have the\n iam:PassRole permission.

          \n
          ", "smithy.api#required": {} } }, - "TransformResources": { - "target": "com.amazonaws.sagemaker#TransformResources", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          Describes the resources, including\n ML\n instance types and ML instance count, to use for the transform\n job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } }, - "DataProcessing": { - "target": "com.amazonaws.sagemaker#DataProcessing", + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig", "traits": { - "smithy.api#documentation": "

          The data structure used to specify the data to be used for inference in a batch\n transform job and to associate the data that is relevant to the prediction results in\n the output. The input filter provided allows you to exclude input data that is not\n needed for inference in a batch transform job. The output filter provided allows you to\n include input data relevant to interpreting the predictions in the output from the job.\n For more information, see Associate Prediction\n Results with their Corresponding Input Records.

          " + "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that you want your model\n to connect to. Control access to and from your model container by configuring the VPC.\n VpcConfig is used in hosting services and in batch transform. For more\n information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Data in Batch\n Transform Jobs by Using an Amazon Virtual Private Cloud.

          " } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "EnableNetworkIsolation": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          (Optional)\n An\n array of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + "smithy.api#documentation": "

          Isolates the model container. No inbound or outbound network calls can be made to or\n from the model container.

          " } - }, - "ExperimentConfig": { - "target": "com.amazonaws.sagemaker#ExperimentConfig" } } }, - "com.amazonaws.sagemaker#CreateTransformJobResponse": { + "com.amazonaws.sagemaker#CreateModelOutput": { "type": "structure", "members": { - "TransformJobArn": { - "target": "com.amazonaws.sagemaker#TransformJobArn", + "ModelArn": { + "target": "com.amazonaws.sagemaker#ModelArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the transform job.

          ", + "smithy.api#documentation": "

          The ARN of the model created in Amazon SageMaker.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateTrial": { + "com.amazonaws.sagemaker#CreateModelPackage": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateTrialRequest" + "target": "com.amazonaws.sagemaker#CreateModelPackageInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateTrialResponse" + "target": "com.amazonaws.sagemaker#CreateModelPackageOutput" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + "target": "com.amazonaws.sagemaker#ConflictException" }, { - "target": "com.amazonaws.sagemaker#ResourceNotFound" + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Creates an Amazon SageMaker trial. A trial is a set of steps called\n trial components that produce a machine learning model. A trial is part\n of a single Amazon SageMaker experiment.

          \n

          When you use Amazon SageMaker Studio or the Amazon SageMaker Python SDK, all experiments, trials, and trial\n components are automatically tracked, logged, and indexed. When you use the AWS SDK for Python (Boto), you\n must use the logging APIs provided by the SDK.

          \n

          You can add tags to a trial and then use the Search API to search for\n the tags.

          \n

          To get a list of all your trials, call the ListTrials API. To view a\n trial's properties, call the DescribeTrial API. To create a trial component,\n call the CreateTrialComponent API.

          " + "smithy.api#documentation": "

          Creates a model package that you can use to create Amazon SageMaker models or list on AWS\n Marketplace, or a versioned model that is part of a model group. Buyers can subscribe to model packages listed on AWS Marketplace to create\n models in Amazon SageMaker.

          \n

          To create a model package by specifying a Docker container that contains your\n inference code and the Amazon S3 location of your model artifacts, provide values for\n InferenceSpecification. To create a model from an algorithm resource\n that you created or subscribed to in AWS Marketplace, provide a value for\n SourceAlgorithmSpecification.

          \n \n

          There are two types of model packages:

          \n
            \n
          • \n

            Versioned - a model that is part of a model group in the model registry.

            \n
          • \n
          • \n

            Unversioned - a model package that is not part of a model group.

            \n
          • \n
          \n
          " } }, - "com.amazonaws.sagemaker#CreateTrialComponent": { + "com.amazonaws.sagemaker#CreateModelPackageGroup": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateTrialComponentRequest" + "target": "com.amazonaws.sagemaker#CreateModelPackageGroupInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateTrialComponentResponse" + "target": "com.amazonaws.sagemaker#CreateModelPackageGroupOutput" }, "errors": [ { @@ -4724,129 +5158,139 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a trial component, which is a stage of a machine learning\n trial. A trial is composed of one or more trial components. A trial\n component can be used in multiple trials.

          \n

          Trial components include pre-processing jobs, training jobs, and batch transform\n jobs.

          \n

          When you use Amazon SageMaker Studio or the Amazon SageMaker Python SDK, all experiments, trials, and trial\n components are automatically tracked, logged, and indexed. When you use the AWS SDK for Python (Boto), you\n must use the logging APIs provided by the SDK.

          \n

          You can add tags to a trial component and then use the Search API to\n search for the tags.

          \n \n

          \n CreateTrialComponent can only be invoked from within an Amazon SageMaker managed\n environment. This includes Amazon SageMaker training jobs, processing jobs, transform jobs, and Amazon SageMaker\n notebooks. A call to CreateTrialComponent from outside one of these\n environments results in an error.

          \n
          " + "smithy.api#documentation": "

          Creates a model group. A model group contains a group of model versions.

          " } }, - "com.amazonaws.sagemaker#CreateTrialComponentRequest": { + "com.amazonaws.sagemaker#CreateModelPackageGroupInput": { "type": "structure", "members": { - "TrialComponentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the component. The name must be unique in your AWS account and is not\n case-sensitive.

          ", + "smithy.api#documentation": "

          The name of the model group.

          ", "smithy.api#required": {} } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageGroupDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The name of the component as displayed. The name doesn't need to be unique. If\n DisplayName isn't specified, TrialComponentName is\n displayed.

          " + "smithy.api#documentation": "

          A description for the model group.

          " } }, - "Status": { - "target": "com.amazonaws.sagemaker#TrialComponentStatus", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The status of the component. States include:

          \n
            \n
          • \n

            InProgress

            \n
          • \n
          • \n

            Completed

            \n
          • \n
          • \n

            Failed

            \n
          • \n
          " + "smithy.api#documentation": "

          A list of key value pairs associated with the model group. For more information, see\n Tagging AWS\n resources in the AWS General Reference Guide.

          " } - }, - "StartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + } + } + }, + "com.amazonaws.sagemaker#CreateModelPackageGroupOutput": { + "type": "structure", + "members": { + "ModelPackageGroupArn": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupArn", "traits": { - "smithy.api#documentation": "

          When the component started.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model group.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#CreateModelPackageInput": { + "type": "structure", + "members": { + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#EntityName", + "traits": { + "smithy.api#documentation": "

          The name of the model package. The name must have 1 to 63 characters. Valid characters\n are a-z, A-Z, 0-9, and - (hyphen).

          \n

          This parameter is required for unversioned models. It is not applicable to versioned\n models.

          " } }, - "EndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          When the component ended.

          " + "smithy.api#documentation": "

          The name of the model group that this model version belongs to.

          \n

          This parameter is required for versioned models, and does not apply to unversioned\n models.

          " } }, - "Parameters": { - "target": "com.amazonaws.sagemaker#TrialComponentParameters", + "ModelPackageDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The hyperparameters for the component.

          " + "smithy.api#documentation": "

          A description of the model package.

          " } }, - "InputArtifacts": { - "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", + "InferenceSpecification": { + "target": "com.amazonaws.sagemaker#InferenceSpecification", "traits": { - "smithy.api#documentation": "

          The input artifacts for the component. Examples of input artifacts are datasets,\n algorithms, hyperparameters, source code, and instance types.

          " + "smithy.api#documentation": "

          Specifies details about inference jobs that can be run with models based on this model\n package, including the following:

          \n
            \n
          • \n

            The Amazon ECR paths of containers that contain the inference code and model\n artifacts.

            \n
          • \n
          • \n

            The instance types that the model package supports for transform jobs and\n real-time endpoints used for inference.

            \n
          • \n
          • \n

            The input and output content formats that the model package supports for\n inference.

            \n
          • \n
          " } }, - "OutputArtifacts": { - "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", + "ValidationSpecification": { + "target": "com.amazonaws.sagemaker#ModelPackageValidationSpecification", "traits": { - "smithy.api#documentation": "

          The output artifacts for the component. Examples of output artifacts are metrics,\n snapshots, logs, and images.

          " + "smithy.api#documentation": "

          Specifies configurations for one or more transform jobs that Amazon SageMaker runs to test the\n model package.

          " } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "SourceAlgorithmSpecification": { + "target": "com.amazonaws.sagemaker#SourceAlgorithmSpecification", "traits": { - "smithy.api#documentation": "

          A list of tags to associate with the component. You can use Search API\n to search on the tags.

          " + "smithy.api#documentation": "

          Details about the algorithm that was used to create the model package.

          " } - } - } - }, - "com.amazonaws.sagemaker#CreateTrialComponentResponse": { - "type": "structure", - "members": { - "TrialComponentArn": { - "target": "com.amazonaws.sagemaker#TrialComponentArn", + }, + "CertifyForMarketplace": { + "target": "com.amazonaws.sagemaker#CertifyForMarketplace", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial component.

          " + "smithy.api#documentation": "

          Whether to certify the model package for listing on AWS Marketplace.

          \n

          This parameter is optional for unversioned models, and does not apply to versioned\n models.

          " } - } - } - }, - "com.amazonaws.sagemaker#CreateTrialRequest": { - "type": "structure", - "members": { - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The name of the trial. The name must be unique in your AWS account and is not\n case-sensitive.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of key value pairs associated with the model. For more information, see Tagging AWS\n resources in the AWS General Reference Guide.

          " } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelApprovalStatus": { + "target": "com.amazonaws.sagemaker#ModelApprovalStatus", "traits": { - "smithy.api#documentation": "

          The name of the trial as displayed. The name doesn't need to be unique. If\n DisplayName isn't specified, TrialName is displayed.

          " + "smithy.api#documentation": "

          Whether the model is approved for deployment.

          \n

          This parameter is optional for versioned models, and does not apply to unversioned\n models.

          \n

          For versioned models, the value of this parameter must be set to Approved\n to deploy the model.

          " } }, - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, + "ModelMetrics": { + "target": "com.amazonaws.sagemaker#ModelMetrics", "traits": { - "smithy.api#documentation": "

          The name of the experiment to associate the trial with.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A structure that contains model metrics reports.

          " } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "ClientToken": { + "target": "com.amazonaws.sagemaker#ClientToken", "traits": { - "smithy.api#documentation": "

          A list of tags to associate with the trial. You can use Search API to\n search on the tags.

          " + "smithy.api#documentation": "

          A unique token that guarantees that the call to this API is idempotent.

          ", + "smithy.api#idempotencyToken": {} } } } }, - "com.amazonaws.sagemaker#CreateTrialResponse": { + "com.amazonaws.sagemaker#CreateModelPackageOutput": { "type": "structure", "members": { - "TrialArn": { - "target": "com.amazonaws.sagemaker#TrialArn", + "ModelPackageArn": { + "target": "com.amazonaws.sagemaker#ModelPackageArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the new model package.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateUserProfile": { + "com.amazonaws.sagemaker#CreateMonitoringSchedule": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateUserProfileRequest" + "target": "com.amazonaws.sagemaker#CreateMonitoringScheduleRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateUserProfileResponse" + "target": "com.amazonaws.sagemaker#CreateMonitoringScheduleResponse" }, "errors": [ { @@ -4857,1137 +5301,1674 @@ } ], "traits": { - "smithy.api#documentation": "

          Creates a user profile. A user profile represents a single user within a domain, and is\n the main way to reference a \"person\" for the purposes of sharing, reporting, and other\n user-oriented features. This entity is created when a user onboards to Amazon SageMaker Studio. If an\n administrator invites a person by email or imports them from SSO, a user profile is\n automatically created. A user profile is the primary holder of settings for an individual\n user and has a reference to the user's private Amazon Elastic File System (EFS) home directory.\n

          " + "smithy.api#documentation": "

          Creates a schedule that regularly starts Amazon SageMaker Processing Jobs to monitor the data\n captured for an Amazon SageMaker Endpoint.

          " } }, - "com.amazonaws.sagemaker#CreateUserProfileRequest": { + "com.amazonaws.sagemaker#CreateMonitoringScheduleRequest": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", "traits": { - "smithy.api#documentation": "

          The ID of the associated Domain.

          ", + "smithy.api#documentation": "

          The name of the monitoring schedule. The name must be unique within an AWS Region within\n an AWS account.

          ", "smithy.api#required": {} } }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "MonitoringScheduleConfig": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleConfig", "traits": { - "smithy.api#documentation": "

          A name for the UserProfile.

          ", + "smithy.api#documentation": "

          The configuration object that specifies the monitoring schedule and defines the\n monitoring job.

          ", "smithy.api#required": {} } }, - "SingleSignOnUserIdentifier": { - "target": "com.amazonaws.sagemaker#SingleSignOnUserIdentifier", - "traits": { - "smithy.api#documentation": "

          A specifier for the type of value specified in SingleSignOnUserValue. Currently, the only supported value is \"UserName\".\n If the Domain's AuthMode is SSO, this field is required. If the Domain's AuthMode is not SSO, this field cannot be specified.\n

          " - } - }, - "SingleSignOnUserValue": { - "target": "com.amazonaws.sagemaker#String256", - "traits": { - "smithy.api#documentation": "

          The username of the associated AWS Single Sign-On User for this UserProfile. If the Domain's AuthMode is SSO, this field is\n required, and must match a valid username of a user in your directory. If the Domain's AuthMode is not SSO, this field cannot be specified.\n

          " - } - }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          Each tag consists of a key and an optional value.\n Tag keys must be unique per resource.

          " - } - }, - "UserSettings": { - "target": "com.amazonaws.sagemaker#UserSettings", - "traits": { - "smithy.api#documentation": "

          A collection of settings.

          " + "smithy.api#documentation": "

          (Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " } } } }, - "com.amazonaws.sagemaker#CreateUserProfileResponse": { + "com.amazonaws.sagemaker#CreateMonitoringScheduleResponse": { "type": "structure", "members": { - "UserProfileArn": { - "target": "com.amazonaws.sagemaker#UserProfileArn", + "MonitoringScheduleArn": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", "traits": { - "smithy.api#documentation": "

          The user profile Amazon Resource Name (ARN).

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#CreateWorkforce": { + "com.amazonaws.sagemaker#CreateNotebookInstance": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateWorkforceRequest" + "target": "com.amazonaws.sagemaker#CreateNotebookInstanceInput" }, "output": { - "target": "com.amazonaws.sagemaker#CreateWorkforceResponse" + "target": "com.amazonaws.sagemaker#CreateNotebookInstanceOutput" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], "traits": { - "smithy.api#documentation": "

          Use this operation to create a workforce. This operation will return an error\n if a workforce already exists in the AWS Region that you specify. You can only\n create one workforce in each AWS Region per AWS account.

          \n\n

          If you want to create a new workforce in an AWS Region where \n a workforce already exists, use the API\n operation to delete the existing workforce and then use CreateWorkforce \n to create a new workforce.

          \n\n

          To create a private workforce using Amazon Cognito, you must specify a Cognito user pool\n in CognitoConfig.\n You can also create an Amazon Cognito workforce using the Amazon SageMaker console. \n For more information, see \n \n Create a Private Workforce (Amazon Cognito).

          \n\n

          To create a private workforce using your own OIDC Identity Provider (IdP), specify your IdP\n configuration in OidcConfig. Your OIDC IdP must support groups\n because groups are used by Ground Truth and Amazon A2I to create work teams. \n For more information, see \n Create a Private Workforce (OIDC IdP).

          " + "smithy.api#documentation": "

          Creates an Amazon SageMaker notebook instance. A notebook instance is a machine learning (ML)\n compute instance running on a Jupyter notebook.

          \n

          In a CreateNotebookInstance request, specify the type of ML compute\n instance that you want to run. Amazon SageMaker launches the instance, installs common libraries\n that you can use to explore datasets for model training, and attaches an ML storage\n volume to the notebook instance.

          \n

          Amazon SageMaker also provides a set of example notebooks. Each notebook demonstrates how to\n use Amazon SageMaker with a specific algorithm or with a machine learning framework.

          \n

          After receiving the request, Amazon SageMaker does the following:

          \n
            \n
          1. \n

            Creates a network interface in the Amazon SageMaker VPC.

            \n
          2. \n
          3. \n

            (Option) If you specified SubnetId, Amazon SageMaker creates a network\n interface in your own VPC, which is inferred from the subnet ID that you provide\n in the input. When creating this network interface, Amazon SageMaker attaches the security\n group that you specified in the request to the network interface that it creates\n in your VPC.

            \n \n
          4. \n
          5. \n

            Launches an EC2 instance of the type specified in the request in the Amazon SageMaker\n VPC. If you specified SubnetId of your VPC, Amazon SageMaker specifies both\n network interfaces when launching this instance. This enables inbound traffic\n from your own VPC to the notebook instance, assuming that the security groups\n allow it.

            \n
          6. \n
          \n \n

          After creating the notebook instance, Amazon SageMaker returns its Amazon Resource Name (ARN).\n You can't change the name of a notebook instance after you create it.

          \n

          After Amazon SageMaker creates the notebook instance, you can connect to the Jupyter server and\n work in Jupyter notebooks. For example, you can write code to explore a dataset that you\n can use for model training, train a model, host models by creating Amazon SageMaker endpoints, and\n validate hosted models.

          \n

          For more information, see How It Works.

          " } }, - "com.amazonaws.sagemaker#CreateWorkforceRequest": { + "com.amazonaws.sagemaker#CreateNotebookInstanceInput": { "type": "structure", "members": { - "CognitoConfig": { - "target": "com.amazonaws.sagemaker#CognitoConfig", + "NotebookInstanceName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceName", "traits": { - "smithy.api#documentation": "

          Use this parameter to configure an Amazon Cognito private workforce.\n A single Cognito workforce is created using and corresponds to a single\n \n Amazon Cognito user pool.

          \n \n

          Do not use OidcConfig if you specify values for \n CognitoConfig.

          " + "smithy.api#documentation": "

          The name of the new notebook instance.

          ", + "smithy.api#required": {} } }, - "OidcConfig": { - "target": "com.amazonaws.sagemaker#OidcConfig", + "InstanceType": { + "target": "com.amazonaws.sagemaker#InstanceType", "traits": { - "smithy.api#documentation": "

          Use this parameter to configure a private workforce using your own OIDC Identity Provider.

          \n

          Do not use CognitoConfig if you specify values for \n OidcConfig.

          " + "smithy.api#documentation": "

          The type of ML compute instance to launch for the notebook instance.

          ", + "smithy.api#required": {} } }, - "SourceIpConfig": { - "target": "com.amazonaws.sagemaker#SourceIpConfig" + "SubnetId": { + "target": "com.amazonaws.sagemaker#SubnetId", + "traits": { + "smithy.api#documentation": "

          The ID of the subnet in a VPC to which you would like to have connectivity from\n your ML compute instance.

          " + } }, - "WorkforceName": { - "target": "com.amazonaws.sagemaker#WorkforceName", + "SecurityGroupIds": { + "target": "com.amazonaws.sagemaker#SecurityGroupIds", "traits": { - "smithy.api#documentation": "

          The name of the private workforce.

          ", + "smithy.api#documentation": "

          The VPC security group IDs, in the form sg-xxxxxxxx. The security groups must be\n for the same VPC as specified in the subnet.

          " + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          When you send any requests to AWS resources from the notebook instance, Amazon SageMaker\n assumes this role to perform tasks on your behalf. You must grant this role necessary\n permissions so Amazon SageMaker can perform these tasks. The policy must allow the Amazon SageMaker service\n principal (sagemaker.amazonaws.com) permissions to assume this role. For more\n information, see Amazon SageMaker Roles.

          \n \n

          To be able to pass this role to Amazon SageMaker, the caller of this API must have the\n iam:PassRole permission.

          \n
          ", "smithy.api#required": {} } }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt data on\n the storage volume attached to your notebook instance. The KMS key you provide must be\n enabled. For information, see Enabling and Disabling\n Keys in the AWS Key Management Service Developer Guide.

          " + } + }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs that contain metadata to help you categorize and \n organize your workforce. Each tag consists of a key and a value, \n both of which you define.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " + } + }, + "LifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "traits": { + "smithy.api#documentation": "

          The name of a lifecycle configuration to associate with the notebook instance. For\n information about lifecycle configurations, see Step 2.1: (Optional)\n Customize a Notebook Instance.

          " + } + }, + "DirectInternetAccess": { + "target": "com.amazonaws.sagemaker#DirectInternetAccess", + "traits": { + "smithy.api#documentation": "

          Sets whether Amazon SageMaker provides internet access to the notebook instance. If you set this\n to Disabled this notebook instance will be able to access resources only in\n your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless\n you configure a NAT Gateway in your VPC.

          \n

          For more information, see Notebook Instances Are Internet-Enabled by Default. You can set the value\n of this parameter to Disabled only if you set a value for the\n SubnetId parameter.

          " + } + }, + "VolumeSizeInGB": { + "target": "com.amazonaws.sagemaker#NotebookInstanceVolumeSizeInGB", + "traits": { + "smithy.api#documentation": "

          The size, in GB, of the ML storage volume to attach to the notebook instance. The\n default value is 5 GB.

          " + } + }, + "AcceleratorTypes": { + "target": "com.amazonaws.sagemaker#NotebookInstanceAcceleratorTypes", + "traits": { + "smithy.api#documentation": "

          A list of Elastic Inference (EI) instance types to associate with this notebook\n instance. Currently, only one instance type can be associated with a notebook instance.\n For more information, see Using Elastic Inference in Amazon SageMaker.

          " + } + }, + "DefaultCodeRepository": { + "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "traits": { + "smithy.api#documentation": "

          A Git repository to associate with the notebook instance as its default code\n repository. This can be either the name of a Git repository stored as a resource in your\n account, or the URL of a Git repository in AWS CodeCommit or in any\n other Git repository. When you open a notebook instance, it opens in the directory that\n contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker\n Notebook Instances.

          " + } + }, + "AdditionalCodeRepositories": { + "target": "com.amazonaws.sagemaker#AdditionalCodeRepositoryNamesOrUrls", + "traits": { + "smithy.api#documentation": "

          An array of up to three Git repositories to associate with the notebook instance.\n These can be either the names of Git repositories stored as resources in your account,\n or the URL of Git repositories in AWS CodeCommit or in any\n other Git repository. These repositories are cloned at the same level as the default\n repository of your notebook instance. For more information, see Associating Git\n Repositories with Amazon SageMaker Notebook Instances.

          " + } + }, + "RootAccess": { + "target": "com.amazonaws.sagemaker#RootAccess", + "traits": { + "smithy.api#documentation": "

          Whether root access is enabled or disabled for users of the notebook instance. The\n default value is Enabled.

          \n \n

          Lifecycle configurations need root access to be able to set up a notebook\n instance. Because of this, lifecycle configurations associated with a notebook\n instance always run with root access even if you disable root access for\n users.

          \n
          " } } } }, - "com.amazonaws.sagemaker#CreateWorkforceResponse": { + "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigOutput" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a lifecycle configuration that you can associate with a notebook instance. A\n lifecycle configuration is a collection of shell scripts that\n run when you create or start a notebook instance.

          \n

          Each lifecycle configuration script has a limit of 16384 characters.

          \n

          The value of the $PATH environment variable that is available to both\n scripts is /sbin:bin:/usr/sbin:/usr/bin.

          \n

          View CloudWatch Logs for notebook instance lifecycle configurations in log group\n /aws/sagemaker/NotebookInstances in log stream\n [notebook-instance-name]/[LifecycleConfigHook].

          \n

          Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs\n for longer than 5 minutes, it fails and the notebook instance is not created or\n started.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " + } + }, + "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigInput": { "type": "structure", "members": { - "WorkforceArn": { - "target": "com.amazonaws.sagemaker#WorkforceArn", + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the workforce.

          ", + "smithy.api#documentation": "

          The name of the lifecycle configuration.

          ", "smithy.api#required": {} } + }, + "OnCreate": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "traits": { + "smithy.api#documentation": "

          A shell script that runs only once, when you create a notebook instance. The shell\n script must be a base64-encoded string.

          " + } + }, + "OnStart": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "traits": { + "smithy.api#documentation": "

          A shell script that runs every time you start a notebook instance, including when you\n create the notebook instance. The shell script must be a base64-encoded string.

          " + } } } }, - "com.amazonaws.sagemaker#CreateWorkteam": { + "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfigOutput": { + "type": "structure", + "members": { + "NotebookInstanceLifecycleConfigArn": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          " + } + } + } + }, + "com.amazonaws.sagemaker#CreateNotebookInstanceOutput": { + "type": "structure", + "members": { + "NotebookInstanceArn": { + "target": "com.amazonaws.sagemaker#NotebookInstanceArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the notebook instance.

          " + } + } + } + }, + "com.amazonaws.sagemaker#CreatePipeline": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#CreateWorkteamRequest" + "target": "com.amazonaws.sagemaker#CreatePipelineRequest" }, "output": { - "target": "com.amazonaws.sagemaker#CreateWorkteamResponse" + "target": "com.amazonaws.sagemaker#CreatePipelineResponse" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ResourceInUse" + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" }, { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Creates a new work team for labeling your data. A work team is defined by one or more\n Amazon Cognito user pools. You must first create the user pools before you can create a work\n team.

          \n

          You cannot create more than 25 work teams in an account and region.

          " + "smithy.api#documentation": "

          Creates a pipeline using a JSON pipeline definition.

          " } }, - "com.amazonaws.sagemaker#CreateWorkteamRequest": { + "com.amazonaws.sagemaker#CreatePipelineRequest": { "type": "structure", "members": { - "WorkteamName": { - "target": "com.amazonaws.sagemaker#WorkteamName", + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The name of the work team. Use this name to identify the work team.

          ", + "smithy.api#documentation": "

          The name of the pipeline.

          ", "smithy.api#required": {} } }, - "WorkforceName": { - "target": "com.amazonaws.sagemaker#WorkforceName", + "PipelineDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The name of the workforce.

          " + "smithy.api#documentation": "

          The display name of the pipeline.

          " } }, - "MemberDefinitions": { - "target": "com.amazonaws.sagemaker#MemberDefinitions", + "PipelineDefinition": { + "target": "com.amazonaws.sagemaker#PipelineDefinition", "traits": { - "smithy.api#documentation": "

          A list of MemberDefinition objects that contains objects that identify\n the workers that make up the work team.

          \n

          Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). For\n private workforces created using Amazon Cognito use CognitoMemberDefinition. For\n workforces created using your own OIDC identity provider (IdP) use\n OidcMemberDefinition. Do not provide input for both of these parameters\n in a single request.

          \n

          For workforces created using Amazon Cognito, private work teams correspond to Amazon Cognito\n user groups within the user pool used to create a workforce. All of the\n CognitoMemberDefinition objects that make up the member definition must\n have the same ClientId and UserPool values. To add a Amazon\n Cognito user group to an existing worker pool, see Adding groups to a User\n Pool. For more information about user pools, see Amazon Cognito User\n Pools.

          \n

          For workforces created using your own OIDC IdP, specify the user groups that you want to \n include in your private work team in OidcMemberDefinition by listing those groups\n in Groups.

          ", + "smithy.api#documentation": "

          The JSON pipeline definition of the pipeline.

          ", "smithy.api#required": {} } }, - "Description": { - "target": "com.amazonaws.sagemaker#String200", + "PipelineDescription": { + "target": "com.amazonaws.sagemaker#PipelineDescription", "traits": { - "smithy.api#documentation": "

          A description of the work team.

          ", + "smithy.api#documentation": "

          A description of the pipeline.

          " + } + }, + "ClientRequestToken": { + "target": "com.amazonaws.sagemaker#IdempotencyToken", + "traits": { + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n operation. An idempotent operation completes no more than one time.

          ", + "smithy.api#idempotencyToken": {}, "smithy.api#required": {} } }, - "NotificationConfiguration": { - "target": "com.amazonaws.sagemaker#NotificationConfiguration", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          Configures notification of workers regarding available or expiring work items.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the role used by the pipeline to access and create resources.

          ", + "smithy.api#required": {} } }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs.

          \n

          For more information, see Resource\n Tag and Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + "smithy.api#documentation": "

          A list of tags to apply to the created pipeline.

          " } } } }, - "com.amazonaws.sagemaker#CreateWorkteamResponse": { + "com.amazonaws.sagemaker#CreatePipelineResponse": { "type": "structure", "members": { - "WorkteamArn": { - "target": "com.amazonaws.sagemaker#WorkteamArn", + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team. You can use this ARN to identify the\n work team.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the created pipeline.

          " } } } }, - "com.amazonaws.sagemaker#CreationTime": { - "type": "timestamp" - }, - "com.amazonaws.sagemaker#CsvContentType": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 256 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*\\/[a-zA-Z0-9](-*[a-zA-Z0-9.])*" - } - }, - "com.amazonaws.sagemaker#CsvContentTypes": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CsvContentType" + "com.amazonaws.sagemaker#CreatePresignedDomainUrl": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreatePresignedDomainUrlRequest" }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 10 + "output": { + "target": "com.amazonaws.sagemaker#CreatePresignedDomainUrlResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" } + ], + "traits": { + "smithy.api#documentation": "

          Creates a URL for a specified UserProfile in a Domain. When accessed in a web browser,\n the user will be automatically signed in to Amazon SageMaker Studio, and granted access to all of\n the Apps and files associated with the Domain's Amazon Elastic File System (EFS) volume.\n This operation can only be called when the authentication mode equals IAM.\n

          \n \n

          The URL that you get from a call to CreatePresignedDomainUrl is valid\n only for 5 minutes. If you try to use the URL after the 5-minute limit expires, you\n are directed to the AWS console sign-in page.

          \n
          " } }, - "com.amazonaws.sagemaker#CustomImage": { + "com.amazonaws.sagemaker#CreatePresignedDomainUrlRequest": { "type": "structure", "members": { - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The name of the CustomImage. Must be unique to your account.

          ", + "smithy.api#documentation": "

          The domain ID.

          ", "smithy.api#required": {} } }, - "ImageVersionNumber": { - "target": "com.amazonaws.sagemaker#ImageVersionNumber", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The version number of the CustomImage.

          " + "smithy.api#documentation": "

          The name of the UserProfile to sign-in as.

          ", + "smithy.api#required": {} } }, - "AppImageConfigName": { - "target": "com.amazonaws.sagemaker#AppImageConfigName", + "SessionExpirationDurationInSeconds": { + "target": "com.amazonaws.sagemaker#SessionExpirationDurationInSeconds", "traits": { - "smithy.api#documentation": "

          The name of the AppImageConfig.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The session expiration duration in seconds.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          A custom SageMaker image. For more information, see\n Bring your own SageMaker image.

          " } }, - "com.amazonaws.sagemaker#CustomImages": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#CustomImage" + "com.amazonaws.sagemaker#CreatePresignedDomainUrlResponse": { + "type": "structure", + "members": { + "AuthorizedUrl": { + "target": "com.amazonaws.sagemaker#PresignedDomainUrl", + "traits": { + "smithy.api#documentation": "

          The presigned URL.

          " + } + } + } + }, + "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrl": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlOutput" }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 30 + "smithy.api#documentation": "

          Returns a URL that you can use to connect to the Jupyter server from a notebook\n instance. In the Amazon SageMaker console, when you choose Open next to a notebook\n instance, Amazon SageMaker opens a new tab showing the Jupyter server home page from the notebook\n instance. The console uses this API to get the URL and show the page.

          \n

          The IAM role or user used to call this API defines the permissions to access the\n notebook instance. Once the presigned URL is created, no additional permission is\n required to access this URL. IAM authorization policies for this API are also enforced\n for every HTTP request and WebSocket frame that attempts to connect to the notebook\n instance.

          \n

          You can restrict access to this API and to the URL that it returns to a list of IP\n addresses that you specify. Use the NotIpAddress condition operator and the\n aws:SourceIP condition context key to specify the list of IP addresses\n that you want to have access to the notebook instance. For more information, see Limit Access to a Notebook Instance by IP Address.

          \n \n

          The URL that you get from a call to CreatePresignedNotebookInstanceUrl is valid only for 5 minutes. If\n you try to use the URL after the 5-minute limit expires, you are directed to the\n AWS console sign-in page.

          \n
          " + } + }, + "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlInput": { + "type": "structure", + "members": { + "NotebookInstanceName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceName", + "traits": { + "smithy.api#documentation": "

          The name of the notebook instance.

          ", + "smithy.api#required": {} + } + }, + "SessionExpirationDurationInSeconds": { + "target": "com.amazonaws.sagemaker#SessionExpirationDurationInSeconds", + "traits": { + "smithy.api#documentation": "

          The duration of the session, in seconds. The default is 12 hours.

          " + } } } }, - "com.amazonaws.sagemaker#DataCaptureConfig": { + "com.amazonaws.sagemaker#CreatePresignedNotebookInstanceUrlOutput": { "type": "structure", "members": { - "EnableCapture": { - "target": "com.amazonaws.sagemaker#EnableCapture", + "AuthorizedUrl": { + "target": "com.amazonaws.sagemaker#NotebookInstanceUrl", "traits": { - "smithy.api#documentation": "

          " + "smithy.api#documentation": "

          A JSON object that contains the URL string.

          " } + } + } + }, + "com.amazonaws.sagemaker#CreateProcessingJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateProcessingJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateProcessingJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "InitialSamplingPercentage": { - "target": "com.amazonaws.sagemaker#SamplingPercentage", + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a processing job.

          " + } + }, + "com.amazonaws.sagemaker#CreateProcessingJobRequest": { + "type": "structure", + "members": { + "ProcessingInputs": { + "target": "com.amazonaws.sagemaker#ProcessingInputs", "traits": { - "smithy.api#documentation": "

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          List of input configurations for the processing job.

          " } }, - "DestinationS3Uri": { - "target": "com.amazonaws.sagemaker#DestinationS3Uri", + "ProcessingOutputConfig": { + "target": "com.amazonaws.sagemaker#ProcessingOutputConfig", "traits": { - "smithy.api#documentation": "

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Output configuration for the processing job.

          " } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "ProcessingJobName": { + "target": "com.amazonaws.sagemaker#ProcessingJobName", "traits": { - "smithy.api#documentation": "

          " + "smithy.api#documentation": "

          The name of the processing job. The name must be unique within an AWS Region in the\n AWS account.

          ", + "smithy.api#required": {} } }, - "CaptureOptions": { - "target": "com.amazonaws.sagemaker#CaptureOptionList", + "ProcessingResources": { + "target": "com.amazonaws.sagemaker#ProcessingResources", "traits": { - "smithy.api#documentation": "

          ", + "smithy.api#documentation": "

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a\n processing job. In distributed training, you specify more than one instance.

          ", "smithy.api#required": {} } }, - "CaptureContentTypeHeader": { - "target": "com.amazonaws.sagemaker#CaptureContentTypeHeader", + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#ProcessingStoppingCondition", "traits": { - "smithy.api#documentation": "

          " + "smithy.api#documentation": "

          The time limit for how long the processing job is allowed to run.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          " - } - }, - "com.amazonaws.sagemaker#DataCaptureConfigSummary": { - "type": "structure", - "members": { - "EnableCapture": { - "target": "com.amazonaws.sagemaker#EnableCapture", + }, + "AppSpecification": { + "target": "com.amazonaws.sagemaker#AppSpecification", "traits": { - "smithy.api#documentation": "

          ", + "smithy.api#documentation": "

          Configures the processing job to run a specified Docker container image.

          ", "smithy.api#required": {} } }, - "CaptureStatus": { - "target": "com.amazonaws.sagemaker#CaptureStatus", + "Environment": { + "target": "com.amazonaws.sagemaker#ProcessingEnvironmentMap", "traits": { - "smithy.api#documentation": "

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Sets the environment variables in the Docker container.

          " } }, - "CurrentSamplingPercentage": { - "target": "com.amazonaws.sagemaker#SamplingPercentage", + "NetworkConfig": { + "target": "com.amazonaws.sagemaker#NetworkConfig", "traits": { - "smithy.api#documentation": "

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Networking options for a processing job.

          " } }, - "DestinationS3Uri": { - "target": "com.amazonaws.sagemaker#DestinationS3Uri", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on\n your behalf.

          ", "smithy.api#required": {} } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          (Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management\n User Guide.

          " } + }, + "ExperimentConfig": { + "target": "com.amazonaws.sagemaker#ExperimentConfig" } - }, - "traits": { - "smithy.api#documentation": "

          " } }, - "com.amazonaws.sagemaker#DataExplorationNotebookLocation": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1 + "com.amazonaws.sagemaker#CreateProcessingJobResponse": { + "type": "structure", + "members": { + "ProcessingJobArn": { + "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the processing job.

          ", + "smithy.api#required": {} + } } } }, - "com.amazonaws.sagemaker#DataInputConfig": { - "type": "string", + "com.amazonaws.sagemaker#CreateProject": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateProjectInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateProjectOutput" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], "traits": { - "smithy.api#length": { - "min": 1, - "max": 1024 - }, - "smithy.api#pattern": "[\\S\\s]+" + "smithy.api#documentation": "

          Creates a machine learning (ML) project that can contain one or more templates that set\n up an ML pipeline from training to deploying an approved model.

          " } }, - "com.amazonaws.sagemaker#DataProcessing": { + "com.amazonaws.sagemaker#CreateProjectInput": { "type": "structure", "members": { - "InputFilter": { - "target": "com.amazonaws.sagemaker#JsonPath", + "ProjectName": { + "target": "com.amazonaws.sagemaker#ProjectEntityName", "traits": { - "smithy.api#documentation": "

          A JSONPath expression used to select a portion of the input data to pass to\n the algorithm. Use the InputFilter parameter to exclude fields, such as an\n ID column, from the input. If you want Amazon SageMaker to pass the entire input dataset to the\n algorithm, accept the default value $.

          \n

          Examples: \"$\", \"$[1:]\", \"$.features\"\n

          " + "smithy.api#documentation": "

          The name of the project.

          ", + "smithy.api#required": {} } }, - "OutputFilter": { - "target": "com.amazonaws.sagemaker#JsonPath", + "ProjectDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          A JSONPath expression used to select a portion of the joined dataset to save\n in the output file for a batch transform job. If you want Amazon SageMaker to store the entire input\n dataset in the output file, leave the default value, $. If you specify\n indexes that aren't within the dimension size of the joined dataset, you get an\n error.

          \n

          Examples: \"$\", \"$[0,5:]\",\n \"$['id','SageMakerOutput']\"\n

          " + "smithy.api#documentation": "

          A description for the project.

          " } }, - "JoinSource": { - "target": "com.amazonaws.sagemaker#JoinSource", + "ServiceCatalogProvisioningDetails": { + "target": "com.amazonaws.sagemaker#ServiceCatalogProvisioningDetails", "traits": { - "smithy.api#documentation": "

          Specifies the source of the data to join with the transformed data. The valid values\n are None and Input. The default value is None,\n which specifies not to join the input with the transformed data. If you want the batch\n transform job to join the original input data with the transformed data, set\n JoinSource to Input.

          \n \n

          For JSON or JSONLines objects, such as a JSON array, Amazon SageMaker adds the transformed data to\n the input JSON object in an attribute called SageMakerOutput. The joined\n result for JSON must be a key-value pair object. If the input is not a key-value pair\n object, Amazon SageMaker creates a new JSON file. In the new JSON file, the input data is stored\n under the SageMakerInput key and the results are stored in\n SageMakerOutput.

          \n

          For CSV files, Amazon SageMaker combines the transformed data with the input data at the end of\n the input data and stores it in the output file. The joined data has the joined input\n data followed by the transformed data and the output is a CSV file.

          " + "smithy.api#documentation": "

          The product ID and provisioning artifact ID to provision a service catalog. For\n information, see What is AWS Service\n Catalog.

          ", + "smithy.api#required": {} + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          An array of key-value pairs that you want to use to organize and track your AWS\n resource costs. For more information, see Tagging AWS resources in the AWS General Reference Guide.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          The data structure used to specify the data to be used for inference in a batch\n transform job and to associate the data that is relevant to the prediction results in\n the output. The input filter provided allows you to exclude input data that is not\n needed for inference in a batch transform job. The output filter provided allows you to\n include input data relevant to interpreting the predictions in the output from the job.\n For more information, see Associate Prediction\n Results with their Corresponding Input Records.

          " } }, - "com.amazonaws.sagemaker#DataSource": { + "com.amazonaws.sagemaker#CreateProjectOutput": { "type": "structure", "members": { - "S3DataSource": { - "target": "com.amazonaws.sagemaker#S3DataSource", + "ProjectArn": { + "target": "com.amazonaws.sagemaker#ProjectArn", "traits": { - "smithy.api#documentation": "

          The S3 location of the data source that is associated with a channel.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the project.

          ", + "smithy.api#required": {} } }, - "FileSystemDataSource": { - "target": "com.amazonaws.sagemaker#FileSystemDataSource", + "ProjectId": { + "target": "com.amazonaws.sagemaker#ProjectId", "traits": { - "smithy.api#documentation": "

          The file system that is associated with a channel.

          " + "smithy.api#documentation": "

          The ID of the new project.

          ", + "smithy.api#required": {} } } + } + }, + "com.amazonaws.sagemaker#CreateTrainingJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateTrainingJobRequest" }, + "output": { + "target": "com.amazonaws.sagemaker#CreateTrainingJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Describes the location of the channel data.

          " + "smithy.api#documentation": "

          Starts a model training job. After training completes, Amazon SageMaker saves the resulting\n model artifacts to an Amazon S3 location that you specify.

          \n

          If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting\n model artifacts as part of the model. You can also use the artifacts in a machine\n learning service other than Amazon SageMaker, provided that you know how to use them for\n inference.\n \n

          \n

          In the request body, you provide the following:

          \n
            \n
          • \n

            \n AlgorithmSpecification - Identifies the training algorithm to\n use.\n

            \n
          • \n
          • \n

            \n HyperParameters - Specify these algorithm-specific parameters to\n enable the estimation of model parameters during training. Hyperparameters can\n be tuned to optimize this learning process. For a list of hyperparameters for\n each training algorithm provided by Amazon SageMaker, see Algorithms.

            \n
          • \n
          • \n

            \n InputDataConfig - Describes the training dataset and the Amazon S3,\n EFS, or FSx location where it is stored.

            \n
          • \n
          • \n

            \n OutputDataConfig - Identifies the Amazon S3 bucket where you want\n Amazon SageMaker to save the results of model training.

            \n

            \n
          • \n
          • \n

            \n ResourceConfig - Identifies the resources, ML compute\n instances, and ML storage volumes to deploy for model training. In distributed\n training, you specify more than one instance.

            \n \n
          • \n
          • \n

            \n EnableManagedSpotTraining - Optimize the cost of training machine\n learning models by up to 80% by using Amazon EC2 Spot instances. For more\n information, see Managed Spot\n Training.

            \n
          • \n
          • \n

            \n RoleArn - The Amazon Resource Name (ARN) that Amazon SageMaker assumes\n to perform tasks on your behalf during model training.\n \n You must grant this role the necessary permissions so that Amazon SageMaker can successfully\n complete model training.

            \n
          • \n
          • \n

            \n StoppingCondition - To help cap training costs, use\n MaxRuntimeInSeconds to set a time limit for training. Use\n MaxWaitTimeInSeconds to specify how long you are willing to\n wait for a managed spot training job to complete.

            \n
          • \n
          \n

          For more information about Amazon SageMaker, see How It Works.

          " } }, - "com.amazonaws.sagemaker#DebugHookConfig": { + "com.amazonaws.sagemaker#CreateTrainingJobRequest": { "type": "structure", "members": { - "LocalPath": { - "target": "com.amazonaws.sagemaker#DirectoryPath", - "traits": { - "smithy.api#documentation": "

          Path to local storage location for tensors. Defaults to\n /opt/ml/output/tensors/.

          " - } - }, - "S3OutputPath": { - "target": "com.amazonaws.sagemaker#S3Uri", + "TrainingJobName": { + "target": "com.amazonaws.sagemaker#TrainingJobName", "traits": { - "smithy.api#documentation": "

          Path to Amazon S3 storage location for tensors.

          ", + "smithy.api#documentation": "

          The name of the training job. The name must be unique within an AWS Region in an\n AWS account.

          ", "smithy.api#required": {} } }, - "HookParameters": { - "target": "com.amazonaws.sagemaker#HookParameters", + "HyperParameters": { + "target": "com.amazonaws.sagemaker#HyperParameters", "traits": { - "smithy.api#documentation": "

          Configuration information for the debug hook parameters.

          " + "smithy.api#documentation": "

          Algorithm-specific parameters that influence the quality of the model. You set\n hyperparameters before you start the learning process. For a list of hyperparameters for\n each training algorithm provided by Amazon SageMaker, see Algorithms.

          \n

          You can specify a maximum of 100 hyperparameters. Each hyperparameter is a\n key-value pair. Each key and value is limited to 256 characters, as specified by the\n Length Constraint.

          " } }, - "CollectionConfigurations": { - "target": "com.amazonaws.sagemaker#CollectionConfigurations", + "AlgorithmSpecification": { + "target": "com.amazonaws.sagemaker#AlgorithmSpecification", "traits": { - "smithy.api#documentation": "

          Configuration information for tensor collections.

          " + "smithy.api#documentation": "

          The registry path of the Docker image that contains the training algorithm and\n algorithm-specific metadata, including the input mode. For more information about\n algorithms provided by Amazon SageMaker, see Algorithms. For information about\n providing your own algorithms, see Using Your Own Algorithms with Amazon\n SageMaker.

          ", + "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          Configuration information for the debug hook parameters, collection configuration, and\n storage paths.

          " - } - }, - "com.amazonaws.sagemaker#DebugRuleConfiguration": { - "type": "structure", - "members": { - "RuleConfigurationName": { - "target": "com.amazonaws.sagemaker#RuleConfigurationName", + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          The name of the rule configuration. It must be unique relative to other rule\n configuration names.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform\n tasks on your behalf.

          \n

          During model training, Amazon SageMaker needs your permission to read input data from an S3\n bucket, download a Docker image that contains training code, write model artifacts to an\n S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant\n permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker\n Roles.

          \n \n

          To be able to pass this role to Amazon SageMaker, the caller of this API must have the\n iam:PassRole permission.

          \n
          ", "smithy.api#required": {} } }, - "LocalPath": { - "target": "com.amazonaws.sagemaker#DirectoryPath", + "InputDataConfig": { + "target": "com.amazonaws.sagemaker#InputDataConfig", "traits": { - "smithy.api#documentation": "

          Path to local storage location for output of rules. Defaults to\n /opt/ml/processing/output/rule/.

          " + "smithy.api#documentation": "

          An array of Channel objects. Each channel is a named input source.\n InputDataConfig\n \n describes the input data and its location.

          \n

          Algorithms can accept input data from one or more channels. For example, an\n algorithm might have two channels of input data, training_data and\n validation_data. The configuration for each channel provides the S3,\n EFS, or FSx location where the input data is stored. It also provides information about\n the stored data: the MIME type, compression method, and whether the data is wrapped in\n RecordIO format.

          \n

          Depending on the input mode that the algorithm supports, Amazon SageMaker either copies input\n data files from an S3 bucket to a local directory in the Docker container, or makes it\n available as input streams. For example, if you specify an EFS location, input data\n files will be made available as input streams. They do not need to be\n downloaded.

          " } }, - "S3OutputPath": { - "target": "com.amazonaws.sagemaker#S3Uri", + "OutputDataConfig": { + "target": "com.amazonaws.sagemaker#OutputDataConfig", "traits": { - "smithy.api#documentation": "

          Path to Amazon S3 storage location for rules.

          " + "smithy.api#documentation": "

          Specifies the path to the S3 location where you want to store model artifacts. Amazon SageMaker\n creates subfolders for the artifacts.

          ", + "smithy.api#required": {} } }, - "RuleEvaluatorImage": { - "target": "com.amazonaws.sagemaker#AlgorithmImage", + "ResourceConfig": { + "target": "com.amazonaws.sagemaker#ResourceConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Elastic Container (ECR) Image for the managed rule evaluation.

          ", + "smithy.api#documentation": "

          The resources, including the ML compute instances and ML storage volumes, to use\n for model training.

          \n

          ML storage volumes store model artifacts and incremental states. Training\n algorithms might also use ML storage volumes for scratch space. If you want Amazon SageMaker to use\n the ML storage volume to store the training data, choose File as the\n TrainingInputMode in the algorithm specification. For distributed\n training algorithms, specify an instance count greater than 1.

          ", "smithy.api#required": {} } }, - "InstanceType": { - "target": "com.amazonaws.sagemaker#ProcessingInstanceType", + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig", "traits": { - "smithy.api#documentation": "

          The instance type to deploy for a training job.

          " + "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that you want your\n training job to connect to. Control access to and from your training container by\n configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon\n Virtual Private Cloud.

          " } }, - "VolumeSizeInGB": { - "target": "com.amazonaws.sagemaker#OptionalVolumeSizeInGB", + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#StoppingCondition", "traits": { - "smithy.api#documentation": "

          The size, in GB, of the ML storage volume attached to the processing instance.

          " + "smithy.api#documentation": "

          Specifies a limit to how long a model training job can run. When the job reaches the\n time limit, Amazon SageMaker ends the training job. Use this API to cap model training costs.

          \n

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays\n job termination for 120 seconds. Algorithms can use this 120-second window to save the\n model artifacts, so the results of training are not lost.

          ", + "smithy.api#required": {} } }, - "RuleParameters": { - "target": "com.amazonaws.sagemaker#RuleParameters", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          Runtime configuration for rule container.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Configuration information for debugging rules.

          " - } - }, - "com.amazonaws.sagemaker#DebugRuleConfigurations": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#DebugRuleConfiguration" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#DebugRuleEvaluationStatus": { - "type": "structure", - "members": { - "RuleConfigurationName": { - "target": "com.amazonaws.sagemaker#RuleConfigurationName", + }, + "EnableNetworkIsolation": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The name of the rule configuration

          " + "smithy.api#documentation": "

          Isolates the training container. No inbound or outbound network calls can be made,\n except for calls between peers within a training cluster for distributed training. If\n you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker\n downloads and uploads customer data and model artifacts through the specified VPC, but\n the training container does not have network access.

          " } }, - "RuleEvaluationJobArn": { - "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "EnableInterContainerTrafficEncryption": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the rule evaluation job.

          " + "smithy.api#documentation": "

          To encrypt all communications between ML compute instances in distributed training,\n choose True. Encryption provides greater security for distributed training,\n but training might take longer. How long it takes depends on the amount of communication\n between compute instances, especially if you use a deep learning algorithm in\n distributed training. For more information, see Protect Communications Between ML\n Compute Instances in a Distributed Training Job.

          " } }, - "RuleEvaluationStatus": { - "target": "com.amazonaws.sagemaker#RuleEvaluationStatus", + "EnableManagedSpotTraining": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          Status of the rule evaluation.

          " + "smithy.api#documentation": "

          To train models using managed spot training, choose True. Managed spot\n training provides a fully managed and scalable infrastructure for training machine\n learning models. This option is useful when training jobs can be interrupted and when\n there is flexibility when the training job is run.

          \n

          The complete and intermediate results of jobs are stored in an Amazon S3 bucket, and can be\n used as a starting point to train models incrementally. Amazon SageMaker provides metrics and\n logs in CloudWatch. They can be used to see when managed spot training jobs are running,\n interrupted, resumed, or completed.

          " } }, - "StatusDetails": { - "target": "com.amazonaws.sagemaker#StatusDetails", + "CheckpointConfig": { + "target": "com.amazonaws.sagemaker#CheckpointConfig", "traits": { - "smithy.api#documentation": "

          Details from the rule evaluation.

          " + "smithy.api#documentation": "

          Contains information about the output location for managed spot training checkpoint\n data.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "DebugHookConfig": { + "target": "com.amazonaws.sagemaker#DebugHookConfig" + }, + "DebugRuleConfigurations": { + "target": "com.amazonaws.sagemaker#DebugRuleConfigurations", "traits": { - "smithy.api#documentation": "

          Timestamp when the rule evaluation status was last modified.

          " + "smithy.api#documentation": "

          Configuration information for debugging rules.

          " } + }, + "TensorBoardOutputConfig": { + "target": "com.amazonaws.sagemaker#TensorBoardOutputConfig" + }, + "ExperimentConfig": { + "target": "com.amazonaws.sagemaker#ExperimentConfig" } - }, - "traits": { - "smithy.api#documentation": "

          Information about the status of the rule evaluation.

          " - } - }, - "com.amazonaws.sagemaker#DebugRuleEvaluationStatuses": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#DebugRuleEvaluationStatus" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#DefaultGid": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 0, - "max": 65535 - } - } - }, - "com.amazonaws.sagemaker#DefaultUid": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 0, - "max": 65535 - } - } - }, - "com.amazonaws.sagemaker#DeleteAlgorithm": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteAlgorithmInput" - }, - "traits": { - "smithy.api#documentation": "

          Removes the specified algorithm from your account.

          " } }, - "com.amazonaws.sagemaker#DeleteAlgorithmInput": { + "com.amazonaws.sagemaker#CreateTrainingJobResponse": { "type": "structure", "members": { - "AlgorithmName": { - "target": "com.amazonaws.sagemaker#EntityName", + "TrainingJobArn": { + "target": "com.amazonaws.sagemaker#TrainingJobArn", "traits": { - "smithy.api#documentation": "

          The name of the algorithm to delete.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the training job.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DeleteApp": { + "com.amazonaws.sagemaker#CreateTransformJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteAppRequest" + "target": "com.amazonaws.sagemaker#CreateTransformJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateTransformJobResponse" }, "errors": [ { "target": "com.amazonaws.sagemaker#ResourceInUse" }, { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], - "traits": { - "smithy.api#documentation": "

          Used to stop and delete an app.

          " - } - }, - "com.amazonaws.sagemaker#DeleteAppImageConfig": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteAppImageConfigRequest" - }, - "errors": [ + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + }, { "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Deletes an AppImageConfig.

          " + "smithy.api#documentation": "

          Starts a transform job. A transform job uses a trained model to get inferences on a\n dataset and saves these results to an Amazon S3 location that you specify.

          \n

          To perform batch transformations, you create a transform job and use the data that you\n have readily available.

          \n

          In the request body, you provide the following:

          \n
            \n
          • \n

            \n TransformJobName - Identifies the transform job. The name must be\n unique within an AWS Region in an AWS account.

            \n
          • \n
          • \n

            \n ModelName - Identifies the model to use. ModelName\n must be the name of an existing Amazon SageMaker model in the same AWS Region and AWS\n account. For information on creating a model, see CreateModel.

            \n
          • \n
          • \n

            \n TransformInput - Describes the dataset to be transformed and the\n Amazon S3 location where it is stored.

            \n
          • \n
          • \n

            \n TransformOutput - Identifies the Amazon S3 location where you want\n Amazon SageMaker to save the results from the transform job.

            \n
          • \n
          • \n

            \n TransformResources - Identifies the ML compute instances for the\n transform job.

            \n
          • \n
          \n

          For more information about how batch transformation works, see Batch\n Transform.

          " } }, - "com.amazonaws.sagemaker#DeleteAppImageConfigRequest": { + "com.amazonaws.sagemaker#CreateTransformJobRequest": { "type": "structure", "members": { - "AppImageConfigName": { - "target": "com.amazonaws.sagemaker#AppImageConfigName", + "TransformJobName": { + "target": "com.amazonaws.sagemaker#TransformJobName", "traits": { - "smithy.api#documentation": "

          The name of the AppImageConfig to delete.

          ", + "smithy.api#documentation": "

          The name of the transform job. The name must be unique within an AWS Region in an\n AWS account.

          ", "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#DeleteAppRequest": { - "type": "structure", - "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + }, + "ModelName": { + "target": "com.amazonaws.sagemaker#ModelName", "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", + "smithy.api#documentation": "

          The name of the model that you want to use for the transform job.\n ModelName must be the name of an existing Amazon SageMaker model within an AWS\n Region in an AWS account.

          ", "smithy.api#required": {} } }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "MaxConcurrentTransforms": { + "target": "com.amazonaws.sagemaker#MaxConcurrentTransforms", "traits": { - "smithy.api#documentation": "

          The user profile name.

          ", + "smithy.api#documentation": "

          The maximum number of parallel requests that can be sent to each instance in a\n transform job. If MaxConcurrentTransforms is set to 0 or left\n unset, Amazon SageMaker checks the optional execution-parameters to determine the settings for your\n chosen algorithm. If the execution-parameters endpoint is not enabled, the default value\n is 1. For more information on execution-parameters, see How Containers Serve Requests. For built-in algorithms, you don't need to\n set a value for MaxConcurrentTransforms.

          " + } + }, + "ModelClientConfig": { + "target": "com.amazonaws.sagemaker#ModelClientConfig", + "traits": { + "smithy.api#documentation": "

          Configures the timeout and maximum number of retries for processing a transform job\n invocation.

          " + } + }, + "MaxPayloadInMB": { + "target": "com.amazonaws.sagemaker#MaxPayloadInMB", + "traits": { + "smithy.api#documentation": "

          The maximum allowed size of the payload, in MB. A payload is the\n data portion of a record (without metadata). The value in MaxPayloadInMB\n must be greater than, or equal to, the size of a single record. To estimate the size of\n a record in MB, divide the size of your dataset by the number of records. To ensure that\n the records fit within the maximum payload size, we recommend using a slightly larger\n value. The default value is 6 MB.\n

          \n

          For cases where the payload might be arbitrarily large and is transmitted using HTTP\n chunked encoding, set the value to 0.\n This\n feature works only in supported algorithms. Currently, Amazon SageMaker built-in\n algorithms do not support HTTP chunked encoding.

          " + } + }, + "BatchStrategy": { + "target": "com.amazonaws.sagemaker#BatchStrategy", + "traits": { + "smithy.api#documentation": "

          Specifies the number of records to include in a mini-batch for an HTTP inference\n request. A record\n is a single unit of input data that\n inference can be made on. For example, a single line in a CSV file is a record.

          \n

          To enable the batch strategy, you must set the SplitType property to\n Line, RecordIO, or TFRecord.

          \n

          To use only one record when making an HTTP invocation request to a container, set\n BatchStrategy to SingleRecord and SplitType\n to Line.

          \n

          To fit as many records in a mini-batch as can fit within the\n MaxPayloadInMB limit, set BatchStrategy to\n MultiRecord and SplitType to Line.

          " + } + }, + "Environment": { + "target": "com.amazonaws.sagemaker#TransformEnvironmentMap", + "traits": { + "smithy.api#documentation": "

          The environment variables to set in the Docker container. We support up to 16 key and\n values entries in the map.

          " + } + }, + "TransformInput": { + "target": "com.amazonaws.sagemaker#TransformInput", + "traits": { + "smithy.api#documentation": "

          Describes the input source and\n the\n way the transform job consumes it.

          ", "smithy.api#required": {} } }, - "AppType": { - "target": "com.amazonaws.sagemaker#AppType", + "TransformOutput": { + "target": "com.amazonaws.sagemaker#TransformOutput", "traits": { - "smithy.api#documentation": "

          The type of app.

          ", + "smithy.api#documentation": "

          Describes the results of the transform job.

          ", "smithy.api#required": {} } }, - "AppName": { - "target": "com.amazonaws.sagemaker#AppName", + "TransformResources": { + "target": "com.amazonaws.sagemaker#TransformResources", "traits": { - "smithy.api#documentation": "

          The name of the app.

          ", + "smithy.api#documentation": "

          Describes the resources, including\n ML\n instance types and ML instance count, to use for the transform\n job.

          ", "smithy.api#required": {} } + }, + "DataProcessing": { + "target": "com.amazonaws.sagemaker#DataProcessing", + "traits": { + "smithy.api#documentation": "

          The data structure used to specify the data to be used for inference in a batch\n transform job and to associate the data that is relevant to the prediction results in\n the output. The input filter provided allows you to exclude input data that is not\n needed for inference in a batch transform job. The output filter provided allows you to\n include input data relevant to interpreting the predictions in the output from the job.\n For more information, see Associate Prediction\n Results with their Corresponding Input Records.

          " + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          (Optional)\n An\n array of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + } + }, + "ExperimentConfig": { + "target": "com.amazonaws.sagemaker#ExperimentConfig" } } }, - "com.amazonaws.sagemaker#DeleteCodeRepository": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteCodeRepositoryInput" - }, - "traits": { - "smithy.api#documentation": "

          Deletes the specified Git repository from your account.

          " - } - }, - "com.amazonaws.sagemaker#DeleteCodeRepositoryInput": { + "com.amazonaws.sagemaker#CreateTransformJobResponse": { "type": "structure", "members": { - "CodeRepositoryName": { - "target": "com.amazonaws.sagemaker#EntityName", + "TransformJobArn": { + "target": "com.amazonaws.sagemaker#TransformJobArn", "traits": { - "smithy.api#documentation": "

          The name of the Git repository to delete.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the transform job.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DeleteDomain": { + "com.amazonaws.sagemaker#CreateTrial": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteDomainRequest" + "target": "com.amazonaws.sagemaker#CreateTrialRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateTrialResponse" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ResourceInUse" + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" }, { "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Used to delete a domain.\n If you onboarded with IAM mode, you will need to delete your domain to onboard again using SSO.\n Use with caution. All of the members of the domain will lose access to their EFS volume,\n including data, notebooks, and other artifacts.\n

          " + "smithy.api#documentation": "

          Creates an Amazon SageMaker trial. A trial is a set of steps called\n trial components that produce a machine learning model. A trial is part\n of a single Amazon SageMaker experiment.

          \n

          When you use Amazon SageMaker Studio or the Amazon SageMaker Python SDK, all experiments, trials, and trial\n components are automatically tracked, logged, and indexed. When you use the AWS SDK for Python (Boto), you\n must use the logging APIs provided by the SDK.

          \n

          You can add tags to a trial and then use the Search API to search for\n the tags.

          \n

          To get a list of all your trials, call the ListTrials API. To view a\n trial's properties, call the DescribeTrial API. To create a trial component,\n call the CreateTrialComponent API.

          " } }, - "com.amazonaws.sagemaker#DeleteDomainRequest": { + "com.amazonaws.sagemaker#CreateTrialComponent": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#CreateTrialComponentRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#CreateTrialComponentResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a trial component, which is a stage of a machine learning\n trial. A trial is composed of one or more trial components. A trial\n component can be used in multiple trials.

          \n

          Trial components include pre-processing jobs, training jobs, and batch transform\n jobs.

          \n

          When you use Amazon SageMaker Studio or the Amazon SageMaker Python SDK, all experiments, trials, and trial\n components are automatically tracked, logged, and indexed. When you use the AWS SDK for Python (Boto), you\n must use the logging APIs provided by the SDK.

          \n

          You can add tags to a trial component and then use the Search API to\n search for the tags.

          \n \n

          \n CreateTrialComponent can only be invoked from within an Amazon SageMaker managed\n environment. This includes Amazon SageMaker training jobs, processing jobs, transform jobs, and Amazon SageMaker\n notebooks. A call to CreateTrialComponent from outside one of these\n environments results in an error.

          \n
          " + } + }, + "com.amazonaws.sagemaker#CreateTrialComponentRequest": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", + "smithy.api#documentation": "

          The name of the component. The name must be unique in your AWS account and is not\n case-sensitive.

          ", "smithy.api#required": {} } }, - "RetentionPolicy": { - "target": "com.amazonaws.sagemaker#RetentionPolicy", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The retention policy for this domain, which specifies whether resources will be retained after the Domain is deleted.\n By default, all resources are retained (not automatically deleted).\n

          " + "smithy.api#documentation": "

          The name of the component as displayed. The name doesn't need to be unique. If\n DisplayName isn't specified, TrialComponentName is\n displayed.

          " + } + }, + "Status": { + "target": "com.amazonaws.sagemaker#TrialComponentStatus", + "traits": { + "smithy.api#documentation": "

          The status of the component. States include:

          \n
            \n
          • \n

            InProgress

            \n
          • \n
          • \n

            Completed

            \n
          • \n
          • \n

            Failed

            \n
          • \n
          " + } + }, + "StartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the component started.

          " + } + }, + "EndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the component ended.

          " + } + }, + "Parameters": { + "target": "com.amazonaws.sagemaker#TrialComponentParameters", + "traits": { + "smithy.api#documentation": "

          The hyperparameters for the component.

          " + } + }, + "InputArtifacts": { + "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", + "traits": { + "smithy.api#documentation": "

          The input artifacts for the component. Examples of input artifacts are datasets,\n algorithms, hyperparameters, source code, and instance types.

          " + } + }, + "OutputArtifacts": { + "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", + "traits": { + "smithy.api#documentation": "

          The output artifacts for the component. Examples of output artifacts are metrics,\n snapshots, logs, and images.

          " + } + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          A list of tags to associate with the component. You can use Search API\n to search on the tags.

          " } } } }, - "com.amazonaws.sagemaker#DeleteEndpoint": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteEndpointInput" - }, - "traits": { - "smithy.api#documentation": "

          Deletes an endpoint. Amazon SageMaker frees up all of the resources that were deployed when the\n endpoint was created.

          \n

          Amazon SageMaker retires any custom KMS key grants associated with the endpoint, meaning you don't\n need to use the RevokeGrant API call.

          " - } - }, - "com.amazonaws.sagemaker#DeleteEndpointConfig": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteEndpointConfigInput" - }, - "traits": { - "smithy.api#documentation": "

          Deletes an endpoint configuration. The DeleteEndpointConfig API\n deletes only the specified configuration. It does not delete endpoints created using the\n configuration.

          \n

          You must not delete an EndpointConfig in use by an endpoint that is\n live or while the UpdateEndpoint or CreateEndpoint operations\n are being performed on the endpoint. If you delete the EndpointConfig of an\n endpoint that is active or being created or updated you may lose visibility into the\n instance type the endpoint is using. The endpoint must be deleted in order to stop\n incurring charges.

          " + "com.amazonaws.sagemaker#CreateTrialComponentResponse": { + "type": "structure", + "members": { + "TrialComponentArn": { + "target": "com.amazonaws.sagemaker#TrialComponentArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial component.

          " + } + } } }, - "com.amazonaws.sagemaker#DeleteEndpointConfigInput": { + "com.amazonaws.sagemaker#CreateTrialRequest": { "type": "structure", "members": { - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the endpoint configuration that you want to delete.

          ", + "smithy.api#documentation": "

          The name of the trial. The name must be unique in your AWS account and is not\n case-sensitive.

          ", + "smithy.api#required": {} + } + }, + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the trial as displayed. The name doesn't need to be unique. If\n DisplayName isn't specified, TrialName is displayed.

          " + } + }, + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment to associate the trial with.

          ", "smithy.api#required": {} } + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          A list of tags to associate with the trial. You can use Search API to\n search on the tags.

          " + } } } }, - "com.amazonaws.sagemaker#DeleteEndpointInput": { + "com.amazonaws.sagemaker#CreateTrialResponse": { "type": "structure", "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + "TrialArn": { + "target": "com.amazonaws.sagemaker#TrialArn", "traits": { - "smithy.api#documentation": "

          The name of the endpoint that you want to delete.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " } } } }, - "com.amazonaws.sagemaker#DeleteExperiment": { + "com.amazonaws.sagemaker#CreateUserProfile": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteExperimentRequest" + "target": "com.amazonaws.sagemaker#CreateUserProfileRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DeleteExperimentResponse" + "target": "com.amazonaws.sagemaker#CreateUserProfileResponse" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], - "traits": { - "smithy.api#documentation": "

          Deletes an Amazon SageMaker experiment. All trials associated with the experiment must be deleted\n first. Use the ListTrials API to get a list of the trials associated with\n the experiment.

          " + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], + "traits": { + "smithy.api#documentation": "

          Creates a user profile. A user profile represents a single user within a domain, and is\n the main way to reference a \"person\" for the purposes of sharing, reporting, and other\n user-oriented features. This entity is created when a user onboards to Amazon SageMaker Studio. If an\n administrator invites a person by email or imports them from SSO, a user profile is\n automatically created. A user profile is the primary holder of settings for an individual\n user and has a reference to the user's private Amazon Elastic File System (EFS) home directory.\n

          " } }, - "com.amazonaws.sagemaker#DeleteExperimentRequest": { + "com.amazonaws.sagemaker#CreateUserProfileRequest": { "type": "structure", "members": { - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The name of the experiment to delete.

          ", + "smithy.api#documentation": "

          The ID of the associated Domain.

          ", + "smithy.api#required": {} + } + }, + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", + "traits": { + "smithy.api#documentation": "

          A name for the UserProfile.

          ", "smithy.api#required": {} } + }, + "SingleSignOnUserIdentifier": { + "target": "com.amazonaws.sagemaker#SingleSignOnUserIdentifier", + "traits": { + "smithy.api#documentation": "

          A specifier for the type of value specified in SingleSignOnUserValue. Currently, the only supported value is \"UserName\".\n If the Domain's AuthMode is SSO, this field is required. If the Domain's AuthMode is not SSO, this field cannot be specified.\n

          " + } + }, + "SingleSignOnUserValue": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          The username of the associated AWS Single Sign-On User for this UserProfile. If the Domain's AuthMode is SSO, this field is\n required, and must match a valid username of a user in your directory. If the Domain's AuthMode is not SSO, this field cannot be specified.\n

          " + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          Each tag consists of a key and an optional value.\n Tag keys must be unique per resource.

          " + } + }, + "UserSettings": { + "target": "com.amazonaws.sagemaker#UserSettings", + "traits": { + "smithy.api#documentation": "

          A collection of settings.

          " + } } } }, - "com.amazonaws.sagemaker#DeleteExperimentResponse": { + "com.amazonaws.sagemaker#CreateUserProfileResponse": { "type": "structure", "members": { - "ExperimentArn": { - "target": "com.amazonaws.sagemaker#ExperimentArn", + "UserProfileArn": { + "target": "com.amazonaws.sagemaker#UserProfileArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment that is being deleted.

          " + "smithy.api#documentation": "

          The user profile Amazon Resource Name (ARN).

          " } } } }, - "com.amazonaws.sagemaker#DeleteFlowDefinition": { + "com.amazonaws.sagemaker#CreateWorkforce": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteFlowDefinitionRequest" + "target": "com.amazonaws.sagemaker#CreateWorkforceRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DeleteFlowDefinitionResponse" + "target": "com.amazonaws.sagemaker#CreateWorkforceResponse" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" - }, - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Deletes the specified flow definition.

          " + "smithy.api#documentation": "

          Use this operation to create a workforce. This operation will return an error\n if a workforce already exists in the AWS Region that you specify. You can only\n create one workforce in each AWS Region per AWS account.

          \n\n

          If you want to create a new workforce in an AWS Region where \n a workforce already exists, use the API\n operation to delete the existing workforce and then use CreateWorkforce \n to create a new workforce.

          \n\n

          To create a private workforce using Amazon Cognito, you must specify a Cognito user pool\n in CognitoConfig.\n You can also create an Amazon Cognito workforce using the Amazon SageMaker console. \n For more information, see \n \n Create a Private Workforce (Amazon Cognito).

          \n\n

          To create a private workforce using your own OIDC Identity Provider (IdP), specify your IdP\n configuration in OidcConfig. Your OIDC IdP must support groups\n because groups are used by Ground Truth and Amazon A2I to create work teams. \n For more information, see \n Create a Private Workforce (OIDC IdP).

          " } }, - "com.amazonaws.sagemaker#DeleteFlowDefinitionRequest": { + "com.amazonaws.sagemaker#CreateWorkforceRequest": { "type": "structure", "members": { - "FlowDefinitionName": { - "target": "com.amazonaws.sagemaker#FlowDefinitionName", + "CognitoConfig": { + "target": "com.amazonaws.sagemaker#CognitoConfig", "traits": { - "smithy.api#documentation": "

          The name of the flow definition you are deleting.

          ", + "smithy.api#documentation": "

          Use this parameter to configure an Amazon Cognito private workforce.\n A single Cognito workforce is created using and corresponds to a single\n \n Amazon Cognito user pool.

          \n \n

          Do not use OidcConfig if you specify values for \n CognitoConfig.

          " + } + }, + "OidcConfig": { + "target": "com.amazonaws.sagemaker#OidcConfig", + "traits": { + "smithy.api#documentation": "

          Use this parameter to configure a private workforce using your own OIDC Identity Provider.

          \n

          Do not use CognitoConfig if you specify values for \n OidcConfig.

          " + } + }, + "SourceIpConfig": { + "target": "com.amazonaws.sagemaker#SourceIpConfig" + }, + "WorkforceName": { + "target": "com.amazonaws.sagemaker#WorkforceName", + "traits": { + "smithy.api#documentation": "

          The name of the private workforce.

          ", "smithy.api#required": {} } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          An array of key-value pairs that contain metadata to help you categorize and \n organize your workforce. Each tag consists of a key and a value, \n both of which you define.

          " + } } } }, - "com.amazonaws.sagemaker#DeleteFlowDefinitionResponse": { - "type": "structure", - "members": {} - }, - "com.amazonaws.sagemaker#DeleteHumanTaskUi": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteHumanTaskUiRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DeleteHumanTaskUiResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], - "traits": { - "smithy.api#documentation": "

          Use this operation to delete a human task user interface (worker task template).

          \n

          \n To see a list of human task user interfaces\n (work task templates) in your account, use .\n When you delete a worker task template, it no longer appears when you call ListHumanTaskUis.

          " - } - }, - "com.amazonaws.sagemaker#DeleteHumanTaskUiRequest": { + "com.amazonaws.sagemaker#CreateWorkforceResponse": { "type": "structure", "members": { - "HumanTaskUiName": { - "target": "com.amazonaws.sagemaker#HumanTaskUiName", + "WorkforceArn": { + "target": "com.amazonaws.sagemaker#WorkforceArn", "traits": { - "smithy.api#documentation": "

          The name of the human task user interface (work task template) you want to delete.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the workforce.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DeleteHumanTaskUiResponse": { - "type": "structure", - "members": {} - }, - "com.amazonaws.sagemaker#DeleteImage": { + "com.amazonaws.sagemaker#CreateWorkteam": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteImageRequest" + "target": "com.amazonaws.sagemaker#CreateWorkteamRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DeleteImageResponse" + "target": "com.amazonaws.sagemaker#CreateWorkteamResponse" }, "errors": [ { "target": "com.amazonaws.sagemaker#ResourceInUse" }, { - "target": "com.amazonaws.sagemaker#ResourceNotFound" + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" } ], "traits": { - "smithy.api#documentation": "

          Deletes a SageMaker image and all versions of the image. The container images aren't\n deleted.

          " + "smithy.api#documentation": "

          Creates a new work team for labeling your data. A work team is defined by one or more\n Amazon Cognito user pools. You must first create the user pools before you can create a work\n team.

          \n

          You cannot create more than 25 work teams in an account and region.

          " } }, - "com.amazonaws.sagemaker#DeleteImageRequest": { + "com.amazonaws.sagemaker#CreateWorkteamRequest": { "type": "structure", "members": { - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "WorkteamName": { + "target": "com.amazonaws.sagemaker#WorkteamName", "traits": { - "smithy.api#documentation": "

          The name of the image to delete.

          ", + "smithy.api#documentation": "

          The name of the work team. Use this name to identify the work team.

          ", + "smithy.api#required": {} + } + }, + "WorkforceName": { + "target": "com.amazonaws.sagemaker#WorkforceName", + "traits": { + "smithy.api#documentation": "

          The name of the workforce.

          " + } + }, + "MemberDefinitions": { + "target": "com.amazonaws.sagemaker#MemberDefinitions", + "traits": { + "smithy.api#documentation": "

          A list of MemberDefinition objects that contains objects that identify\n the workers that make up the work team.

          \n

          Workforces can be created using Amazon Cognito or your own OIDC Identity Provider (IdP). For\n private workforces created using Amazon Cognito use CognitoMemberDefinition. For\n workforces created using your own OIDC identity provider (IdP) use\n OidcMemberDefinition. Do not provide input for both of these parameters\n in a single request.

          \n

          For workforces created using Amazon Cognito, private work teams correspond to Amazon Cognito\n user groups within the user pool used to create a workforce. All of the\n CognitoMemberDefinition objects that make up the member definition must\n have the same ClientId and UserPool values. To add an Amazon\n Cognito user group to an existing worker pool, see Adding groups to a User\n Pool. For more information about user pools, see Amazon Cognito User\n Pools.

          \n

          For workforces created using your own OIDC IdP, specify the user groups that you want to \n include in your private work team in OidcMemberDefinition by listing those groups\n in Groups.

          ", + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.sagemaker#String200", + "traits": { + "smithy.api#documentation": "

          A description of the work team.

          ", "smithy.api#required": {} } + }, + "NotificationConfiguration": { + "target": "com.amazonaws.sagemaker#NotificationConfiguration", + "traits": { + "smithy.api#documentation": "

          Configures notification of workers regarding available or expiring work items.

          " + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          An array of key-value pairs.

          \n

          For more information, see Resource\n Tag and Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + } } } }, - "com.amazonaws.sagemaker#DeleteImageResponse": { + "com.amazonaws.sagemaker#CreateWorkteamResponse": { "type": "structure", - "members": {} + "members": { + "WorkteamArn": { + "target": "com.amazonaws.sagemaker#WorkteamArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team. You can use this ARN to identify the\n work team.

          " + } + } + } }, - "com.amazonaws.sagemaker#DeleteImageVersion": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteImageVersionRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DeleteImageVersionResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceInUse" + "com.amazonaws.sagemaker#CreationTime": { + "type": "timestamp" + }, + "com.amazonaws.sagemaker#CsvContentType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 }, - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*\\/[a-zA-Z0-9](-*[a-zA-Z0-9.])*" + } + }, + "com.amazonaws.sagemaker#CsvContentTypes": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CsvContentType" + }, "traits": { - "smithy.api#documentation": "

          Deletes a version of a SageMaker image. The container image the version represents isn't\n deleted.

          " + "smithy.api#length": { + "min": 1, + "max": 10 + } } }, - "com.amazonaws.sagemaker#DeleteImageVersionRequest": { + "com.amazonaws.sagemaker#CustomImage": { "type": "structure", "members": { "ImageName": { "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          The name of the image.

          ", + "smithy.api#documentation": "

          The name of the CustomImage. Must be unique to your account.

          ", "smithy.api#required": {} } }, - "Version": { + "ImageVersionNumber": { "target": "com.amazonaws.sagemaker#ImageVersionNumber", "traits": { - "smithy.api#documentation": "

          The version to delete.

          ", + "smithy.api#box": {}, + "smithy.api#documentation": "

          The version number of the CustomImage.

          " + } + }, + "AppImageConfigName": { + "target": "com.amazonaws.sagemaker#AppImageConfigName", + "traits": { + "smithy.api#documentation": "

          The name of the AppImageConfig.

          ", "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          A custom SageMaker image. For more information, see\n Bring your own SageMaker image.

          " } }, - "com.amazonaws.sagemaker#DeleteImageVersionResponse": { - "type": "structure", - "members": {} - }, - "com.amazonaws.sagemaker#DeleteModel": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteModelInput" + "com.amazonaws.sagemaker#CustomImages": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#CustomImage" }, "traits": { - "smithy.api#documentation": "

          Deletes a model. The DeleteModel API deletes only the model entry that\n was created in Amazon SageMaker when you called the CreateModel API. It does not\n delete model artifacts, inference code, or the IAM role that you specified when\n creating the model.

          " + "smithy.api#length": { + "min": 0, + "max": 30 + } } }, - "com.amazonaws.sagemaker#DeleteModelInput": { + "com.amazonaws.sagemaker#DataCaptureConfig": { "type": "structure", "members": { - "ModelName": { - "target": "com.amazonaws.sagemaker#ModelName", + "EnableCapture": { + "target": "com.amazonaws.sagemaker#EnableCapture", "traits": { - "smithy.api#documentation": "

          The name of the model to delete.

          ", + "smithy.api#documentation": "

          " + } + }, + "InitialSamplingPercentage": { + "target": "com.amazonaws.sagemaker#SamplingPercentage", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "DestinationS3Uri": { + "target": "com.amazonaws.sagemaker#DestinationS3Uri", + "traits": { + "smithy.api#documentation": "

          ", "smithy.api#required": {} } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          " + } + }, + "CaptureOptions": { + "target": "com.amazonaws.sagemaker#CaptureOptionList", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "CaptureContentTypeHeader": { + "target": "com.amazonaws.sagemaker#CaptureContentTypeHeader", + "traits": { + "smithy.api#documentation": "

          " + } } - } - }, - "com.amazonaws.sagemaker#DeleteModelPackage": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteModelPackageInput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ConflictException" - } - ], "traits": { - "smithy.api#documentation": "

          Deletes a model package.

          \n

          A model package is used to create Amazon SageMaker models or list on AWS Marketplace. Buyers can\n subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

          " + "smithy.api#documentation": "

          " } }, - "com.amazonaws.sagemaker#DeleteModelPackageInput": { + "com.amazonaws.sagemaker#DataCaptureConfigSummary": { "type": "structure", "members": { - "ModelPackageName": { - "target": "com.amazonaws.sagemaker#VersionedArnOrName", + "EnableCapture": { + "target": "com.amazonaws.sagemaker#EnableCapture", "traits": { - "smithy.api#documentation": "

          The name of the model package. The name must have 1 to 63 characters. Valid characters\n are a-z, A-Z, 0-9, and - (hyphen).

          ", + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "CaptureStatus": { + "target": "com.amazonaws.sagemaker#CaptureStatus", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "CurrentSamplingPercentage": { + "target": "com.amazonaws.sagemaker#SamplingPercentage", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "DestinationS3Uri": { + "target": "com.amazonaws.sagemaker#DestinationS3Uri", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          ", "smithy.api#required": {} } } - } - }, - "com.amazonaws.sagemaker#DeleteMonitoringSchedule": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteMonitoringScheduleRequest" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Deletes a monitoring schedule. Also stops the schedule had not already been stopped.\n This does not delete the job execution history of the monitoring schedule.

          " + "smithy.api#documentation": "

          " } }, - "com.amazonaws.sagemaker#DeleteMonitoringScheduleRequest": { + "com.amazonaws.sagemaker#DataCatalogConfig": { "type": "structure", "members": { - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "TableName": { + "target": "com.amazonaws.sagemaker#TableName", "traits": { - "smithy.api#documentation": "

          The name of the monitoring schedule to delete.

          ", + "smithy.api#documentation": "

          The name of the Glue table.

          ", + "smithy.api#required": {} + } + }, + "Catalog": { + "target": "com.amazonaws.sagemaker#Catalog", + "traits": { + "smithy.api#documentation": "

          The name of the Glue table catalog.

          ", + "smithy.api#required": {} + } + }, + "Database": { + "target": "com.amazonaws.sagemaker#Database", + "traits": { + "smithy.api#documentation": "

          The name of the Glue table database.

          ", "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          The meta data of the Glue table which serves as data catalog for the\n OfflineStore.

          " } }, - "com.amazonaws.sagemaker#DeleteNotebookInstance": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteNotebookInstanceInput" - }, + "com.amazonaws.sagemaker#DataDistributionType": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Deletes an Amazon SageMaker notebook instance. Before you can delete a notebook instance, you\n must call the StopNotebookInstance API.

          \n \n

          When you delete a notebook instance, you lose all of your data. Amazon SageMaker removes\n the ML compute instance, and deletes the ML storage volume and the network interface\n associated with the notebook instance.

          \n
          " + "smithy.api#enum": [ + { + "value": "FullyReplicated", + "name": "FULLYREPLICATED" + }, + { + "value": "ShardedByS3Key", + "name": "SHARDEDBYS3KEY" + } + ] } }, - "com.amazonaws.sagemaker#DeleteNotebookInstanceInput": { + "com.amazonaws.sagemaker#DataExplorationNotebookLocation": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#DataInputConfig": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1024 + }, + "smithy.api#pattern": "[\\S\\s]+" + } + }, + "com.amazonaws.sagemaker#DataProcessing": { "type": "structure", "members": { - "NotebookInstanceName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceName", + "InputFilter": { + "target": "com.amazonaws.sagemaker#JsonPath", "traits": { - "smithy.api#documentation": "

          The name of the Amazon SageMaker notebook instance to delete.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A JSONPath expression used to select a portion of the input data to pass to\n the algorithm. Use the InputFilter parameter to exclude fields, such as an\n ID column, from the input. If you want Amazon SageMaker to pass the entire input dataset to the\n algorithm, accept the default value $.

          \n

          Examples: \"$\", \"$[1:]\", \"$.features\"\n

          " + } + }, + "OutputFilter": { + "target": "com.amazonaws.sagemaker#JsonPath", + "traits": { + "smithy.api#documentation": "

          A JSONPath expression used to select a portion of the joined dataset to save\n in the output file for a batch transform job. If you want Amazon SageMaker to store the entire input\n dataset in the output file, leave the default value, $. If you specify\n indexes that aren't within the dimension size of the joined dataset, you get an\n error.

          \n

          Examples: \"$\", \"$[0,5:]\",\n \"$['id','SageMakerOutput']\"\n

          " + } + }, + "JoinSource": { + "target": "com.amazonaws.sagemaker#JoinSource", + "traits": { + "smithy.api#documentation": "

          Specifies the source of the data to join with the transformed data. The valid values\n are None and Input. The default value is None,\n which specifies not to join the input with the transformed data. If you want the batch\n transform job to join the original input data with the transformed data, set\n JoinSource to Input.

          \n \n

          For JSON or JSONLines objects, such as a JSON array, Amazon SageMaker adds the transformed data to\n the input JSON object in an attribute called SageMakerOutput. The joined\n result for JSON must be a key-value pair object. If the input is not a key-value pair\n object, Amazon SageMaker creates a new JSON file. In the new JSON file, and the input data is stored\n under the SageMakerInput key and the results are stored in\n SageMakerOutput.

          \n

          For CSV files, Amazon SageMaker combines the transformed data with the input data at the end of\n the input data and stores it in the output file. The joined data has the joined input\n data followed by the transformed data and the output is a CSV file.

          " } } - } - }, - "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfig": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfigInput" }, "traits": { - "smithy.api#documentation": "

          Deletes a notebook instance lifecycle configuration.

          " + "smithy.api#documentation": "

          The data structure used to specify the data to be used for inference in a batch\n transform job and to associate the data that is relevant to the prediction results in\n the output. The input filter provided allows you to exclude input data that is not\n needed for inference in a batch transform job. The output filter provided allows you to\n include input data relevant to interpreting the predictions in the output from the job.\n For more information, see Associate Prediction\n Results with their Corresponding Input Records.

          " } }, - "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfigInput": { + "com.amazonaws.sagemaker#DataSource": { "type": "structure", "members": { - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "S3DataSource": { + "target": "com.amazonaws.sagemaker#S3DataSource", "traits": { - "smithy.api#documentation": "

          The name of the lifecycle configuration to delete.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The S3 location of the data source that is associated with a channel.

          " + } + }, + "FileSystemDataSource": { + "target": "com.amazonaws.sagemaker#FileSystemDataSource", + "traits": { + "smithy.api#documentation": "

          The file system that is associated with a channel.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Describes the location of the channel data.

          " } }, - "com.amazonaws.sagemaker#DeleteTags": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteTagsInput" - }, - "output": { - "target": "com.amazonaws.sagemaker#DeleteTagsOutput" - }, + "com.amazonaws.sagemaker#Database": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Deletes the specified tags from an Amazon SageMaker resource.

          \n

          To list a resource's tags, use the ListTags API.

          \n \n

          When you call this API to delete tags from a hyperparameter tuning job, the\n deleted tags are not removed from training jobs that the hyperparameter tuning job\n launched before you called this API.

          \n
          " + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": "[\\u0020-\\uD7FF\\uE000-\\uFFFD\\uD800\\uDC00-\\uDBFF\\uDFFF\\t]*" } }, - "com.amazonaws.sagemaker#DeleteTagsInput": { + "com.amazonaws.sagemaker#DatasetDefinition": { "type": "structure", "members": { - "ResourceArn": { - "target": "com.amazonaws.sagemaker#ResourceArn", + "AthenaDatasetDefinition": { + "target": "com.amazonaws.sagemaker#AthenaDatasetDefinition" + }, + "RedshiftDatasetDefinition": { + "target": "com.amazonaws.sagemaker#RedshiftDatasetDefinition" + }, + "LocalPath": { + "target": "com.amazonaws.sagemaker#ProcessingLocalPath", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource whose tags you want to\n delete.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The local path where you want Amazon SageMaker to download the Dataset Definition inputs to run a\n processing job. LocalPath is an absolute path to the input data. This is a required\n parameter when AppManaged is False (default).

          " } }, - "TagKeys": { - "target": "com.amazonaws.sagemaker#TagKeyList", + "DataDistributionType": { + "target": "com.amazonaws.sagemaker#DataDistributionType", "traits": { - "smithy.api#documentation": "

          An array or one or more tag keys to delete.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Whether the generated dataset is FullyReplicated or\n ShardedByS3Key (default).

          " + } + }, + "InputMode": { + "target": "com.amazonaws.sagemaker#InputMode", + "traits": { + "smithy.api#documentation": "

          Whether to use File or Pipe input mode. In File (default) mode,\n Amazon SageMaker copies the data from the input source onto the local Amazon Elastic Block Store\n (Amazon EBS) volumes before starting your training algorithm. This is the most commonly used\n input mode. In Pipe mode, Amazon SageMaker streams input data from the source directly to your\n algorithm without using the EBS volume.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Configuration for Dataset Definition inputs. The Dataset Definition input must specify\n exactly one of either AthenaDatasetDefinition or RedshiftDatasetDefinition\n types.

          " } }, - "com.amazonaws.sagemaker#DeleteTagsOutput": { + "com.amazonaws.sagemaker#DebugHookConfig": { "type": "structure", - "members": {} - }, - "com.amazonaws.sagemaker#DeleteTrial": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DeleteTrialRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DeleteTrialResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" + "members": { + "LocalPath": { + "target": "com.amazonaws.sagemaker#DirectoryPath", + "traits": { + "smithy.api#documentation": "

          Path to local storage location for tensors. Defaults to\n /opt/ml/output/tensors/.

          " + } + }, + "S3OutputPath": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          Path to Amazon S3 storage location for tensors.

          ", + "smithy.api#required": {} + } + }, + "HookParameters": { + "target": "com.amazonaws.sagemaker#HookParameters", + "traits": { + "smithy.api#documentation": "

          Configuration information for the debug hook parameters.

          " + } + }, + "CollectionConfigurations": { + "target": "com.amazonaws.sagemaker#CollectionConfigurations", + "traits": { + "smithy.api#documentation": "

          Configuration information for tensor collections.

          " + } } - ], + }, "traits": { - "smithy.api#documentation": "

          Deletes the specified trial. All trial components that make up the trial must be deleted\n first. Use the DescribeTrialComponent API to get the list of trial\n components.

          " + "smithy.api#documentation": "

          Configuration information for the debug hook parameters, collection configuration, and\n storage paths.

          " } }, - "com.amazonaws.sagemaker#DeleteTrialComponent": { + "com.amazonaws.sagemaker#DebugRuleConfiguration": { + "type": "structure", + "members": { + "RuleConfigurationName": { + "target": "com.amazonaws.sagemaker#RuleConfigurationName", + "traits": { + "smithy.api#documentation": "

          The name of the rule configuration. It must be unique relative to other rule\n configuration names.

          ", + "smithy.api#required": {} + } + }, + "LocalPath": { + "target": "com.amazonaws.sagemaker#DirectoryPath", + "traits": { + "smithy.api#documentation": "

          Path to local storage location for output of rules. Defaults to\n /opt/ml/processing/output/rule/.

          " + } + }, + "S3OutputPath": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          Path to Amazon S3 storage location for rules.

          " + } + }, + "RuleEvaluatorImage": { + "target": "com.amazonaws.sagemaker#AlgorithmImage", + "traits": { + "smithy.api#documentation": "

          The Amazon Elastic Container Registry (ECR) Image for the managed rule evaluation.

          ", + "smithy.api#required": {} + } + }, + "InstanceType": { + "target": "com.amazonaws.sagemaker#ProcessingInstanceType", + "traits": { + "smithy.api#documentation": "

          The instance type to deploy for a training job.

          " + } + }, + "VolumeSizeInGB": { + "target": "com.amazonaws.sagemaker#OptionalVolumeSizeInGB", + "traits": { + "smithy.api#documentation": "

          The size, in GB, of the ML storage volume attached to the processing instance.

          " + } + }, + "RuleParameters": { + "target": "com.amazonaws.sagemaker#RuleParameters", + "traits": { + "smithy.api#documentation": "

          Runtime configuration for rule container.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Configuration information for debugging rules.

          " + } + }, + "com.amazonaws.sagemaker#DebugRuleConfigurations": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#DebugRuleConfiguration" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#DebugRuleEvaluationStatus": { + "type": "structure", + "members": { + "RuleConfigurationName": { + "target": "com.amazonaws.sagemaker#RuleConfigurationName", + "traits": { + "smithy.api#documentation": "

          The name of the rule configuration.

          " + } + }, + "RuleEvaluationJobArn": { + "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the rule evaluation job.

          " + } + }, + "RuleEvaluationStatus": { + "target": "com.amazonaws.sagemaker#RuleEvaluationStatus", + "traits": { + "smithy.api#documentation": "

          Status of the rule evaluation.

          " + } + }, + "StatusDetails": { + "target": "com.amazonaws.sagemaker#StatusDetails", + "traits": { + "smithy.api#documentation": "

          Details from the rule evaluation.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Timestamp when the rule evaluation status was last modified.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information about the status of the rule evaluation.

          " + } + }, + "com.amazonaws.sagemaker#DebugRuleEvaluationStatuses": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#DebugRuleEvaluationStatus" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#DefaultGid": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0, + "max": 65535 + } + } + }, + "com.amazonaws.sagemaker#DefaultUid": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0, + "max": 65535 + } + } + }, + "com.amazonaws.sagemaker#DeleteAction": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteTrialComponentRequest" + "target": "com.amazonaws.sagemaker#DeleteActionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DeleteTrialComponentResponse" + "target": "com.amazonaws.sagemaker#DeleteActionResponse" }, "errors": [ { @@ -5995,59 +6976,57 @@ } ], "traits": { - "smithy.api#documentation": "

          Deletes the specified trial component. A trial component must be disassociated from all\n trials before the trial component can be deleted. To disassociate a trial component from a\n trial, call the DisassociateTrialComponent API.

          " + "smithy.api#documentation": "

          Deletes an action.

          " } }, - "com.amazonaws.sagemaker#DeleteTrialComponentRequest": { + "com.amazonaws.sagemaker#DeleteActionRequest": { "type": "structure", "members": { - "TrialComponentName": { + "ActionName": { "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the component to delete.

          ", + "smithy.api#documentation": "

          The name of the action to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DeleteTrialComponentResponse": { + "com.amazonaws.sagemaker#DeleteActionResponse": { "type": "structure", "members": { - "TrialComponentArn": { - "target": "com.amazonaws.sagemaker#TrialComponentArn", + "ActionArn": { + "target": "com.amazonaws.sagemaker#ActionArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the component is being deleted.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the action.

          " } } } }, - "com.amazonaws.sagemaker#DeleteTrialRequest": { - "type": "structure", - "members": { - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", - "traits": { - "smithy.api#documentation": "

          The name of the trial to delete.

          ", - "smithy.api#required": {} - } - } + "com.amazonaws.sagemaker#DeleteAlgorithm": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteAlgorithmInput" + }, + "traits": { + "smithy.api#documentation": "

          Removes the specified algorithm from your account.

          " } }, - "com.amazonaws.sagemaker#DeleteTrialResponse": { + "com.amazonaws.sagemaker#DeleteAlgorithmInput": { "type": "structure", "members": { - "TrialArn": { - "target": "com.amazonaws.sagemaker#TrialArn", + "AlgorithmName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial that is being deleted.

          " + "smithy.api#documentation": "

          The name of the algorithm to delete.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DeleteUserProfile": { + "com.amazonaws.sagemaker#DeleteApp": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteUserProfileRequest" + "target": "com.amazonaws.sagemaker#DeleteAppRequest" }, "errors": [ { @@ -6058,10 +7037,36 @@ } ], "traits": { - "smithy.api#documentation": "

          Deletes a user profile. When a user profile is deleted, the user loses access to their EFS\n volume, including data, notebooks, and other artifacts.

          " + "smithy.api#documentation": "

          Used to stop and delete an app.

          " } }, - "com.amazonaws.sagemaker#DeleteUserProfileRequest": { + "com.amazonaws.sagemaker#DeleteAppImageConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteAppImageConfigRequest" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an AppImageConfig.

          " + } + }, + "com.amazonaws.sagemaker#DeleteAppImageConfigRequest": { + "type": "structure", + "members": { + "AppImageConfigName": { + "target": "com.amazonaws.sagemaker#AppImageConfigName", + "traits": { + "smithy.api#documentation": "

          The name of the AppImageConfig to delete.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#DeleteAppRequest": { "type": "structure", "members": { "DomainId": { @@ -6077,218 +7082,266 @@ "smithy.api#documentation": "

          The user profile name.

          ", "smithy.api#required": {} } + }, + "AppType": { + "target": "com.amazonaws.sagemaker#AppType", + "traits": { + "smithy.api#documentation": "

          The type of app.

          ", + "smithy.api#required": {} + } + }, + "AppName": { + "target": "com.amazonaws.sagemaker#AppName", + "traits": { + "smithy.api#documentation": "

          The name of the app.

          ", + "smithy.api#required": {} + } } } }, - "com.amazonaws.sagemaker#DeleteWorkforce": { + "com.amazonaws.sagemaker#DeleteArtifact": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteWorkforceRequest" + "target": "com.amazonaws.sagemaker#DeleteArtifactRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DeleteWorkforceResponse" + "target": "com.amazonaws.sagemaker#DeleteArtifactResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Use this operation to delete a workforce.

          \n\n

          If you want to create a new workforce in an AWS Region where\n a workforce already exists, use this operation to delete the \n existing workforce and then use \n to create a new workforce.

          \n \n

          If a private workforce contains one or more work teams, you must use \n the \n operation to delete all work teams before you delete the workforce.\n If you try to delete a workforce that contains one or more work teams,\n you will recieve a ResourceInUse error.

          \n
          " + "smithy.api#documentation": "

          Deletes an artifact. Either ArtifactArn or Source must be\n specified.

          " } }, - "com.amazonaws.sagemaker#DeleteWorkforceRequest": { + "com.amazonaws.sagemaker#DeleteArtifactRequest": { "type": "structure", "members": { - "WorkforceName": { - "target": "com.amazonaws.sagemaker#WorkforceName", + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", "traits": { - "smithy.api#documentation": "

          The name of the workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact to delete.

          " + } + }, + "Source": { + "target": "com.amazonaws.sagemaker#ArtifactSource", + "traits": { + "smithy.api#documentation": "

          The URI of the source.

          " } } } }, - "com.amazonaws.sagemaker#DeleteWorkforceResponse": { + "com.amazonaws.sagemaker#DeleteArtifactResponse": { "type": "structure", - "members": {} + "members": { + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact.

          " + } + } + } }, - "com.amazonaws.sagemaker#DeleteWorkteam": { + "com.amazonaws.sagemaker#DeleteAssociation": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DeleteWorkteamRequest" + "target": "com.amazonaws.sagemaker#DeleteAssociationRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DeleteWorkteamResponse" + "target": "com.amazonaws.sagemaker#DeleteAssociationResponse" }, "errors": [ { - "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Deletes an existing work team. This operation can't be undone.

          " + "smithy.api#documentation": "

          Deletes an association.

          " } }, - "com.amazonaws.sagemaker#DeleteWorkteamRequest": { + "com.amazonaws.sagemaker#DeleteAssociationRequest": { "type": "structure", "members": { - "WorkteamName": { - "target": "com.amazonaws.sagemaker#WorkteamName", + "SourceArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", "traits": { - "smithy.api#documentation": "

          The name of the work team to delete.

          ", + "smithy.api#documentation": "

          The ARN of the source.

          ", "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#DeleteWorkteamResponse": { - "type": "structure", - "members": { - "Success": { - "target": "com.amazonaws.sagemaker#Success", + }, + "DestinationArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", "traits": { - "smithy.api#documentation": "

          Returns true if the work team was successfully deleted; otherwise,\n returns false.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the destination.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DeployedImage": { + "com.amazonaws.sagemaker#DeleteAssociationResponse": { "type": "structure", "members": { - "SpecifiedImage": { - "target": "com.amazonaws.sagemaker#ContainerImage", - "traits": { - "smithy.api#documentation": "

          The image path you specified when you created the model.

          " - } - }, - "ResolvedImage": { - "target": "com.amazonaws.sagemaker#ContainerImage", + "SourceArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", "traits": { - "smithy.api#documentation": "

          The specific digest path of the image hosted in this\n ProductionVariant.

          " + "smithy.api#documentation": "

          The ARN of the source.

          " } }, - "ResolutionTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "DestinationArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", "traits": { - "smithy.api#documentation": "

          The date and time when the image path for the model resolved to the\n ResolvedImage\n

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the destination.

          " } } + } + }, + "com.amazonaws.sagemaker#DeleteCodeRepository": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteCodeRepositoryInput" }, "traits": { - "smithy.api#documentation": "

          Gets the Amazon EC2 Container Registry path of the docker image of the model that is hosted in this ProductionVariant.

          \n

          If you used the registry/repository[:tag] form to specify the image path\n of the primary container when you created the model hosted in this\n ProductionVariant, the path resolves to a path of the form\n registry/repository[@digest]. A digest is a hash value that identifies\n a specific version of an image. For information about Amazon ECR paths, see Pulling an Image in the Amazon ECR User Guide.

          " + "smithy.api#documentation": "

          Deletes the specified Git repository from your account.

          " } }, - "com.amazonaws.sagemaker#DeployedImages": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#DeployedImage" + "com.amazonaws.sagemaker#DeleteCodeRepositoryInput": { + "type": "structure", + "members": { + "CodeRepositoryName": { + "target": "com.amazonaws.sagemaker#EntityName", + "traits": { + "smithy.api#documentation": "

          The name of the Git repository to delete.

          ", + "smithy.api#required": {} + } + } } }, - "com.amazonaws.sagemaker#DescribeAlgorithm": { + "com.amazonaws.sagemaker#DeleteContext": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeAlgorithmInput" + "target": "com.amazonaws.sagemaker#DeleteContextRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeAlgorithmOutput" + "target": "com.amazonaws.sagemaker#DeleteContextResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns a description of the specified algorithm that is in your account.

          " + "smithy.api#documentation": "

          Deletes a context.

          " } }, - "com.amazonaws.sagemaker#DescribeAlgorithmInput": { + "com.amazonaws.sagemaker#DeleteContextRequest": { "type": "structure", "members": { - "AlgorithmName": { - "target": "com.amazonaws.sagemaker#ArnOrName", + "ContextName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the algorithm to describe.

          ", + "smithy.api#documentation": "

          The name of the context to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeAlgorithmOutput": { + "com.amazonaws.sagemaker#DeleteContextResponse": { "type": "structure", "members": { - "AlgorithmName": { - "target": "com.amazonaws.sagemaker#EntityName", - "traits": { - "smithy.api#documentation": "

          The name of the algorithm being described.

          ", - "smithy.api#required": {} - } - }, - "AlgorithmArn": { - "target": "com.amazonaws.sagemaker#AlgorithmArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the algorithm.

          ", - "smithy.api#required": {} - } - }, - "AlgorithmDescription": { - "target": "com.amazonaws.sagemaker#EntityDescription", + "ContextArn": { + "target": "com.amazonaws.sagemaker#ContextArn", "traits": { - "smithy.api#documentation": "

          A brief summary about the algorithm.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          A timestamp specifying when the algorithm was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the context.

          " } + } + } + }, + "com.amazonaws.sagemaker#DeleteDomain": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteDomainRequest" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "TrainingSpecification": { - "target": "com.amazonaws.sagemaker#TrainingSpecification", + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Used to delete a domain.\n If you onboarded with IAM mode, you will need to delete your domain to onboard again using SSO.\n Use with caution. All of the members of the domain will lose access to their EFS volume,\n including data, notebooks, and other artifacts.\n

          " + } + }, + "com.amazonaws.sagemaker#DeleteDomainRequest": { + "type": "structure", + "members": { + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          Details about training jobs run by this algorithm.

          ", + "smithy.api#documentation": "

          The domain ID.

          ", "smithy.api#required": {} } }, - "InferenceSpecification": { - "target": "com.amazonaws.sagemaker#InferenceSpecification", - "traits": { - "smithy.api#documentation": "

          Details about inference jobs that the algorithm runs.

          " - } - }, - "ValidationSpecification": { - "target": "com.amazonaws.sagemaker#AlgorithmValidationSpecification", + "RetentionPolicy": { + "target": "com.amazonaws.sagemaker#RetentionPolicy", "traits": { - "smithy.api#documentation": "

          Details about configurations for one or more training jobs that Amazon SageMaker runs to test the\n algorithm.

          " + "smithy.api#documentation": "

          The retention policy for this domain, which specifies whether resources will be retained after the Domain is deleted.\n By default, all resources are retained (not automatically deleted).\n

          " } - }, - "AlgorithmStatus": { - "target": "com.amazonaws.sagemaker#AlgorithmStatus", + } + } + }, + "com.amazonaws.sagemaker#DeleteEndpoint": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteEndpointInput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes an endpoint. Amazon SageMaker frees up all of the resources that were deployed when the\n endpoint was created.

          \n

          Amazon SageMaker retires any custom KMS key grants associated with the endpoint, meaning you don't\n need to use the RevokeGrant API call.

          " + } + }, + "com.amazonaws.sagemaker#DeleteEndpointConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteEndpointConfigInput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes an endpoint configuration. The DeleteEndpointConfig API\n deletes only the specified configuration. It does not delete endpoints created using the\n configuration.

          \n

          You must not delete an EndpointConfig in use by an endpoint that is\n live or while the UpdateEndpoint or CreateEndpoint operations\n are being performed on the endpoint. If you delete the EndpointConfig of an\n endpoint that is active or being created or updated you may lose visibility into the\n instance type the endpoint is using. The endpoint must be deleted in order to stop\n incurring charges.

          " + } + }, + "com.amazonaws.sagemaker#DeleteEndpointConfigInput": { + "type": "structure", + "members": { + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          The current status of the algorithm.

          ", + "smithy.api#documentation": "

          The name of the endpoint configuration that you want to delete.

          ", "smithy.api#required": {} } - }, - "AlgorithmStatusDetails": { - "target": "com.amazonaws.sagemaker#AlgorithmStatusDetails", + } + } + }, + "com.amazonaws.sagemaker#DeleteEndpointInput": { + "type": "structure", + "members": { + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", "traits": { - "smithy.api#documentation": "

          Details about the current status of the algorithm.

          ", + "smithy.api#documentation": "

          The name of the endpoint that you want to delete.

          ", "smithy.api#required": {} } - }, - "ProductId": { - "target": "com.amazonaws.sagemaker#ProductId", - "traits": { - "smithy.api#documentation": "

          The product identifier of the algorithm.

          " - } - }, - "CertifyForMarketplace": { - "target": "com.amazonaws.sagemaker#CertifyForMarketplace", - "traits": { - "smithy.api#documentation": "

          Whether the algorithm is certified to be listed in AWS Marketplace.

          " - } } } }, - "com.amazonaws.sagemaker#DescribeApp": { + "com.amazonaws.sagemaker#DeleteExperiment": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeAppRequest" + "target": "com.amazonaws.sagemaker#DeleteExperimentRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeAppResponse" + "target": "com.amazonaws.sagemaker#DeleteExperimentResponse" }, "errors": [ { @@ -6296,16 +7349,36 @@ } ], "traits": { - "smithy.api#documentation": "

          Describes the app.

          " + "smithy.api#documentation": "

          Deletes an Amazon SageMaker experiment. All trials associated with the experiment must be deleted\n first. Use the ListTrials API to get a list of the trials associated with\n the experiment.

          " } }, - "com.amazonaws.sagemaker#DescribeAppImageConfig": { + "com.amazonaws.sagemaker#DeleteExperimentRequest": { + "type": "structure", + "members": { + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment to delete.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#DeleteExperimentResponse": { + "type": "structure", + "members": { + "ExperimentArn": { + "target": "com.amazonaws.sagemaker#ExperimentArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment that is being deleted.

          " + } + } + } + }, + "com.amazonaws.sagemaker#DeleteFeatureGroup": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeAppImageConfigRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeAppImageConfigResponse" + "target": "com.amazonaws.sagemaker#DeleteFeatureGroupRequest" }, "errors": [ { @@ -6313,381 +7386,262 @@ } ], "traits": { - "smithy.api#documentation": "

          Describes an AppImageConfig.

          " + "smithy.api#documentation": "

          Delete the FeatureGroup and any data that was written to the\n OnlineStore of the FeatureGroup. Data cannot be accessed from\n the OnlineStore immediately after DeleteFeatureGroup is called.

          \n

          Data written into the OfflineStore will not be deleted. The AWS Glue\n database and tables that are automatically created for your OfflineStore are\n not deleted.

          " } }, - "com.amazonaws.sagemaker#DescribeAppImageConfigRequest": { + "com.amazonaws.sagemaker#DeleteFeatureGroupRequest": { "type": "structure", "members": { - "AppImageConfigName": { - "target": "com.amazonaws.sagemaker#AppImageConfigName", + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", "traits": { - "smithy.api#documentation": "

          The name of the AppImageConfig to describe.

          ", + "smithy.api#documentation": "

          The name of the FeatureGroup you want to delete. The name must be unique\n within an AWS Region in an AWS account.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeAppImageConfigResponse": { - "type": "structure", - "members": { - "AppImageConfigArn": { - "target": "com.amazonaws.sagemaker#AppImageConfigArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AppImageConfig.

          " - } - }, - "AppImageConfigName": { - "target": "com.amazonaws.sagemaker#AppImageConfigName", - "traits": { - "smithy.api#documentation": "

          The name of the AppImageConfig.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          When the AppImageConfig was created.

          " - } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          When the AppImageConfig was last modified.

          " - } + "com.amazonaws.sagemaker#DeleteFlowDefinition": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteFlowDefinitionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeleteFlowDefinitionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "KernelGatewayImageConfig": { - "target": "com.amazonaws.sagemaker#KernelGatewayImageConfig", - "traits": { - "smithy.api#documentation": "

          The configuration of a KernelGateway app.

          " - } + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" } + ], + "traits": { + "smithy.api#documentation": "

          Deletes the specified flow definition.

          " } }, - "com.amazonaws.sagemaker#DescribeAppRequest": { + "com.amazonaws.sagemaker#DeleteFlowDefinitionRequest": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", - "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", - "smithy.api#required": {} - } - }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", - "traits": { - "smithy.api#documentation": "

          The user profile name.

          ", - "smithy.api#required": {} - } - }, - "AppType": { - "target": "com.amazonaws.sagemaker#AppType", - "traits": { - "smithy.api#documentation": "

          The type of app.

          ", - "smithy.api#required": {} - } - }, - "AppName": { - "target": "com.amazonaws.sagemaker#AppName", + "FlowDefinitionName": { + "target": "com.amazonaws.sagemaker#FlowDefinitionName", "traits": { - "smithy.api#documentation": "

          The name of the app.

          ", + "smithy.api#documentation": "

          The name of the flow definition you are deleting.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeAppResponse": { + "com.amazonaws.sagemaker#DeleteFlowDefinitionResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DeleteHumanTaskUi": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteHumanTaskUiRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeleteHumanTaskUiResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Use this operation to delete a human task user interface (worker task template).

          \n

          \n To see a list of human task user interfaces\n (work task templates) in your account, use .\n When you delete a worker task template, it no longer appears when you call ListHumanTaskUis.

          " + } + }, + "com.amazonaws.sagemaker#DeleteHumanTaskUiRequest": { "type": "structure", "members": { - "AppArn": { - "target": "com.amazonaws.sagemaker#AppArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the app.

          " - } - }, - "AppType": { - "target": "com.amazonaws.sagemaker#AppType", - "traits": { - "smithy.api#documentation": "

          The type of app.

          " - } - }, - "AppName": { - "target": "com.amazonaws.sagemaker#AppName", - "traits": { - "smithy.api#documentation": "

          The name of the app.

          " - } - }, - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", - "traits": { - "smithy.api#documentation": "

          The domain ID.

          " - } - }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", - "traits": { - "smithy.api#documentation": "

          The user profile name.

          " - } - }, - "Status": { - "target": "com.amazonaws.sagemaker#AppStatus", - "traits": { - "smithy.api#documentation": "

          The status.

          " - } - }, - "LastHealthCheckTimestamp": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The timestamp of the last health check.

          " - } - }, - "LastUserActivityTimestamp": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The timestamp of the last user's activity.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          The creation time.

          " - } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", - "traits": { - "smithy.api#documentation": "

          The failure reason.

          " - } - }, - "ResourceSpec": { - "target": "com.amazonaws.sagemaker#ResourceSpec", + "HumanTaskUiName": { + "target": "com.amazonaws.sagemaker#HumanTaskUiName", "traits": { - "smithy.api#documentation": "

          The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance.

          " + "smithy.api#documentation": "

          The name of the human task user interface (work task template) you want to delete.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeAutoMLJob": { + "com.amazonaws.sagemaker#DeleteHumanTaskUiResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DeleteImage": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeAutoMLJobRequest" + "target": "com.amazonaws.sagemaker#DeleteImageRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeAutoMLJobResponse" + "target": "com.amazonaws.sagemaker#DeleteImageResponse" }, "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" + }, { "target": "com.amazonaws.sagemaker#ResourceNotFound" } ], "traits": { - "smithy.api#documentation": "

          Returns information about an Amazon SageMaker job.

          " + "smithy.api#documentation": "

          Deletes a SageMaker image and all versions of the image. The container images aren't\n deleted.

          " } }, - "com.amazonaws.sagemaker#DescribeAutoMLJobRequest": { + "com.amazonaws.sagemaker#DeleteImageRequest": { "type": "structure", "members": { - "AutoMLJobName": { - "target": "com.amazonaws.sagemaker#AutoMLJobName", + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          Request information about a job using that job's unique name.

          ", + "smithy.api#documentation": "

          The name of the image to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeAutoMLJobResponse": { + "com.amazonaws.sagemaker#DeleteImageResponse": { "type": "structure", - "members": { - "AutoMLJobName": { - "target": "com.amazonaws.sagemaker#AutoMLJobName", - "traits": { - "smithy.api#documentation": "

          Returns the name of a job.

          ", - "smithy.api#required": {} - } + "members": {} + }, + "com.amazonaws.sagemaker#DeleteImageVersion": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteImageVersionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeleteImageVersionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "AutoMLJobArn": { - "target": "com.amazonaws.sagemaker#AutoMLJobArn", + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a version of a SageMaker image. The container image the version represents isn't\n deleted.

          " + } + }, + "com.amazonaws.sagemaker#DeleteImageVersionRequest": { + "type": "structure", + "members": { + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          Returns the job's ARN.

          ", + "smithy.api#documentation": "

          The name of the image.

          ", "smithy.api#required": {} } }, - "InputDataConfig": { - "target": "com.amazonaws.sagemaker#AutoMLInputDataConfig", + "Version": { + "target": "com.amazonaws.sagemaker#ImageVersionNumber", "traits": { - "smithy.api#documentation": "

          Returns the job's input data config.

          ", + "smithy.api#documentation": "

          The version to delete.

          ", "smithy.api#required": {} } - }, - "OutputDataConfig": { - "target": "com.amazonaws.sagemaker#AutoMLOutputDataConfig", + } + } + }, + "com.amazonaws.sagemaker#DeleteImageVersionResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DeleteModel": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteModelInput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes a model. The DeleteModel API deletes only the model entry that\n was created in Amazon SageMaker when you called the CreateModel API. It does not\n delete model artifacts, inference code, or the IAM role that you specified when\n creating the model.

          " + } + }, + "com.amazonaws.sagemaker#DeleteModelInput": { + "type": "structure", + "members": { + "ModelName": { + "target": "com.amazonaws.sagemaker#ModelName", "traits": { - "smithy.api#documentation": "

          Returns the job's output data config.

          ", + "smithy.api#documentation": "

          The name of the model to delete.

          ", "smithy.api#required": {} } - }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that has read permission to\n the input data location and write permission to the output data location in Amazon S3.

          ", - "smithy.api#required": {} - } - }, - "AutoMLJobObjective": { - "target": "com.amazonaws.sagemaker#AutoMLJobObjective", - "traits": { - "smithy.api#documentation": "

          Returns the job's objective.

          " - } - }, - "ProblemType": { - "target": "com.amazonaws.sagemaker#ProblemType", - "traits": { - "smithy.api#documentation": "

          Returns the job's problem type.

          " - } - }, - "AutoMLJobConfig": { - "target": "com.amazonaws.sagemaker#AutoMLJobConfig", - "traits": { - "smithy.api#documentation": "

          Returns the job's config.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          Returns the job's creation time.

          ", - "smithy.api#required": {} - } - }, - "EndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          Returns the job's end time.

          " - } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          Returns the job's last modified time.

          ", - "smithy.api#required": {} - } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#AutoMLFailureReason", - "traits": { - "smithy.api#documentation": "

          Returns the job's FailureReason.

          " - } - }, - "BestCandidate": { - "target": "com.amazonaws.sagemaker#AutoMLCandidate", - "traits": { - "smithy.api#documentation": "

          Returns the job's BestCandidate.

          " - } - }, - "AutoMLJobStatus": { - "target": "com.amazonaws.sagemaker#AutoMLJobStatus", - "traits": { - "smithy.api#documentation": "

          Returns the job's AutoMLJobStatus.

          ", - "smithy.api#required": {} - } - }, - "AutoMLJobSecondaryStatus": { - "target": "com.amazonaws.sagemaker#AutoMLJobSecondaryStatus", - "traits": { - "smithy.api#documentation": "

          Returns the job's AutoMLJobSecondaryStatus.

          ", - "smithy.api#required": {} - } - }, - "GenerateCandidateDefinitionsOnly": { - "target": "com.amazonaws.sagemaker#GenerateCandidateDefinitionsOnly", - "traits": { - "smithy.api#documentation": "

          Returns the job's output from GenerateCandidateDefinitionsOnly.

          " - } - }, - "AutoMLJobArtifacts": { - "target": "com.amazonaws.sagemaker#AutoMLJobArtifacts", - "traits": { - "smithy.api#documentation": "

          Returns information on the job's artifacts found in AutoMLJobArtifacts.

          " - } - }, - "ResolvedAttributes": { - "target": "com.amazonaws.sagemaker#ResolvedAttributes", - "traits": { - "smithy.api#documentation": "

          This contains ProblemType, AutoMLJobObjective and CompletionCriteria. They're\n auto-inferred values, if not provided by you. If you do provide them, then they'll be the\n same as provided.

          " - } } } }, - "com.amazonaws.sagemaker#DescribeCodeRepository": { + "com.amazonaws.sagemaker#DeleteModelPackage": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeCodeRepositoryInput" + "target": "com.amazonaws.sagemaker#DeleteModelPackageInput" }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeCodeRepositoryOutput" + "errors": [ + { + "target": "com.amazonaws.sagemaker#ConflictException" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a model package.

          \n

          A model package is used to create Amazon SageMaker models or list on AWS Marketplace. Buyers can\n subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

          " + } + }, + "com.amazonaws.sagemaker#DeleteModelPackageGroup": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteModelPackageGroupInput" }, "traits": { - "smithy.api#documentation": "

          Gets details about the specified Git repository.

          " + "smithy.api#documentation": "

          Deletes the specified model group.

          " } }, - "com.amazonaws.sagemaker#DescribeCodeRepositoryInput": { + "com.amazonaws.sagemaker#DeleteModelPackageGroupInput": { "type": "structure", "members": { - "CodeRepositoryName": { - "target": "com.amazonaws.sagemaker#EntityName", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#ArnOrName", "traits": { - "smithy.api#documentation": "

          The name of the Git repository to describe.

          ", + "smithy.api#documentation": "

          The name of the model group to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeCodeRepositoryOutput": { + "com.amazonaws.sagemaker#DeleteModelPackageGroupPolicy": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteModelPackageGroupPolicyInput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes a model group resource policy.

          " + } + }, + "com.amazonaws.sagemaker#DeleteModelPackageGroupPolicyInput": { "type": "structure", "members": { - "CodeRepositoryName": { + "ModelPackageGroupName": { "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the Git repository.

          ", - "smithy.api#required": {} - } - }, - "CodeRepositoryArn": { - "target": "com.amazonaws.sagemaker#CodeRepositoryArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Git repository.

          ", - "smithy.api#required": {} - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          The date and time that the repository was created.

          ", + "smithy.api#documentation": "

          The name of the model group for which to delete the policy.

          ", "smithy.api#required": {} } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + } + } + }, + "com.amazonaws.sagemaker#DeleteModelPackageInput": { + "type": "structure", + "members": { + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#VersionedArnOrName", "traits": { - "smithy.api#documentation": "

          The date and time that the repository was last changed.

          ", + "smithy.api#documentation": "

          The name of the model package. The name must have 1 to 63 characters. Valid characters\n are a-z, A-Z, 0-9, and - (hyphen).

          ", "smithy.api#required": {} } - }, - "GitConfig": { - "target": "com.amazonaws.sagemaker#GitConfig", - "traits": { - "smithy.api#documentation": "

          Configuration details about the repository, including the URL where the repository is\n located, the default branch, and the Amazon Resource Name (ARN) of the AWS Secrets\n Manager secret that contains the credentials used to access the repository.

          " - } } } }, - "com.amazonaws.sagemaker#DescribeCompilationJob": { + "com.amazonaws.sagemaker#DeleteMonitoringSchedule": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeCompilationJobRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeCompilationJobResponse" + "target": "com.amazonaws.sagemaker#DeleteMonitoringScheduleRequest" }, "errors": [ { @@ -6695,413 +7649,412 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns information about a model compilation job.

          \n

          To create a model compilation job, use CreateCompilationJob. To get\n information about multiple model compilation jobs, use ListCompilationJobs.

          " + "smithy.api#documentation": "

          Deletes a monitoring schedule. Also stops the schedule had not already been stopped.\n This does not delete the job execution history of the monitoring schedule.

          " } }, - "com.amazonaws.sagemaker#DescribeCompilationJobRequest": { + "com.amazonaws.sagemaker#DeleteMonitoringScheduleRequest": { "type": "structure", "members": { - "CompilationJobName": { - "target": "com.amazonaws.sagemaker#EntityName", + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", "traits": { - "smithy.api#documentation": "

          The name of the model compilation job that you want information about.

          ", + "smithy.api#documentation": "

          The name of the monitoring schedule to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeCompilationJobResponse": { + "com.amazonaws.sagemaker#DeleteNotebookInstance": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteNotebookInstanceInput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes an Amazon SageMaker notebook instance. Before you can delete a notebook instance, you\n must call the StopNotebookInstance API.

          \n \n

          When you delete a notebook instance, you lose all of your data. Amazon SageMaker removes\n the ML compute instance, and deletes the ML storage volume and the network interface\n associated with the notebook instance.

          \n
          " + } + }, + "com.amazonaws.sagemaker#DeleteNotebookInstanceInput": { "type": "structure", "members": { - "CompilationJobName": { - "target": "com.amazonaws.sagemaker#EntityName", + "NotebookInstanceName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceName", "traits": { - "smithy.api#documentation": "

          The name of the model compilation job.

          ", + "smithy.api#documentation": "

          The name of the Amazon SageMaker notebook instance to delete.

          ", "smithy.api#required": {} } - }, - "CompilationJobArn": { - "target": "com.amazonaws.sagemaker#CompilationJobArn", + } + } + }, + "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfigInput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes a notebook instance lifecycle configuration.

          " + } + }, + "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfigInput": { + "type": "structure", + "members": { + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker assumes to perform the model\n compilation job.

          ", + "smithy.api#documentation": "

          The name of the lifecycle configuration to delete.

          ", "smithy.api#required": {} } - }, - "CompilationJobStatus": { - "target": "com.amazonaws.sagemaker#CompilationJobStatus", + } + } + }, + "com.amazonaws.sagemaker#DeletePipeline": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeletePipelineRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeletePipelineResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a pipeline.

          " + } + }, + "com.amazonaws.sagemaker#DeletePipelineRequest": { + "type": "structure", + "members": { + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The status of the model compilation job.

          ", + "smithy.api#documentation": "

          The name of the pipeline to delete.

          ", "smithy.api#required": {} } }, - "CompilationStartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The time when the model compilation job started the CompilationJob\n instances.

          \n

          You are billed for the time between this timestamp and the timestamp in the DescribeCompilationJobResponse$CompilationEndTime field. In Amazon CloudWatch Logs,\n the start time might be later than this time. That's because it takes time to download\n the compilation job, which depends on the size of the compilation job container.

          " - } - }, - "CompilationEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The time when the model compilation job on a compilation job instance ended. For a\n successful or stopped job, this is when the job's model artifacts have finished\n uploading. For a failed job, this is when Amazon SageMaker detected that the job failed.

          " - } - }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#StoppingCondition", + "ClientRequestToken": { + "target": "com.amazonaws.sagemaker#IdempotencyToken", "traits": { - "smithy.api#documentation": "

          Specifies a limit to how long a model compilation job can run. When the job reaches\n the time limit, Amazon SageMaker ends the compilation job. Use this API to cap model training\n costs.

          ", + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n operation. An idempotent operation completes no more than one time.

          ", + "smithy.api#idempotencyToken": {}, "smithy.api#required": {} } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + } + } + }, + "com.amazonaws.sagemaker#DeletePipelineResponse": { + "type": "structure", + "members": { + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", "traits": { - "smithy.api#documentation": "

          The time that the model compilation job was created.

          ", - "smithy.api#required": {} - } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", - "traits": { - "smithy.api#documentation": "

          The time that the status\n of\n the model compilation job was last modified.

          ", - "smithy.api#required": {} - } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", - "traits": { - "smithy.api#documentation": "

          If a model compilation job failed, the reason it failed.

          ", - "smithy.api#required": {} - } - }, - "ModelArtifacts": { - "target": "com.amazonaws.sagemaker#ModelArtifacts", - "traits": { - "smithy.api#documentation": "

          Information about the location in Amazon S3 that has been configured for storing the model\n artifacts used in the compilation job.

          ", - "smithy.api#required": {} - } - }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model compilation job.

          ", - "smithy.api#required": {} - } - }, - "InputConfig": { - "target": "com.amazonaws.sagemaker#InputConfig", - "traits": { - "smithy.api#documentation": "

          Information about the location in Amazon S3 of the input model artifacts, the name and\n shape of the expected data inputs, and the framework in which the model was\n trained.

          ", - "smithy.api#required": {} - } - }, - "OutputConfig": { - "target": "com.amazonaws.sagemaker#OutputConfig", - "traits": { - "smithy.api#documentation": "

          Information about the output location for the compiled model and the target device\n that the model runs on.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline to delete.

          " } } } }, - "com.amazonaws.sagemaker#DescribeDomain": { + "com.amazonaws.sagemaker#DeleteProject": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeDomainRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeDomainResponse" + "target": "com.amazonaws.sagemaker#DeleteProjectInput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          The description of the domain.

          " + "smithy.api#documentation": "

          Delete the specified project.

          " } }, - "com.amazonaws.sagemaker#DescribeDomainRequest": { + "com.amazonaws.sagemaker#DeleteProjectInput": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + "ProjectName": { + "target": "com.amazonaws.sagemaker#ProjectEntityName", "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", + "smithy.api#documentation": "

          The name of the project to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeDomainResponse": { + "com.amazonaws.sagemaker#DeleteTags": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteTagsInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeleteTagsOutput" + }, + "traits": { + "smithy.api#documentation": "

          Deletes the specified tags from an Amazon SageMaker resource.

          \n

          To list a resource's tags, use the ListTags API.

          \n \n

          When you call this API to delete tags from a hyperparameter tuning job, the\n deleted tags are not removed from training jobs that the hyperparameter tuning job\n launched before you called this API.

          \n
          " + } + }, + "com.amazonaws.sagemaker#DeleteTagsInput": { "type": "structure", "members": { - "DomainArn": { - "target": "com.amazonaws.sagemaker#DomainArn", - "traits": { - "smithy.api#documentation": "

          The domain's Amazon Resource Name (ARN).

          " - } - }, - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", - "traits": { - "smithy.api#documentation": "

          The domain ID.

          " - } - }, - "DomainName": { - "target": "com.amazonaws.sagemaker#DomainName", - "traits": { - "smithy.api#documentation": "

          The domain name.

          " - } - }, - "HomeEfsFileSystemId": { - "target": "com.amazonaws.sagemaker#ResourceId", - "traits": { - "smithy.api#documentation": "

          The ID of the Amazon Elastic File System (EFS) managed by this Domain.

          " - } - }, - "SingleSignOnManagedApplicationInstanceId": { - "target": "com.amazonaws.sagemaker#String256", - "traits": { - "smithy.api#documentation": "

          The SSO managed application instance ID.

          " - } - }, - "Status": { - "target": "com.amazonaws.sagemaker#DomainStatus", - "traits": { - "smithy.api#documentation": "

          The status.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          The creation time.

          " - } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", - "traits": { - "smithy.api#documentation": "

          The last modified time.

          " - } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", - "traits": { - "smithy.api#documentation": "

          The failure reason.

          " - } - }, - "AuthMode": { - "target": "com.amazonaws.sagemaker#AuthMode", - "traits": { - "smithy.api#documentation": "

          The domain's authentication mode.

          " - } - }, - "DefaultUserSettings": { - "target": "com.amazonaws.sagemaker#UserSettings", - "traits": { - "smithy.api#documentation": "

          Settings which are applied to all UserProfiles in this domain, if settings are not explicitly specified\n in a given UserProfile.\n

          " - } - }, - "AppNetworkAccessType": { - "target": "com.amazonaws.sagemaker#AppNetworkAccessType", - "traits": { - "smithy.api#documentation": "

          Specifies the VPC used for non-EFS traffic. The default value is\n PublicInternetOnly.

          \n
            \n
          • \n

            \n PublicInternetOnly - Non-EFS traffic is through a VPC managed by\n Amazon SageMaker, which allows direct internet access

            \n
          • \n
          • \n

            \n VpcOnly - All Studio traffic is through the specified VPC and subnets

            \n
          • \n
          " - } - }, - "HomeEfsFileSystemKmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", - "traits": { - "smithy.api#deprecated": { - "message": "This property is deprecated, use KmsKeyId instead." - }, - "smithy.api#documentation": "

          This member is deprecated and replaced with KmsKeyId.

          " - } - }, - "SubnetIds": { - "target": "com.amazonaws.sagemaker#Subnets", - "traits": { - "smithy.api#documentation": "

          The VPC subnets that Studio uses for communication.

          " - } - }, - "Url": { - "target": "com.amazonaws.sagemaker#String1024", - "traits": { - "smithy.api#documentation": "

          The domain's URL.

          " - } - }, - "VpcId": { - "target": "com.amazonaws.sagemaker#VpcId", + "ResourceArn": { + "target": "com.amazonaws.sagemaker#ResourceArn", "traits": { - "smithy.api#documentation": "

          The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource whose tags you want to\n delete.

          ", + "smithy.api#required": {} } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "TagKeys": { + "target": "com.amazonaws.sagemaker#TagKeyList", "traits": { - "smithy.api#documentation": "

          The AWS KMS customer managed CMK used to encrypt\n the EFS volume attached to the domain.

          " + "smithy.api#documentation": "

          An array of one or more tag keys to delete.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeEndpoint": { + "com.amazonaws.sagemaker#DeleteTagsOutput": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DeleteTrial": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeEndpointInput" + "target": "com.amazonaws.sagemaker#DeleteTrialRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeEndpointOutput" + "target": "com.amazonaws.sagemaker#DeleteTrialResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns the description of an endpoint.

          " + "smithy.api#documentation": "

          Deletes the specified trial. All trial components that make up the trial must be deleted\n first. Use the DescribeTrialComponent API to get the list of trial\n components.

          " } }, - "com.amazonaws.sagemaker#DescribeEndpointConfig": { + "com.amazonaws.sagemaker#DeleteTrialComponent": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeEndpointConfigInput" + "target": "com.amazonaws.sagemaker#DeleteTrialComponentRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeEndpointConfigOutput" + "target": "com.amazonaws.sagemaker#DeleteTrialComponentResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns the description of an endpoint configuration created using the\n CreateEndpointConfig API.

          " + "smithy.api#documentation": "

          Deletes the specified trial component. A trial component must be disassociated from all\n trials before the trial component can be deleted. To disassociate a trial component from a\n trial, call the DisassociateTrialComponent API.

          " } }, - "com.amazonaws.sagemaker#DescribeEndpointConfigInput": { + "com.amazonaws.sagemaker#DeleteTrialComponentRequest": { "type": "structure", "members": { - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the endpoint configuration.

          ", + "smithy.api#documentation": "

          The name of the component to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeEndpointConfigOutput": { + "com.amazonaws.sagemaker#DeleteTrialComponentResponse": { "type": "structure", "members": { - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", - "traits": { - "smithy.api#documentation": "

          Name of the Amazon SageMaker endpoint configuration.

          ", - "smithy.api#required": {} - } - }, - "EndpointConfigArn": { - "target": "com.amazonaws.sagemaker#EndpointConfigArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint configuration.

          ", - "smithy.api#required": {} - } - }, - "ProductionVariants": { - "target": "com.amazonaws.sagemaker#ProductionVariantList", - "traits": { - "smithy.api#documentation": "

          An array of ProductionVariant objects, one for each model that you\n want to host at this endpoint.

          ", - "smithy.api#required": {} - } - }, - "DataCaptureConfig": { - "target": "com.amazonaws.sagemaker#DataCaptureConfig" - }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", - "traits": { - "smithy.api#documentation": "

          AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage\n volume attached to the instance.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "TrialComponentArn": { + "target": "com.amazonaws.sagemaker#TrialComponentArn", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the endpoint configuration was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the component that is being deleted.

          " } } } }, - "com.amazonaws.sagemaker#DescribeEndpointInput": { + "com.amazonaws.sagemaker#DeleteTrialRequest": { "type": "structure", "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the endpoint.

          ", + "smithy.api#documentation": "

          The name of the trial to delete.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeEndpointOutput": { + "com.amazonaws.sagemaker#DeleteTrialResponse": { "type": "structure", "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + "TrialArn": { + "target": "com.amazonaws.sagemaker#TrialArn", "traits": { - "smithy.api#documentation": "

          Name of the endpoint.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial that is being deleted.

          " } + } + } + }, + "com.amazonaws.sagemaker#DeleteUserProfile": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteUserProfileRequest" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceInUse" }, - "EndpointArn": { - "target": "com.amazonaws.sagemaker#EndpointArn", + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes a user profile. When a user profile is deleted, the user loses access to their EFS\n volume, including data, notebooks, and other artifacts.

          " + } + }, + "com.amazonaws.sagemaker#DeleteUserProfileRequest": { + "type": "structure", + "members": { + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", + "smithy.api#documentation": "

          The domain ID.

          ", "smithy.api#required": {} } }, - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          The name of the endpoint configuration associated with this endpoint.

          ", + "smithy.api#documentation": "

          The user profile name.

          ", "smithy.api#required": {} } - }, - "ProductionVariants": { - "target": "com.amazonaws.sagemaker#ProductionVariantSummaryList", + } + } + }, + "com.amazonaws.sagemaker#DeleteWorkforce": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteWorkforceRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeleteWorkforceResponse" + }, + "traits": { + "smithy.api#documentation": "

          Use this operation to delete a workforce.

          \n\n

          If you want to create a new workforce in an AWS Region where\n a workforce already exists, use this operation to delete the \n existing workforce and then use \n to create a new workforce.

          \n \n

          If a private workforce contains one or more work teams, you must use \n the \n operation to delete all work teams before you delete the workforce.\n If you try to delete a workforce that contains one or more work teams,\n you will receive a ResourceInUse error.

          \n
          " + } + }, + "com.amazonaws.sagemaker#DeleteWorkforceRequest": { + "type": "structure", + "members": { + "WorkforceName": { + "target": "com.amazonaws.sagemaker#WorkforceName", "traits": { - "smithy.api#documentation": "

          An array of ProductionVariantSummary objects, one for each model\n hosted behind this endpoint.

          " + "smithy.api#documentation": "

          The name of the workforce.

          ", + "smithy.api#required": {} } - }, - "DataCaptureConfig": { - "target": "com.amazonaws.sagemaker#DataCaptureConfigSummary" - }, - "EndpointStatus": { - "target": "com.amazonaws.sagemaker#EndpointStatus", + } + } + }, + "com.amazonaws.sagemaker#DeleteWorkforceResponse": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DeleteWorkteam": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DeleteWorkteamRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DeleteWorkteamResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + } + ], + "traits": { + "smithy.api#documentation": "

          Deletes an existing work team. This operation can't be undone.

          " + } + }, + "com.amazonaws.sagemaker#DeleteWorkteamRequest": { + "type": "structure", + "members": { + "WorkteamName": { + "target": "com.amazonaws.sagemaker#WorkteamName", "traits": { - "smithy.api#documentation": "

          The status of the endpoint.

          \n
            \n
          • \n

            \n OutOfService: Endpoint is not available to take incoming\n requests.

            \n
          • \n
          • \n

            \n Creating: CreateEndpoint is executing.

            \n
          • \n
          • \n

            \n Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            \n
          • \n
          • \n

            \n SystemUpdating: Endpoint is undergoing maintenance and cannot be\n updated or deleted or re-scaled until it has completed. This maintenance\n operation does not change any customer-specified values such as VPC config, KMS\n encryption, model, instance type, or instance count.

            \n
          • \n
          • \n

            \n RollingBack: Endpoint fails to scale up or down or change its\n variant weight and is in the process of rolling back to its previous\n configuration. Once the rollback completes, endpoint returns to an\n InService status. This transitional status only applies to an\n endpoint that has autoscaling enabled and is undergoing variant weight or\n capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called\n explicitly.

            \n
          • \n
          • \n

            \n InService: Endpoint is available to process incoming\n requests.

            \n
          • \n
          • \n

            \n Deleting: DeleteEndpoint is executing.

            \n
          • \n
          • \n

            \n Failed: Endpoint could not be created, updated, or re-scaled. Use\n DescribeEndpointOutput$FailureReason for information about\n the failure. DeleteEndpoint is the only operation that can be\n performed on a failed endpoint.

            \n
          • \n
          ", + "smithy.api#documentation": "

          The name of the work team to delete.

          ", "smithy.api#required": {} } + } + } + }, + "com.amazonaws.sagemaker#DeleteWorkteamResponse": { + "type": "structure", + "members": { + "Success": { + "target": "com.amazonaws.sagemaker#Success", + "traits": { + "smithy.api#documentation": "

          Returns true if the work team was successfully deleted; otherwise,\n returns false.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#DeployedImage": { + "type": "structure", + "members": { + "SpecifiedImage": { + "target": "com.amazonaws.sagemaker#ContainerImage", + "traits": { + "smithy.api#documentation": "

          The image path you specified when you created the model.

          " + } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "ResolvedImage": { + "target": "com.amazonaws.sagemaker#ContainerImage", "traits": { - "smithy.api#documentation": "

          If the status of the endpoint is Failed, the reason why it failed.\n

          " + "smithy.api#documentation": "

          The specific digest path of the image hosted in this\n ProductionVariant.

          " } }, - "CreationTime": { + "ResolutionTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the endpoint was created.

          ", + "smithy.api#documentation": "

          The date and time when the image path for the model resolved to the\n ResolvedImage\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Gets the Amazon EC2 Container Registry path of the docker image of the model that is hosted in this ProductionVariant.

          \n

          If you used the registry/repository[:tag] form to specify the image path\n of the primary container when you created the model hosted in this\n ProductionVariant, the path resolves to a path of the form\n registry/repository[@digest]. A digest is a hash value that identifies\n a specific version of an image. For information about Amazon ECR paths, see Pulling an Image in the Amazon ECR User Guide.

          " + } + }, + "com.amazonaws.sagemaker#DeployedImages": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#DeployedImage" + } + }, + "com.amazonaws.sagemaker#DeploymentConfig": { + "type": "structure", + "members": { + "BlueGreenUpdatePolicy": { + "target": "com.amazonaws.sagemaker#BlueGreenUpdatePolicy", + "traits": { + "smithy.api#documentation": "

          ", "smithy.api#required": {} } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "AutoRollbackConfiguration": { + "target": "com.amazonaws.sagemaker#AutoRollbackConfig", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the endpoint was last modified.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Currently, the DeploymentConfig API is not supported.

          " } }, - "com.amazonaws.sagemaker#DescribeExperiment": { + "com.amazonaws.sagemaker#DescribeAction": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeExperimentRequest" + "target": "com.amazonaws.sagemaker#DescribeActionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeExperimentResponse" + "target": "com.amazonaws.sagemaker#DescribeActionResponse" }, "errors": [ { @@ -7109,188 +8062,197 @@ } ], "traits": { - "smithy.api#documentation": "

          Provides a list of an experiment's properties.

          " + "smithy.api#documentation": "

          Describes an action.

          " } }, - "com.amazonaws.sagemaker#DescribeExperimentRequest": { + "com.amazonaws.sagemaker#DescribeActionRequest": { "type": "structure", "members": { - "ExperimentName": { + "ActionName": { "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the experiment to describe.

          ", + "smithy.api#documentation": "

          The name of the action to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeExperimentResponse": { + "com.amazonaws.sagemaker#DescribeActionResponse": { "type": "structure", "members": { - "ExperimentName": { + "ActionName": { "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the experiment.

          " + "smithy.api#documentation": "

          The name of the action.

          " } }, - "ExperimentArn": { - "target": "com.amazonaws.sagemaker#ExperimentArn", + "ActionArn": { + "target": "com.amazonaws.sagemaker#ActionArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the action.

          " } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "Source": { + "target": "com.amazonaws.sagemaker#ActionSource", "traits": { - "smithy.api#documentation": "

          The name of the experiment as displayed. If DisplayName isn't specified,\n ExperimentName is displayed.

          " + "smithy.api#documentation": "

          The source of the action.

          " } }, - "Source": { - "target": "com.amazonaws.sagemaker#ExperimentSource", + "ActionType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The ARN of the source and, optionally, the type.

          " + "smithy.api#documentation": "

          The type of the action.

          " } }, "Description": { "target": "com.amazonaws.sagemaker#ExperimentDescription", "traits": { - "smithy.api#documentation": "

          The description of the experiment.

          " + "smithy.api#documentation": "

          The description of the action.

          " + } + }, + "Status": { + "target": "com.amazonaws.sagemaker#ActionStatus", + "traits": { + "smithy.api#documentation": "

          The status of the action.

          " + } + }, + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", + "traits": { + "smithy.api#documentation": "

          A list of the action's properties.

          " } }, "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the experiment was created.

          " + "smithy.api#documentation": "

          When the action was created.

          " } }, "CreatedBy": { - "target": "com.amazonaws.sagemaker#UserContext", - "traits": { - "smithy.api#documentation": "

          Who created the experiment.

          " - } + "target": "com.amazonaws.sagemaker#UserContext" }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the experiment was last modified.

          " + "smithy.api#documentation": "

          When the action was last modified.

          " } }, "LastModifiedBy": { - "target": "com.amazonaws.sagemaker#UserContext", - "traits": { - "smithy.api#documentation": "

          Who last modified the experiment.

          " - } + "target": "com.amazonaws.sagemaker#UserContext" + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" } } }, - "com.amazonaws.sagemaker#DescribeFlowDefinition": { + "com.amazonaws.sagemaker#DescribeAlgorithm": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeFlowDefinitionRequest" + "target": "com.amazonaws.sagemaker#DescribeAlgorithmInput" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeFlowDefinitionResponse" + "target": "com.amazonaws.sagemaker#DescribeAlgorithmOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Returns information about the specified flow definition.

          " + "smithy.api#documentation": "

          Returns a description of the specified algorithm that is in your account.

          " } }, - "com.amazonaws.sagemaker#DescribeFlowDefinitionRequest": { + "com.amazonaws.sagemaker#DescribeAlgorithmInput": { "type": "structure", "members": { - "FlowDefinitionName": { - "target": "com.amazonaws.sagemaker#FlowDefinitionName", + "AlgorithmName": { + "target": "com.amazonaws.sagemaker#ArnOrName", "traits": { - "smithy.api#documentation": "

          The name of the flow definition.

          ", + "smithy.api#documentation": "

          The name of the algorithm to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeFlowDefinitionResponse": { + "com.amazonaws.sagemaker#DescribeAlgorithmOutput": { "type": "structure", "members": { - "FlowDefinitionArn": { - "target": "com.amazonaws.sagemaker#FlowDefinitionArn", + "AlgorithmName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition.

          ", + "smithy.api#documentation": "

          The name of the algorithm being described.

          ", "smithy.api#required": {} } }, - "FlowDefinitionName": { - "target": "com.amazonaws.sagemaker#FlowDefinitionName", + "AlgorithmArn": { + "target": "com.amazonaws.sagemaker#AlgorithmArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the algorithm.

          ", "smithy.api#required": {} } }, - "FlowDefinitionStatus": { - "target": "com.amazonaws.sagemaker#FlowDefinitionStatus", + "AlgorithmDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The status of the flow definition. Valid values are listed below.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A brief summary about the algorithm.

          " } }, "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The timestamp when the flow definition was created.

          ", + "smithy.api#documentation": "

          A timestamp specifying when the algorithm was created.

          ", "smithy.api#required": {} } }, - "HumanLoopRequestSource": { - "target": "com.amazonaws.sagemaker#HumanLoopRequestSource", + "TrainingSpecification": { + "target": "com.amazonaws.sagemaker#TrainingSpecification", "traits": { - "smithy.api#documentation": "

          Container for configuring the source of human task requests. Used to specify if\n Amazon Rekognition or Amazon Textract is used as an integration source.

          " + "smithy.api#documentation": "

          Details about training jobs run by this algorithm.

          ", + "smithy.api#required": {} } }, - "HumanLoopActivationConfig": { - "target": "com.amazonaws.sagemaker#HumanLoopActivationConfig", + "InferenceSpecification": { + "target": "com.amazonaws.sagemaker#InferenceSpecification", "traits": { - "smithy.api#documentation": "

          An object containing information about what triggers a human review workflow.

          " + "smithy.api#documentation": "

          Details about inference jobs that the algorithm runs.

          " } }, - "HumanLoopConfig": { - "target": "com.amazonaws.sagemaker#HumanLoopConfig", + "ValidationSpecification": { + "target": "com.amazonaws.sagemaker#AlgorithmValidationSpecification", "traits": { - "smithy.api#documentation": "

          An object containing information about who works on the task, the workforce task price, and other task details.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Details about configurations for one or more training jobs that Amazon SageMaker runs to test the\n algorithm.

          " } }, - "OutputConfig": { - "target": "com.amazonaws.sagemaker#FlowDefinitionOutputConfig", + "AlgorithmStatus": { + "target": "com.amazonaws.sagemaker#AlgorithmStatus", "traits": { - "smithy.api#documentation": "

          An object containing information about the output file.

          ", + "smithy.api#documentation": "

          The current status of the algorithm.

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "AlgorithmStatusDetails": { + "target": "com.amazonaws.sagemaker#AlgorithmStatusDetails", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) execution role for the flow definition.

          ", + "smithy.api#documentation": "

          Details about the current status of the algorithm.

          ", "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "ProductId": { + "target": "com.amazonaws.sagemaker#ProductId", "traits": { - "smithy.api#documentation": "

          The reason your flow definition failed.

          " + "smithy.api#documentation": "

          The product identifier of the algorithm.

          " + } + }, + "CertifyForMarketplace": { + "target": "com.amazonaws.sagemaker#CertifyForMarketplace", + "traits": { + "smithy.api#documentation": "

          Whether the algorithm is certified to be listed in AWS Marketplace.

          " } } } }, - "com.amazonaws.sagemaker#DescribeHumanTaskUi": { + "com.amazonaws.sagemaker#DescribeApp": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeHumanTaskUiRequest" + "target": "com.amazonaws.sagemaker#DescribeAppRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeHumanTaskUiResponse" + "target": "com.amazonaws.sagemaker#DescribeAppResponse" }, "errors": [ { @@ -7298,197 +8260,184 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns information about the requested human task user interface (worker task template).

          " + "smithy.api#documentation": "

          Describes the app.

          " } }, - "com.amazonaws.sagemaker#DescribeHumanTaskUiRequest": { + "com.amazonaws.sagemaker#DescribeAppImageConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeAppImageConfigRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeAppImageConfigResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes an AppImageConfig.

          " + } + }, + "com.amazonaws.sagemaker#DescribeAppImageConfigRequest": { "type": "structure", "members": { - "HumanTaskUiName": { - "target": "com.amazonaws.sagemaker#HumanTaskUiName", + "AppImageConfigName": { + "target": "com.amazonaws.sagemaker#AppImageConfigName", "traits": { - "smithy.api#documentation": "

          The name of the human task user interface \n (worker task template) you want information about.

          ", + "smithy.api#documentation": "

          The name of the AppImageConfig to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeHumanTaskUiResponse": { + "com.amazonaws.sagemaker#DescribeAppImageConfigResponse": { "type": "structure", "members": { - "HumanTaskUiArn": { - "target": "com.amazonaws.sagemaker#HumanTaskUiArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human task user interface (worker task template).

          ", - "smithy.api#required": {} - } - }, - "HumanTaskUiName": { - "target": "com.amazonaws.sagemaker#HumanTaskUiName", + "AppImageConfigArn": { + "target": "com.amazonaws.sagemaker#AppImageConfigArn", "traits": { - "smithy.api#documentation": "

          The name of the human task user interface (worker task template).

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AppImageConfig.

          " } }, - "HumanTaskUiStatus": { - "target": "com.amazonaws.sagemaker#HumanTaskUiStatus", + "AppImageConfigName": { + "target": "com.amazonaws.sagemaker#AppImageConfigName", "traits": { - "smithy.api#documentation": "

          The status of the human task user interface (worker task template). Valid values are listed below.

          " + "smithy.api#documentation": "

          The name of the AppImageConfig.

          " } }, "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The timestamp when the human task user interface was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the AppImageConfig was created.

          " } }, - "UiTemplate": { - "target": "com.amazonaws.sagemaker#UiTemplateInfo", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#required": {} + "smithy.api#documentation": "

          When the AppImageConfig was last modified.

          " } - } - } - }, - "com.amazonaws.sagemaker#DescribeHyperParameterTuningJob": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], - "traits": { - "smithy.api#documentation": "

          Gets\n a description of a hyperparameter tuning job.

          " - } - }, - "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobRequest": { - "type": "structure", - "members": { - "HyperParameterTuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + }, + "KernelGatewayImageConfig": { + "target": "com.amazonaws.sagemaker#KernelGatewayImageConfig", "traits": { - "smithy.api#documentation": "

          The name of the tuning job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The configuration of a KernelGateway app.

          " } } } }, - "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobResponse": { + "com.amazonaws.sagemaker#DescribeAppRequest": { "type": "structure", "members": { - "HyperParameterTuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The name of the tuning job.

          ", + "smithy.api#documentation": "

          The domain ID.

          ", "smithy.api#required": {} } }, - "HyperParameterTuningJobArn": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          The\n Amazon Resource Name (ARN) of the tuning job.

          ", + "smithy.api#documentation": "

          The user profile name.

          ", "smithy.api#required": {} } }, - "HyperParameterTuningJobConfig": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobConfig", + "AppType": { + "target": "com.amazonaws.sagemaker#AppType", "traits": { - "smithy.api#documentation": "

          The HyperParameterTuningJobConfig object that specifies the\n configuration of the tuning job.

          ", + "smithy.api#documentation": "

          The type of app.

          ", "smithy.api#required": {} } }, - "TrainingJobDefinition": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition", + "AppName": { + "target": "com.amazonaws.sagemaker#AppName", "traits": { - "smithy.api#documentation": "

          The HyperParameterTrainingJobDefinition object that specifies the\n definition of the training jobs that this tuning job launches.

          " + "smithy.api#documentation": "

          The name of the app.

          ", + "smithy.api#required": {} } - }, - "TrainingJobDefinitions": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitions", + } + } + }, + "com.amazonaws.sagemaker#DescribeAppResponse": { + "type": "structure", + "members": { + "AppArn": { + "target": "com.amazonaws.sagemaker#AppArn", "traits": { - "smithy.api#documentation": "

          A list of the HyperParameterTrainingJobDefinition objects launched\n for this tuning job.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the app.

          " } }, - "HyperParameterTuningJobStatus": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStatus", + "AppType": { + "target": "com.amazonaws.sagemaker#AppType", "traits": { - "smithy.api#documentation": "

          The status of the tuning job: InProgress, Completed, Failed, Stopping, or\n Stopped.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of app.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "AppName": { + "target": "com.amazonaws.sagemaker#AppName", "traits": { - "smithy.api#documentation": "

          The date and time that the tuning job started.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the app.

          " } }, - "HyperParameterTuningEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The date and time that the tuning job ended.

          " + "smithy.api#documentation": "

          The domain ID.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          The date and time that the status of the tuning job was modified.

          " + "smithy.api#documentation": "

          The user profile name.

          " } }, - "TrainingJobStatusCounters": { - "target": "com.amazonaws.sagemaker#TrainingJobStatusCounters", + "Status": { + "target": "com.amazonaws.sagemaker#AppStatus", "traits": { - "smithy.api#documentation": "

          The TrainingJobStatusCounters object that specifies the number of\n training jobs, categorized by status, that this tuning job launched.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status.

          " } }, - "ObjectiveStatusCounters": { - "target": "com.amazonaws.sagemaker#ObjectiveStatusCounters", + "LastHealthCheckTimestamp": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The ObjectiveStatusCounters object that specifies the number of\n training jobs, categorized by the status of their final objective metric, that this\n tuning job launched.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The timestamp of the last health check.

          " } }, - "BestTrainingJob": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary", + "LastUserActivityTimestamp": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A TrainingJobSummary object that describes the training job that\n completed with the best current HyperParameterTuningJobObjective.

          " + "smithy.api#documentation": "

          The timestamp of the last user's activity.

          " } }, - "OverallBestTrainingJob": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          If the hyperparameter tuning job is an warm start tuning job with a\n WarmStartType of IDENTICAL_DATA_AND_ALGORITHM, this is the\n TrainingJobSummary for the training job with the best objective\n metric value of all training jobs launched by this tuning job and all parent jobs\n specified for the warm start tuning job.

          " + "smithy.api#documentation": "

          The creation time.

          " } }, - "WarmStartConfig": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartConfig", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The configuration for starting the hyperparameter parameter tuning job using one or\n more previous tuning jobs as a starting point. The results of previous tuning jobs are\n used to inform which combinations of hyperparameters to search over in the new tuning\n job.

          " + "smithy.api#documentation": "

          The failure reason.

          " } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "ResourceSpec": { + "target": "com.amazonaws.sagemaker#ResourceSpec", "traits": { - "smithy.api#documentation": "

          If the tuning job failed, the reason it failed.

          " + "smithy.api#documentation": "

          The instance type and the Amazon Resource Name (ARN) of the SageMaker image created on the instance.

          " } } } }, - "com.amazonaws.sagemaker#DescribeImage": { + "com.amazonaws.sagemaker#DescribeArtifact": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeImageRequest" + "target": "com.amazonaws.sagemaker#DescribeArtifactRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeImageResponse" + "target": "com.amazonaws.sagemaker#DescribeArtifactResponse" }, "errors": [ { @@ -7496,87 +8445,84 @@ } ], "traits": { - "smithy.api#documentation": "

          Describes a SageMaker image.

          " + "smithy.api#documentation": "

          Describes an artifact.

          " } }, - "com.amazonaws.sagemaker#DescribeImageRequest": { + "com.amazonaws.sagemaker#DescribeArtifactRequest": { "type": "structure", "members": { - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", "traits": { - "smithy.api#documentation": "

          The name of the image to describe.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeImageResponse": { + "com.amazonaws.sagemaker#DescribeArtifactResponse": { "type": "structure", "members": { - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ArtifactName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          When the image was created.

          " + "smithy.api#documentation": "

          The name of the artifact.

          " } }, - "Description": { - "target": "com.amazonaws.sagemaker#ImageDescription", + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", "traits": { - "smithy.api#documentation": "

          The description of the image.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact.

          " } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ImageDisplayName", + "Source": { + "target": "com.amazonaws.sagemaker#ArtifactSource", "traits": { - "smithy.api#documentation": "

          The name of the image as displayed.

          " + "smithy.api#documentation": "

          The source of the artifact.

          " } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "ArtifactType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          When a create, update, or delete operation fails, the reason for the failure.

          " + "smithy.api#documentation": "

          The type of the artifact.

          " } }, - "ImageArn": { - "target": "com.amazonaws.sagemaker#ImageArn", + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image.

          " + "smithy.api#documentation": "

          A list of the artifact's properties.

          " } }, - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The name of the image.

          " + "smithy.api#documentation": "

          When the artifact was created.

          " } }, - "ImageStatus": { - "target": "com.amazonaws.sagemaker#ImageStatus", - "traits": { - "smithy.api#documentation": "

          The status of the image.

          " - } + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the image was last modified.

          " + "smithy.api#documentation": "

          When the artifact was last modified.

          " } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that enables Amazon SageMaker to perform tasks on your behalf.

          " - } + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" } } }, - "com.amazonaws.sagemaker#DescribeImageVersion": { + "com.amazonaws.sagemaker#DescribeAutoMLJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeImageVersionRequest" + "target": "com.amazonaws.sagemaker#DescribeAutoMLJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeImageVersionResponse" + "target": "com.amazonaws.sagemaker#DescribeAutoMLJobResponse" }, "errors": [ { @@ -7584,729 +8530,946 @@ } ], "traits": { - "smithy.api#documentation": "

          Describes a version of a SageMaker image.

          " + "smithy.api#documentation": "

          Returns information about an Amazon SageMaker AutoML job.

          " } }, - "com.amazonaws.sagemaker#DescribeImageVersionRequest": { + "com.amazonaws.sagemaker#DescribeAutoMLJobRequest": { "type": "structure", "members": { - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "AutoMLJobName": { + "target": "com.amazonaws.sagemaker#AutoMLJobName", "traits": { - "smithy.api#documentation": "

          The name of the image.

          ", + "smithy.api#documentation": "

          Request information about a job using that job's unique name.

          ", "smithy.api#required": {} } - }, - "Version": { - "target": "com.amazonaws.sagemaker#ImageVersionNumber", - "traits": { - "smithy.api#documentation": "

          The version of the image. If not specified, the latest version is described.

          " - } } } }, - "com.amazonaws.sagemaker#DescribeImageVersionResponse": { + "com.amazonaws.sagemaker#DescribeAutoMLJobResponse": { "type": "structure", "members": { - "BaseImage": { - "target": "com.amazonaws.sagemaker#ImageBaseImage", + "AutoMLJobName": { + "target": "com.amazonaws.sagemaker#AutoMLJobName", "traits": { - "smithy.api#documentation": "

          The registry path of the container image on which this image version is based.

          " + "smithy.api#documentation": "

          Returns the name of a job.

          ", + "smithy.api#required": {} } }, - "ContainerImage": { - "target": "com.amazonaws.sagemaker#ImageContainerImage", + "AutoMLJobArn": { + "target": "com.amazonaws.sagemaker#AutoMLJobArn", "traits": { - "smithy.api#documentation": "

          The registry path of the container image that contains this image version.

          " + "smithy.api#documentation": "

          Returns the job's ARN.

          ", + "smithy.api#required": {} + } + }, + "InputDataConfig": { + "target": "com.amazonaws.sagemaker#AutoMLInputDataConfig", + "traits": { + "smithy.api#documentation": "

          Returns the job's input data config.

          ", + "smithy.api#required": {} + } + }, + "OutputDataConfig": { + "target": "com.amazonaws.sagemaker#AutoMLOutputDataConfig", + "traits": { + "smithy.api#documentation": "

          Returns the job's output data config.

          ", + "smithy.api#required": {} + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that has read permission to\n the input data location and write permission to the output data location in Amazon S3.

          ", + "smithy.api#required": {} + } + }, + "AutoMLJobObjective": { + "target": "com.amazonaws.sagemaker#AutoMLJobObjective", + "traits": { + "smithy.api#documentation": "

          Returns the job's objective.

          " + } + }, + "ProblemType": { + "target": "com.amazonaws.sagemaker#ProblemType", + "traits": { + "smithy.api#documentation": "

          Returns the job's problem type.

          " + } + }, + "AutoMLJobConfig": { + "target": "com.amazonaws.sagemaker#AutoMLJobConfig", + "traits": { + "smithy.api#documentation": "

          Returns the job's config.

          " } }, "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the version was created.

          " + "smithy.api#documentation": "

          Returns the job's creation time.

          ", + "smithy.api#required": {} + } + }, + "EndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Returns the job's end time.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Returns the job's last modified time.

          ", + "smithy.api#required": {} } }, "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "target": "com.amazonaws.sagemaker#AutoMLFailureReason", "traits": { - "smithy.api#documentation": "

          When a create or delete operation fails, the reason for the failure.

          " + "smithy.api#documentation": "

          Returns the job's FailureReason.

          " } }, - "ImageArn": { - "target": "com.amazonaws.sagemaker#ImageArn", + "BestCandidate": { + "target": "com.amazonaws.sagemaker#AutoMLCandidate", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image the version is based on.

          " + "smithy.api#documentation": "

          Returns the job's BestCandidate.

          " } }, - "ImageVersionArn": { - "target": "com.amazonaws.sagemaker#ImageVersionArn", + "AutoMLJobStatus": { + "target": "com.amazonaws.sagemaker#AutoMLJobStatus", "traits": { - "smithy.api#documentation": "

          The ARN of the version.

          " + "smithy.api#documentation": "

          Returns the job's AutoMLJobStatus.

          ", + "smithy.api#required": {} } }, - "ImageVersionStatus": { - "target": "com.amazonaws.sagemaker#ImageVersionStatus", + "AutoMLJobSecondaryStatus": { + "target": "com.amazonaws.sagemaker#AutoMLJobSecondaryStatus", "traits": { - "smithy.api#documentation": "

          The status of the version.

          " + "smithy.api#documentation": "

          Returns the job's AutoMLJobSecondaryStatus.

          ", + "smithy.api#required": {} } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "GenerateCandidateDefinitionsOnly": { + "target": "com.amazonaws.sagemaker#GenerateCandidateDefinitionsOnly", "traits": { - "smithy.api#documentation": "

          When the version was last modified.

          " + "smithy.api#documentation": "

          Returns the job's output from GenerateCandidateDefinitionsOnly.

          " } }, - "Version": { - "target": "com.amazonaws.sagemaker#ImageVersionNumber", + "AutoMLJobArtifacts": { + "target": "com.amazonaws.sagemaker#AutoMLJobArtifacts", "traits": { - "smithy.api#documentation": "

          The version number.

          " + "smithy.api#documentation": "

          Returns information on the job's artifacts found in AutoMLJobArtifacts.

          " + } + }, + "ResolvedAttributes": { + "target": "com.amazonaws.sagemaker#ResolvedAttributes", + "traits": { + "smithy.api#documentation": "

          This contains ProblemType, AutoMLJobObjective and CompletionCriteria. They're\n auto-inferred values, if not provided by you. If you do provide them, then they'll be the\n same as provided.

          " } } } }, - "com.amazonaws.sagemaker#DescribeLabelingJob": { + "com.amazonaws.sagemaker#DescribeCodeRepository": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeLabelingJobRequest" + "target": "com.amazonaws.sagemaker#DescribeCodeRepositoryInput" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeLabelingJobResponse" + "target": "com.amazonaws.sagemaker#DescribeCodeRepositoryOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Gets information about a labeling job.

          " + "smithy.api#documentation": "

          Gets details about the specified Git repository.

          " } }, - "com.amazonaws.sagemaker#DescribeLabelingJobRequest": { + "com.amazonaws.sagemaker#DescribeCodeRepositoryInput": { "type": "structure", "members": { - "LabelingJobName": { - "target": "com.amazonaws.sagemaker#LabelingJobName", + "CodeRepositoryName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the labeling job to return information for.

          ", + "smithy.api#documentation": "

          The name of the Git repository to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeLabelingJobResponse": { + "com.amazonaws.sagemaker#DescribeCodeRepositoryOutput": { "type": "structure", "members": { - "LabelingJobStatus": { - "target": "com.amazonaws.sagemaker#LabelingJobStatus", + "CodeRepositoryName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The processing status of the labeling job.

          ", + "smithy.api#documentation": "

          The name of the Git repository.

          ", "smithy.api#required": {} } }, - "LabelCounters": { - "target": "com.amazonaws.sagemaker#LabelCounters", + "CodeRepositoryArn": { + "target": "com.amazonaws.sagemaker#CodeRepositoryArn", "traits": { - "smithy.api#documentation": "

          Provides a breakdown of the number of data objects labeled by humans, the number of\n objects labeled by machine, the number of objects than couldn't be labeled, and the\n total number of objects labeled.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Git repository.

          ", "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", - "traits": { - "smithy.api#documentation": "

          If the job failed, the reason that it failed.

          " - } - }, "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The date and time that the labeling job was created.

          ", + "smithy.api#documentation": "

          The date and time that the repository was created.

          ", "smithy.api#required": {} } }, "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          The date and time that the labeling job was last updated.

          ", + "smithy.api#documentation": "

          The date and time that the repository was last changed.

          ", "smithy.api#required": {} } }, - "JobReferenceCode": { - "target": "com.amazonaws.sagemaker#JobReferenceCode", + "GitConfig": { + "target": "com.amazonaws.sagemaker#GitConfig", "traits": { - "smithy.api#documentation": "

          A unique identifier for work done as part of a labeling job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Configuration details about the repository, including the URL where the repository is\n located, the default branch, and the Amazon Resource Name (ARN) of the AWS Secrets\n Manager secret that contains the credentials used to access the repository.

          " } - }, - "LabelingJobName": { - "target": "com.amazonaws.sagemaker#LabelingJobName", + } + } + }, + "com.amazonaws.sagemaker#DescribeCompilationJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeCompilationJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeCompilationJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Returns information about a model compilation job.

          \n

          To create a model compilation job, use CreateCompilationJob. To get\n information about multiple model compilation jobs, use ListCompilationJobs.

          " + } + }, + "com.amazonaws.sagemaker#DescribeCompilationJobRequest": { + "type": "structure", + "members": { + "CompilationJobName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name assigned to the labeling job when it was created.

          ", + "smithy.api#documentation": "

          The name of the model compilation job that you want information about.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#DescribeCompilationJobResponse": { + "type": "structure", + "members": { + "CompilationJobName": { + "target": "com.amazonaws.sagemaker#EntityName", + "traits": { + "smithy.api#documentation": "

          The name of the model compilation job.

          ", "smithy.api#required": {} } }, - "LabelingJobArn": { - "target": "com.amazonaws.sagemaker#LabelingJobArn", + "CompilationJobArn": { + "target": "com.amazonaws.sagemaker#CompilationJobArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the labeling job.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model compilation job.

          ", "smithy.api#required": {} } }, - "LabelAttributeName": { - "target": "com.amazonaws.sagemaker#LabelAttributeName", + "CompilationJobStatus": { + "target": "com.amazonaws.sagemaker#CompilationJobStatus", "traits": { - "smithy.api#documentation": "

          The attribute used as the label in the output manifest file.

          " + "smithy.api#documentation": "

          The status of the model compilation job.

          ", + "smithy.api#required": {} } }, - "InputConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobInputConfig", + "CompilationStartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Input configuration information for the labeling job, such as the Amazon S3 location of the\n data objects and the location of the manifest file that describes the data\n objects.

          ", + "smithy.api#documentation": "

          The time when the model compilation job started the CompilationJob\n instances.

          \n

          You are billed for the time between this timestamp and the timestamp in the DescribeCompilationJobResponse$CompilationEndTime field. In Amazon CloudWatch Logs,\n the start time might be later than this time. That's because it takes time to download\n the compilation job, which depends on the size of the compilation job container.

          " + } + }, + "CompilationEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The time when the model compilation job on a compilation job instance ended. For a\n successful or stopped job, this is when the job's model artifacts have finished\n uploading. For a failed job, this is when Amazon SageMaker detected that the job failed.

          " + } + }, + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#StoppingCondition", + "traits": { + "smithy.api#documentation": "

          Specifies a limit to how long a model compilation job can run. When the job reaches\n the time limit, Amazon SageMaker ends the compilation job. Use this API to cap model training\n costs.

          ", "smithy.api#required": {} } }, - "OutputConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobOutputConfig", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The location of the job's output data and the AWS Key Management Service key ID for the key used to\n encrypt the output data, if any.

          ", + "smithy.api#documentation": "

          The time that the model compilation job was created.

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf\n during data labeling.

          ", + "smithy.api#documentation": "

          The time that the status\n of\n the model compilation job was last modified.

          ", "smithy.api#required": {} } }, - "LabelCategoryConfigS3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The S3 location of the JSON file that defines the categories used to label data\n objects. Please note the following label-category limits:

          \n
            \n
          • \n

            Semantic segmentation labeling jobs using automated labeling: 20 labels

            \n
          • \n
          • \n

            Box bounding labeling jobs (all): 10 labels

            \n
          • \n
          \n

          The file is a JSON structure in the following format:

          \n

          \n {\n

          \n

          \n \"document-version\": \"2018-11-28\"\n

          \n

          \n \"labels\": [\n

          \n

          \n {\n

          \n

          \n \"label\": \"label 1\"\n

          \n

          \n },\n

          \n

          \n {\n

          \n

          \n \"label\": \"label 2\"\n

          \n

          \n },\n

          \n

          \n ...\n

          \n

          \n {\n

          \n

          \n \"label\": \"label n\"\n

          \n

          \n }\n

          \n

          \n ]\n

          \n

          \n }\n

          " + "smithy.api#documentation": "

          If a model compilation job failed, the reason it failed.

          ", + "smithy.api#required": {} } }, - "StoppingConditions": { - "target": "com.amazonaws.sagemaker#LabelingJobStoppingConditions", + "ModelArtifacts": { + "target": "com.amazonaws.sagemaker#ModelArtifacts", "traits": { - "smithy.api#documentation": "

          A set of conditions for stopping a labeling job. If any of the conditions are met, the\n job is automatically stopped.

          " + "smithy.api#documentation": "

          Information about the location in Amazon S3 that has been configured for storing the model\n artifacts used in the compilation job.

          ", + "smithy.api#required": {} } }, - "LabelingJobAlgorithmsConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobAlgorithmsConfig", + "ModelDigests": { + "target": "com.amazonaws.sagemaker#ModelDigests", "traits": { - "smithy.api#documentation": "

          Configuration information for automated data labeling.

          " + "smithy.api#documentation": "

          Provides a BLAKE2 hash value that identifies the compiled model artifacts in Amazon S3.

          " } }, - "HumanTaskConfig": { - "target": "com.amazonaws.sagemaker#HumanTaskConfig", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          Configuration information required for human workers to complete a labeling\n task.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model compilation job.

          ", "smithy.api#required": {} } }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "InputConfig": { + "target": "com.amazonaws.sagemaker#InputConfig", "traits": { - "smithy.api#documentation": "

          An array of key/value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + "smithy.api#documentation": "

          Information about the location in Amazon S3 of the input model artifacts, the name and\n shape of the expected data inputs, and the framework in which the model was\n trained.

          ", + "smithy.api#required": {} } }, - "LabelingJobOutput": { - "target": "com.amazonaws.sagemaker#LabelingJobOutput", + "OutputConfig": { + "target": "com.amazonaws.sagemaker#OutputConfig", "traits": { - "smithy.api#documentation": "

          The location of the output produced by the labeling job.

          " + "smithy.api#documentation": "

          Information about the output location for the compiled model and the target device\n that the model runs on.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeModel": { + "com.amazonaws.sagemaker#DescribeContext": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeModelInput" + "target": "com.amazonaws.sagemaker#DescribeContextRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeModelOutput" + "target": "com.amazonaws.sagemaker#DescribeContextResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Describes a model that you created using the CreateModel\n API.

          " + "smithy.api#documentation": "

          Describes a context.

          " } }, - "com.amazonaws.sagemaker#DescribeModelInput": { + "com.amazonaws.sagemaker#DescribeContextRequest": { "type": "structure", "members": { - "ModelName": { - "target": "com.amazonaws.sagemaker#ModelName", + "ContextName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the model.

          ", + "smithy.api#documentation": "

          The name of the context to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeModelOutput": { + "com.amazonaws.sagemaker#DescribeContextResponse": { "type": "structure", "members": { - "ModelName": { - "target": "com.amazonaws.sagemaker#ModelName", + "ContextName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          Name of the Amazon SageMaker model.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the context.

          " } }, - "PrimaryContainer": { - "target": "com.amazonaws.sagemaker#ContainerDefinition", + "ContextArn": { + "target": "com.amazonaws.sagemaker#ContextArn", "traits": { - "smithy.api#documentation": "

          The location of the primary inference code, associated artifacts, and custom\n environment map that the inference code uses when it is deployed in production.\n

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the context.

          " } }, - "Containers": { - "target": "com.amazonaws.sagemaker#ContainerDefinitionList", + "Source": { + "target": "com.amazonaws.sagemaker#ContextSource", "traits": { - "smithy.api#documentation": "

          The containers in the inference pipeline.

          " + "smithy.api#documentation": "

          The source of the context.

          " } }, - "ExecutionRoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "ContextType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that you specified for the\n model.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of the context.

          " } }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig", + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", "traits": { - "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that this model has access\n to. For more information, see Protect Endpoints by Using an Amazon Virtual\n Private Cloud\n

          " + "smithy.api#documentation": "

          The description of the context.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the model was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of the context's properties.

          " } }, - "ModelArn": { - "target": "com.amazonaws.sagemaker#ModelArn", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the context was created.

          " } }, - "EnableNetworkIsolation": { - "target": "com.amazonaws.sagemaker#Boolean", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          If True, no inbound or outbound network calls can be made to or from the\n model container.

          " + "smithy.api#documentation": "

          When the context was last modified.

          " } + }, + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" } } }, - "com.amazonaws.sagemaker#DescribeModelPackage": { + "com.amazonaws.sagemaker#DescribeDomain": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeModelPackageInput" + "target": "com.amazonaws.sagemaker#DescribeDomainRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeModelPackageOutput" + "target": "com.amazonaws.sagemaker#DescribeDomainResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns a description of the specified model package, which is used to create Amazon SageMaker\n models or list them on AWS Marketplace.

          \n

          To create models in Amazon SageMaker, buyers can subscribe to model packages listed on AWS\n Marketplace.

          " + "smithy.api#documentation": "

          The description of the domain.

          " } }, - "com.amazonaws.sagemaker#DescribeModelPackageInput": { + "com.amazonaws.sagemaker#DescribeDomainRequest": { "type": "structure", "members": { - "ModelPackageName": { - "target": "com.amazonaws.sagemaker#VersionedArnOrName", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The name of the model package to describe.

          ", + "smithy.api#documentation": "

          The domain ID.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeModelPackageOutput": { + "com.amazonaws.sagemaker#DescribeDomainResponse": { "type": "structure", "members": { - "ModelPackageName": { - "target": "com.amazonaws.sagemaker#EntityName", + "DomainArn": { + "target": "com.amazonaws.sagemaker#DomainArn", "traits": { - "smithy.api#documentation": "

          The name of the model package being described.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The domain's Amazon Resource Name (ARN).

          " } }, - "ModelPackageArn": { - "target": "com.amazonaws.sagemaker#ModelPackageArn", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The domain ID.

          " } }, - "ModelPackageDescription": { - "target": "com.amazonaws.sagemaker#EntityDescription", + "DomainName": { + "target": "com.amazonaws.sagemaker#DomainName", "traits": { - "smithy.api#documentation": "

          A brief summary of the model package.

          " + "smithy.api#documentation": "

          The domain name.

          " + } + }, + "HomeEfsFileSystemId": { + "target": "com.amazonaws.sagemaker#ResourceId", + "traits": { + "smithy.api#documentation": "

          The ID of the Amazon Elastic File System (EFS) managed by this Domain.

          " + } + }, + "SingleSignOnManagedApplicationInstanceId": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          The SSO managed application instance ID.

          " + } + }, + "Status": { + "target": "com.amazonaws.sagemaker#DomainStatus", + "traits": { + "smithy.api#documentation": "

          The status.

          " } }, "CreationTime": { "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A timestamp specifying when the model package was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The creation time.

          " } }, - "InferenceSpecification": { - "target": "com.amazonaws.sagemaker#InferenceSpecification", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          Details about inference jobs that can be run with models based on this model\n package.

          " + "smithy.api#documentation": "

          The last modified time.

          " } }, - "SourceAlgorithmSpecification": { - "target": "com.amazonaws.sagemaker#SourceAlgorithmSpecification", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          Details about the algorithm that was used to create the model package.

          " + "smithy.api#documentation": "

          The failure reason.

          " } }, - "ValidationSpecification": { - "target": "com.amazonaws.sagemaker#ModelPackageValidationSpecification", + "AuthMode": { + "target": "com.amazonaws.sagemaker#AuthMode", "traits": { - "smithy.api#documentation": "

          Configurations for one or more transform jobs that Amazon SageMaker runs to test the model\n package.

          " + "smithy.api#documentation": "

          The domain's authentication mode.

          " } }, - "ModelPackageStatus": { - "target": "com.amazonaws.sagemaker#ModelPackageStatus", + "DefaultUserSettings": { + "target": "com.amazonaws.sagemaker#UserSettings", "traits": { - "smithy.api#documentation": "

          The current status of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Settings which are applied to all UserProfiles in this domain, if settings are not explicitly specified\n in a given UserProfile.\n

          " } }, - "ModelPackageStatusDetails": { - "target": "com.amazonaws.sagemaker#ModelPackageStatusDetails", + "AppNetworkAccessType": { + "target": "com.amazonaws.sagemaker#AppNetworkAccessType", "traits": { - "smithy.api#documentation": "

          Details about the current status of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Specifies the VPC used for non-EFS traffic. The default value is\n PublicInternetOnly.

          \n
            \n
          • \n

            \n PublicInternetOnly - Non-EFS traffic is through a VPC managed by\n Amazon SageMaker, which allows direct internet access

            \n
          • \n
          • \n

            \n VpcOnly - All Studio traffic is through the specified VPC and subnets

            \n
          • \n
          " } }, - "CertifyForMarketplace": { - "target": "com.amazonaws.sagemaker#CertifyForMarketplace", + "HomeEfsFileSystemKmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          Whether the model package is certified for listing on AWS Marketplace.

          " + "smithy.api#deprecated": { + "message": "This property is deprecated, use KmsKeyId instead." + }, + "smithy.api#documentation": "

          This member is deprecated and replaced with KmsKeyId.

          " } - } + }, + "SubnetIds": { + "target": "com.amazonaws.sagemaker#Subnets", + "traits": { + "smithy.api#documentation": "

          The VPC subnets that Studio uses for communication.

          " + } + }, + "Url": { + "target": "com.amazonaws.sagemaker#String1024", + "traits": { + "smithy.api#documentation": "

          The domain's URL.

          " + } + }, + "VpcId": { + "target": "com.amazonaws.sagemaker#VpcId", + "traits": { + "smithy.api#documentation": "

          The ID of the Amazon Virtual Private Cloud (VPC) that Studio uses for communication.

          " + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS KMS customer managed CMK used to encrypt\n the EFS volume attached to the domain.

          " + } + } } }, - "com.amazonaws.sagemaker#DescribeMonitoringSchedule": { + "com.amazonaws.sagemaker#DescribeEndpoint": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeMonitoringScheduleRequest" + "target": "com.amazonaws.sagemaker#DescribeEndpointInput" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeMonitoringScheduleResponse" + "target": "com.amazonaws.sagemaker#DescribeEndpointOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Describes the schedule for a monitoring job.

          " + "smithy.api#documentation": "

          Returns the description of an endpoint.

          " } }, - "com.amazonaws.sagemaker#DescribeMonitoringScheduleRequest": { + "com.amazonaws.sagemaker#DescribeEndpointConfig": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeEndpointConfigInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeEndpointConfigOutput" + }, + "traits": { + "smithy.api#documentation": "

          Returns the description of an endpoint configuration created using the\n CreateEndpointConfig API.

          " + } + }, + "com.amazonaws.sagemaker#DescribeEndpointConfigInput": { "type": "structure", "members": { - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          Name of a previously created monitoring schedule.

          ", + "smithy.api#documentation": "

          The name of the endpoint configuration.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeMonitoringScheduleResponse": { + "com.amazonaws.sagemaker#DescribeEndpointConfigOutput": { "type": "structure", "members": { - "MonitoringScheduleArn": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          ", + "smithy.api#documentation": "

          Name of the Amazon SageMaker endpoint configuration.

          ", "smithy.api#required": {} } }, - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "EndpointConfigArn": { + "target": "com.amazonaws.sagemaker#EndpointConfigArn", "traits": { - "smithy.api#documentation": "

          Name of the monitoring schedule.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint configuration.

          ", "smithy.api#required": {} } }, - "MonitoringScheduleStatus": { - "target": "com.amazonaws.sagemaker#ScheduleStatus", + "ProductionVariants": { + "target": "com.amazonaws.sagemaker#ProductionVariantList", "traits": { - "smithy.api#documentation": "

          The status of a monitoring job.

          ", + "smithy.api#documentation": "

          An array of ProductionVariant objects, one for each model that you\n want to host at this endpoint.

          ", "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "DataCaptureConfig": { + "target": "com.amazonaws.sagemaker#DataCaptureConfig" + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          A string, up to one KB in size, that contains the reason a monitoring job failed, if it\n failed.

          " + "smithy.api#documentation": "

          AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage\n volume attached to the instance.

          " } }, "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The time at which the monitoring job was created.

          ", + "smithy.api#documentation": "

          A timestamp that shows when the endpoint configuration was created.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#DescribeEndpointInput": { + "type": "structure", + "members": { + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          The name of the endpoint.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#DescribeEndpointOutput": { + "type": "structure", + "members": { + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          Name of the endpoint.

          ", "smithy.api#required": {} } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "EndpointArn": { + "target": "com.amazonaws.sagemaker#EndpointArn", "traits": { - "smithy.api#documentation": "

          The time at which the monitoring job was last modified.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", "smithy.api#required": {} } }, - "MonitoringScheduleConfig": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleConfig", + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          The configuration object that specifies the monitoring schedule and defines the\n monitoring job.

          ", + "smithy.api#documentation": "

          The name of the endpoint configuration associated with this endpoint.

          ", "smithy.api#required": {} } }, - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + "ProductionVariants": { + "target": "com.amazonaws.sagemaker#ProductionVariantSummaryList", "traits": { - "smithy.api#documentation": "

          The name of the endpoint for the monitoring job.

          " + "smithy.api#documentation": "

          An array of ProductionVariantSummary objects, one for each model\n hosted behind this endpoint.

          " } }, - "LastMonitoringExecutionSummary": { - "target": "com.amazonaws.sagemaker#MonitoringExecutionSummary", + "DataCaptureConfig": { + "target": "com.amazonaws.sagemaker#DataCaptureConfigSummary" + }, + "EndpointStatus": { + "target": "com.amazonaws.sagemaker#EndpointStatus", "traits": { - "smithy.api#documentation": "

          Describes metadata on the last execution to run, if there was one.

          " + "smithy.api#documentation": "

          The status of the endpoint.

          \n
            \n
          • \n

            \n OutOfService: Endpoint is not available to take incoming\n requests.

            \n
          • \n
          • \n

            \n Creating: CreateEndpoint is executing.

            \n
          • \n
          • \n

            \n Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            \n
          • \n
          • \n

            \n SystemUpdating: Endpoint is undergoing maintenance and cannot be\n updated or deleted or re-scaled until it has completed. This maintenance\n operation does not change any customer-specified values such as VPC config, KMS\n encryption, model, instance type, or instance count.

            \n
          • \n
          • \n

            \n RollingBack: Endpoint fails to scale up or down or change its\n variant weight and is in the process of rolling back to its previous\n configuration. Once the rollback completes, endpoint returns to an\n InService status. This transitional status only applies to an\n endpoint that has autoscaling enabled and is undergoing variant weight or\n capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called\n explicitly.

            \n
          • \n
          • \n

            \n InService: Endpoint is available to process incoming\n requests.

            \n
          • \n
          • \n

            \n Deleting: DeleteEndpoint is executing.

            \n
          • \n
          • \n

            \n Failed: Endpoint could not be created, updated, or re-scaled. Use\n DescribeEndpointOutput$FailureReason for information about\n the failure. DeleteEndpoint is the only operation that can be\n performed on a failed endpoint.

            \n
          • \n
          ", + "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#DescribeNotebookInstance": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceInput" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceOutput" - }, - "traits": { - "smithy.api#documentation": "

          Returns information about a notebook instance.

          " - } - }, - "com.amazonaws.sagemaker#DescribeNotebookInstanceInput": { - "type": "structure", - "members": { - "NotebookInstanceName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceName", + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The name of the notebook instance that you want information about.

          ", + "smithy.api#documentation": "

          If the status of the endpoint is Failed, the reason why it failed.\n

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that shows when the endpoint was created.

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that shows when the endpoint was last modified.

          ", "smithy.api#required": {} } + }, + "LastDeploymentConfig": { + "target": "com.amazonaws.sagemaker#DeploymentConfig", + "traits": { + "smithy.api#documentation": "

          The most recent deployment configuration for the endpoint.

          " + } } } }, - "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfig": { + "com.amazonaws.sagemaker#DescribeExperiment": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigInput" + "target": "com.amazonaws.sagemaker#DescribeExperimentRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigOutput" + "target": "com.amazonaws.sagemaker#DescribeExperimentResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns a description of a notebook instance lifecycle configuration.

          \n

          For information about notebook instance lifestyle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " + "smithy.api#documentation": "

          Provides a list of an experiment's properties.

          " } }, - "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigInput": { + "com.amazonaws.sagemaker#DescribeExperimentRequest": { "type": "structure", "members": { - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the lifecycle configuration to describe.

          ", + "smithy.api#documentation": "

          The name of the experiment to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigOutput": { + "com.amazonaws.sagemaker#DescribeExperimentResponse": { "type": "structure", "members": { - "NotebookInstanceLifecycleConfigArn": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn", + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          " + "smithy.api#documentation": "

          The name of the experiment.

          " } }, - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "ExperimentArn": { + "target": "com.amazonaws.sagemaker#ExperimentArn", "traits": { - "smithy.api#documentation": "

          The name of the lifecycle configuration.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " } }, - "OnCreate": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The shell script that runs only once, when you create a notebook instance.

          " + "smithy.api#documentation": "

          The name of the experiment as displayed. If DisplayName isn't specified,\n ExperimentName is displayed.

          " } }, - "OnStart": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "Source": { + "target": "com.amazonaws.sagemaker#ExperimentSource", "traits": { - "smithy.api#documentation": "

          The shell script that runs every time you start a notebook instance, including when\n you create the notebook instance.

          " + "smithy.api#documentation": "

          The ARN of the source and, optionally, the type.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", "traits": { - "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was last modified.

          " + "smithy.api#documentation": "

          The description of the experiment.

          " } }, "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was created.

          " - } - } - } - }, - "com.amazonaws.sagemaker#DescribeNotebookInstanceOutput": { - "type": "structure", - "members": { - "NotebookInstanceArn": { - "target": "com.amazonaws.sagemaker#NotebookInstanceArn", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the notebook instance.

          " + "smithy.api#documentation": "

          When the experiment was created.

          " } }, - "NotebookInstanceName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceName", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          The name of the Amazon SageMaker notebook instance.

          " + "smithy.api#documentation": "

          Who created the experiment.

          " } }, - "NotebookInstanceStatus": { - "target": "com.amazonaws.sagemaker#NotebookInstanceStatus", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The status of the notebook instance.

          " + "smithy.api#documentation": "

          When the experiment was last modified.

          " } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          If status is Failed, the reason it failed.

          " + "smithy.api#documentation": "

          Who last modified the experiment.

          " } - }, - "Url": { - "target": "com.amazonaws.sagemaker#NotebookInstanceUrl", + } + } + }, + "com.amazonaws.sagemaker#DescribeFeatureGroup": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeFeatureGroupRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeFeatureGroupResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Use this operation to describe a FeatureGroup. The response includes\n information on the creation time, FeatureGroup name, the unique identifier for\n each FeatureGroup, and more.

          " + } + }, + "com.amazonaws.sagemaker#DescribeFeatureGroupRequest": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", "traits": { - "smithy.api#documentation": "

          The URL that you use to connect to the Jupyter notebook that is running in your\n notebook instance.

          " + "smithy.api#documentation": "

          The name of the FeatureGroup you want described.

          ", + "smithy.api#required": {} } }, - "InstanceType": { - "target": "com.amazonaws.sagemaker#InstanceType", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The type of ML compute instance running on the notebook instance.

          " + "smithy.api#documentation": "

          A token to resume pagination of the list of Features\n (FeatureDefinitions). 2,500 Features are returned by\n default.

          " } - }, - "SubnetId": { - "target": "com.amazonaws.sagemaker#SubnetId", + } + } + }, + "com.amazonaws.sagemaker#DescribeFeatureGroupResponse": { + "type": "structure", + "members": { + "FeatureGroupArn": { + "target": "com.amazonaws.sagemaker#FeatureGroupArn", "traits": { - "smithy.api#documentation": "

          The ID of the VPC subnet.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the FeatureGroup.

          ", + "smithy.api#required": {} } }, - "SecurityGroups": { - "target": "com.amazonaws.sagemaker#SecurityGroupIds", + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", "traits": { - "smithy.api#documentation": "

          The IDs of the VPC security groups.

          " + "smithy.api#documentation": "

          The name of the FeatureGroup.

          ", + "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "RecordIdentifierFeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role associated with the instance.\n

          " + "smithy.api#documentation": "

          The name of the Feature used for RecordIdentifier, whose value\n uniquely identifies a record stored in the feature store.

          ", + "smithy.api#required": {} } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "EventTimeFeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", "traits": { - "smithy.api#documentation": "

          The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage\n volume attached to the instance.

          " + "smithy.api#documentation": "

          The name of the feature that stores the EventTime of a Record in a\n FeatureGroup.

          \n

          An EventTime is a point in time when a new event occurs that\n corresponds to the creation or update of a Record in a\n FeatureGroup. All Records in the FeatureGroup\n have a corresponding EventTime.

          ", + "smithy.api#required": {} } }, - "NetworkInterfaceId": { - "target": "com.amazonaws.sagemaker#NetworkInterfaceId", + "FeatureDefinitions": { + "target": "com.amazonaws.sagemaker#FeatureDefinitions", "traits": { - "smithy.api#documentation": "

          The network interface IDs that Amazon SageMaker created at the time of creating the instance.\n

          " + "smithy.api#documentation": "

          A list of the Features in the FeatureGroup. \n Each feature is defined by a FeatureName and FeatureType.

          ", + "smithy.api#required": {} } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A timestamp. Use this parameter to retrieve the time when the notebook instance was\n last modified.

          " + "smithy.api#documentation": "

          A timestamp indicating when SageMaker created the FeatureGroup.

          ", + "smithy.api#required": {} } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "OnlineStoreConfig": { + "target": "com.amazonaws.sagemaker#OnlineStoreConfig", "traits": { - "smithy.api#documentation": "

          A timestamp. Use this parameter to return the time when the notebook instance was\n created

          " + "smithy.api#documentation": "

          The configuration for the OnlineStore.

          " } }, - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "OfflineStoreConfig": { + "target": "com.amazonaws.sagemaker#OfflineStoreConfig", "traits": { - "smithy.api#documentation": "

          Returns the name of a notebook instance lifecycle configuration.

          \n

          For information about notebook instance lifestyle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance\n

          " + "smithy.api#documentation": "

          The configuration of the OfflineStore, including the S3 location of the\n OfflineStore, AWS Glue or AWS Hive data catalogue configurations, and the\n security configuration.

          " } }, - "DirectInternetAccess": { - "target": "com.amazonaws.sagemaker#DirectInternetAccess", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          Describes whether Amazon SageMaker provides internet access to the notebook instance. If this\n value is set to Disabled, the notebook instance does not have\n internet access, and cannot connect to Amazon SageMaker training and endpoint services.

          \n

          For more information, see Notebook Instances Are Internet-Enabled by Default.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM execution role used to persist data into the\n OfflineStore if an OfflineStoreConfig is provided.

          " } }, - "VolumeSizeInGB": { - "target": "com.amazonaws.sagemaker#NotebookInstanceVolumeSizeInGB", + "FeatureGroupStatus": { + "target": "com.amazonaws.sagemaker#FeatureGroupStatus", "traits": { - "smithy.api#documentation": "

          The size, in GB, of the ML storage volume attached to the notebook instance.

          " + "smithy.api#documentation": "

          The status of the feature group.

          " } }, - "AcceleratorTypes": { - "target": "com.amazonaws.sagemaker#NotebookInstanceAcceleratorTypes", + "OfflineStoreStatus": { + "target": "com.amazonaws.sagemaker#OfflineStoreStatus", "traits": { - "smithy.api#documentation": "

          A list of the Elastic Inference (EI) instance types associated with this notebook\n instance. Currently only one EI instance type can be associated with a notebook\n instance. For more information, see Using Elastic Inference in Amazon\n SageMaker.

          " + "smithy.api#documentation": "

          The status of the OfflineStore. Notifies you if replicating data into the\n OfflineStore has failed. Returns either: Active or\n Blocked\n

          " } }, - "DefaultCodeRepository": { - "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The Git repository associated with the notebook instance as its default code\n repository. This can be either the name of a Git repository stored as a resource in your\n account, or the URL of a Git repository in AWS CodeCommit or in any\n other Git repository. When you open a notebook instance, it opens in the directory that\n contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker\n Notebook Instances.

          " + "smithy.api#documentation": "

          The reason that the FeatureGroup failed to be replicated in the\n OfflineStore. This failure can occur because:

          \n
            \n
          • \n

            The FeatureGroup could not be created in the\n OfflineStore.

            \n
          • \n
          • \n

            The FeatureGroup could not be deleted from the\n OfflineStore.

            \n
          • \n
          " } }, - "AdditionalCodeRepositories": { - "target": "com.amazonaws.sagemaker#AdditionalCodeRepositoryNamesOrUrls", + "Description": { + "target": "com.amazonaws.sagemaker#Description", "traits": { - "smithy.api#documentation": "

          An array of up to three Git repositories associated with the notebook instance. These\n can be either the names of Git repositories stored as resources in your account, or the\n URL of Git repositories in AWS CodeCommit or in any\n other Git repository. These repositories are cloned at the same level as the default\n repository of your notebook instance. For more information, see Associating Git\n Repositories with Amazon SageMaker Notebook Instances.

          " + "smithy.api#documentation": "

          A free form description of the feature group.

          " } }, - "RootAccess": { - "target": "com.amazonaws.sagemaker#RootAccess", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          Whether root access is enabled or disabled for users of the notebook instance.

          \n \n

          Lifecycle configurations need root access to be able to set up a notebook\n instance. Because of this, lifecycle configurations associated with a notebook\n instance always run with root access even if you disable root access for\n users.

          \n
          " + "smithy.api#documentation": "

          A token to resume pagination of the list of Features\n (FeatureDefinitions).

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeProcessingJob": { + "com.amazonaws.sagemaker#DescribeFlowDefinition": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeProcessingJobRequest" + "target": "com.amazonaws.sagemaker#DescribeFlowDefinitionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeProcessingJobResponse" + "target": "com.amazonaws.sagemaker#DescribeFlowDefinitionResponse" }, "errors": [ { @@ -8314,201 +9477,167 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns a description of a processing job.

          " + "smithy.api#documentation": "

          Returns information about the specified flow definition.

          " } }, - "com.amazonaws.sagemaker#DescribeProcessingJobRequest": { + "com.amazonaws.sagemaker#DescribeFlowDefinitionRequest": { "type": "structure", "members": { - "ProcessingJobName": { - "target": "com.amazonaws.sagemaker#ProcessingJobName", + "FlowDefinitionName": { + "target": "com.amazonaws.sagemaker#FlowDefinitionName", "traits": { - "smithy.api#documentation": "

          The name of the processing job. The name must be unique within an AWS Region in the\n AWS account.

          ", + "smithy.api#documentation": "

          The name of the flow definition.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeProcessingJobResponse": { + "com.amazonaws.sagemaker#DescribeFlowDefinitionResponse": { "type": "structure", "members": { - "ProcessingInputs": { - "target": "com.amazonaws.sagemaker#ProcessingInputs", + "FlowDefinitionArn": { + "target": "com.amazonaws.sagemaker#FlowDefinitionArn", "traits": { - "smithy.api#documentation": "

          The inputs for a processing job.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition.

          ", + "smithy.api#required": {} } }, - "ProcessingOutputConfig": { - "target": "com.amazonaws.sagemaker#ProcessingOutputConfig", + "FlowDefinitionName": { + "target": "com.amazonaws.sagemaker#FlowDefinitionName", "traits": { - "smithy.api#documentation": "

          Output configuration for the processing job.

          " + "smithy.api#documentation": "

          The name of the flow definition.

          ", + "smithy.api#required": {} } }, - "ProcessingJobName": { - "target": "com.amazonaws.sagemaker#ProcessingJobName", + "FlowDefinitionStatus": { + "target": "com.amazonaws.sagemaker#FlowDefinitionStatus", "traits": { - "smithy.api#documentation": "

          The name of the processing job. The name must be unique within an AWS Region in the\n AWS account.

          ", + "smithy.api#documentation": "

          The status of the flow definition. Valid values are listed below.

          ", "smithy.api#required": {} } }, - "ProcessingResources": { - "target": "com.amazonaws.sagemaker#ProcessingResources", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a\n processing job. In distributed training, you specify more than one instance.

          ", + "smithy.api#documentation": "

          The timestamp when the flow definition was created.

          ", "smithy.api#required": {} } }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#ProcessingStoppingCondition", + "HumanLoopRequestSource": { + "target": "com.amazonaws.sagemaker#HumanLoopRequestSource", "traits": { - "smithy.api#documentation": "

          The time limit for how long the processing job is allowed to run.

          " + "smithy.api#documentation": "

          Container for configuring the source of human task requests. Used to specify if\n Amazon Rekognition or Amazon Textract is used as an integration source.

          " } }, - "AppSpecification": { - "target": "com.amazonaws.sagemaker#AppSpecification", + "HumanLoopActivationConfig": { + "target": "com.amazonaws.sagemaker#HumanLoopActivationConfig", "traits": { - "smithy.api#documentation": "

          Configures the processing job to run a specified container image.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          An object containing information about what triggers a human review workflow.

          " } }, - "Environment": { - "target": "com.amazonaws.sagemaker#ProcessingEnvironmentMap", + "HumanLoopConfig": { + "target": "com.amazonaws.sagemaker#HumanLoopConfig", "traits": { - "smithy.api#documentation": "

          The environment variables set in the Docker container.

          " + "smithy.api#documentation": "

          An object containing information about who works on the task, the workforce task price, and other task details.

          ", + "smithy.api#required": {} } }, - "NetworkConfig": { - "target": "com.amazonaws.sagemaker#NetworkConfig", + "OutputConfig": { + "target": "com.amazonaws.sagemaker#FlowDefinitionOutputConfig", "traits": { - "smithy.api#documentation": "

          Networking options for a processing job.

          " + "smithy.api#documentation": "

          An object containing information about the output file.

          ", + "smithy.api#required": {} } }, "RoleArn": { "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on\n your behalf.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) execution role for the flow definition.

          ", + "smithy.api#required": {} } }, - "ExperimentConfig": { - "target": "com.amazonaws.sagemaker#ExperimentConfig", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The configuration information used to create an experiment.

          " + "smithy.api#documentation": "

          The reason your flow definition failed.

          " } - }, - "ProcessingJobArn": { - "target": "com.amazonaws.sagemaker#ProcessingJobArn", + } + } + }, + "com.amazonaws.sagemaker#DescribeHumanTaskUi": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeHumanTaskUiRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeHumanTaskUiResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Returns information about the requested human task user interface (worker task template).

          " + } + }, + "com.amazonaws.sagemaker#DescribeHumanTaskUiRequest": { + "type": "structure", + "members": { + "HumanTaskUiName": { + "target": "com.amazonaws.sagemaker#HumanTaskUiName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the processing job.

          ", + "smithy.api#documentation": "

          The name of the human task user interface \n (worker task template) you want information about.

          ", "smithy.api#required": {} } - }, - "ProcessingJobStatus": { - "target": "com.amazonaws.sagemaker#ProcessingJobStatus", + } + } + }, + "com.amazonaws.sagemaker#DescribeHumanTaskUiResponse": { + "type": "structure", + "members": { + "HumanTaskUiArn": { + "target": "com.amazonaws.sagemaker#HumanTaskUiArn", "traits": { - "smithy.api#documentation": "

          Provides the status of a processing job.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human task user interface (worker task template).

          ", "smithy.api#required": {} } }, - "ExitMessage": { - "target": "com.amazonaws.sagemaker#ExitMessage", + "HumanTaskUiName": { + "target": "com.amazonaws.sagemaker#HumanTaskUiName", "traits": { - "smithy.api#documentation": "

          An optional string, up to one KB in size, that contains metadata from the processing\n container when the processing job exits.

          " + "smithy.api#documentation": "

          The name of the human task user interface (worker task template).

          ", + "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "HumanTaskUiStatus": { + "target": "com.amazonaws.sagemaker#HumanTaskUiStatus", "traits": { - "smithy.api#documentation": "

          A string, up to one KB in size, that contains the reason a processing job failed, if\n it failed.

          " + "smithy.api#documentation": "

          The status of the human task user interface (worker task template). Valid values are listed below.

          " } }, - "ProcessingEndTime": { + "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The time at which the processing job completed.

          " + "smithy.api#documentation": "

          The timestamp when the human task user interface was created.

          ", + "smithy.api#required": {} } }, - "ProcessingStartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "UiTemplate": { + "target": "com.amazonaws.sagemaker#UiTemplateInfo", "traits": { - "smithy.api#documentation": "

          The time at which the processing job started.

          " - } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The time at which the processing job was last modified.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The time at which the processing job was created.

          ", - "smithy.api#required": {} - } - }, - "MonitoringScheduleArn": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", - "traits": { - "smithy.api#documentation": "

          The ARN of a monitoring schedule for an endpoint associated with this processing\n job.

          " - } - }, - "AutoMLJobArn": { - "target": "com.amazonaws.sagemaker#AutoMLJobArn", - "traits": { - "smithy.api#documentation": "

          The ARN of an AutoML job associated with this processing job.

          " - } - }, - "TrainingJobArn": { - "target": "com.amazonaws.sagemaker#TrainingJobArn", - "traits": { - "smithy.api#documentation": "

          The ARN of a training job associated with this processing job.

          " - } - } - } - }, - "com.amazonaws.sagemaker#DescribeSubscribedWorkteam": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DescribeSubscribedWorkteamRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeSubscribedWorkteamResponse" - }, - "traits": { - "smithy.api#documentation": "

          Gets information about a work team provided by a vendor. It returns details about the\n subscription with a vendor in the AWS Marketplace.

          " - } - }, - "com.amazonaws.sagemaker#DescribeSubscribedWorkteamRequest": { - "type": "structure", - "members": { - "WorkteamArn": { - "target": "com.amazonaws.sagemaker#WorkteamArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the subscribed work team to describe.

          ", - "smithy.api#required": {} - } - } - } - }, - "com.amazonaws.sagemaker#DescribeSubscribedWorkteamResponse": { - "type": "structure", - "members": { - "SubscribedWorkteam": { - "target": "com.amazonaws.sagemaker#SubscribedWorkteam", - "traits": { - "smithy.api#documentation": "

          A Workteam instance that contains information about the work team.

          ", - "smithy.api#required": {} + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeTrainingJob": { + "com.amazonaws.sagemaker#DescribeHyperParameterTuningJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeTrainingJobRequest" + "target": "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeTrainingJobResponse" + "target": "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobResponse" }, "errors": [ { @@ -8516,234 +9645,312 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns information about a training job.

          " + "smithy.api#documentation": "

          Gets\n a description of a hyperparameter tuning job.

          " } }, - "com.amazonaws.sagemaker#DescribeTrainingJobRequest": { + "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobRequest": { "type": "structure", "members": { - "TrainingJobName": { - "target": "com.amazonaws.sagemaker#TrainingJobName", + "HyperParameterTuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", "traits": { - "smithy.api#documentation": "

          The name of the training job.

          ", + "smithy.api#documentation": "

          The name of the tuning job.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeTrainingJobResponse": { + "com.amazonaws.sagemaker#DescribeHyperParameterTuningJobResponse": { "type": "structure", "members": { - "TrainingJobName": { - "target": "com.amazonaws.sagemaker#TrainingJobName", - "traits": { - "smithy.api#documentation": "

          Name of the model training job.

          ", - "smithy.api#required": {} - } - }, - "TrainingJobArn": { - "target": "com.amazonaws.sagemaker#TrainingJobArn", + "HyperParameterTuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the training job.

          ", + "smithy.api#documentation": "

          The name of the tuning job.

          ", "smithy.api#required": {} } }, - "TuningJobArn": { + "HyperParameterTuningJobArn": { "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the\n training job was launched by a hyperparameter tuning job.

          " + "smithy.api#documentation": "

          The\n Amazon Resource Name (ARN) of the tuning job.

          ", + "smithy.api#required": {} } }, - "LabelingJobArn": { - "target": "com.amazonaws.sagemaker#LabelingJobArn", + "HyperParameterTuningJobConfig": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the\n transform or training job.

          " + "smithy.api#documentation": "

          The HyperParameterTuningJobConfig object that specifies the\n configuration of the tuning job.

          ", + "smithy.api#required": {} } }, - "AutoMLJobArn": { - "target": "com.amazonaws.sagemaker#AutoMLJobArn", + "TrainingJobDefinition": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an AutoML job.

          " + "smithy.api#documentation": "

          The HyperParameterTrainingJobDefinition object that specifies the\n definition of the training jobs that this tuning job launches.

          " } }, - "ModelArtifacts": { - "target": "com.amazonaws.sagemaker#ModelArtifacts", + "TrainingJobDefinitions": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitions", "traits": { - "smithy.api#documentation": "

          Information about the Amazon S3 location that is configured for storing model artifacts.\n

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of the HyperParameterTrainingJobDefinition objects launched\n for this tuning job.

          " } }, - "TrainingJobStatus": { - "target": "com.amazonaws.sagemaker#TrainingJobStatus", + "HyperParameterTuningJobStatus": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStatus", "traits": { - "smithy.api#documentation": "

          The status of the\n training\n job.

          \n

          Amazon SageMaker provides the following training job statuses:

          \n
            \n
          • \n

            \n InProgress - The training is in progress.

            \n
          • \n
          • \n

            \n Completed - The training job has completed.

            \n
          • \n
          • \n

            \n Failed - The training job has failed. To see the reason for the\n failure, see the FailureReason field in the response to a\n DescribeTrainingJobResponse call.

            \n
          • \n
          • \n

            \n Stopping - The training job is stopping.

            \n
          • \n
          • \n

            \n Stopped - The training job has stopped.

            \n
          • \n
          \n

          For\n more detailed information, see SecondaryStatus.

          ", + "smithy.api#documentation": "

          The status of the tuning job: InProgress, Completed, Failed, Stopping, or\n Stopped.

          ", "smithy.api#required": {} } }, - "SecondaryStatus": { - "target": "com.amazonaws.sagemaker#SecondaryStatus", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Provides detailed information about the state of the training job. For detailed\n information on the secondary status of the training job, see StatusMessage\n under SecondaryStatusTransition.

          \n

          Amazon SageMaker provides primary statuses and secondary statuses that apply to each of\n them:

          \n
          \n
          InProgress
          \n
          \n
            \n
          • \n

            \n Starting\n - Starting the training job.

            \n
          • \n
          • \n

            \n Downloading - An optional stage for algorithms that\n support File training input mode. It indicates that\n data is being downloaded to the ML storage volumes.

            \n
          • \n
          • \n

            \n Training - Training is in progress.

            \n
          • \n
          • \n

            \n Interrupted - The job stopped because the managed\n spot training instances were interrupted.

            \n
          • \n
          • \n

            \n Uploading - Training is complete and the model\n artifacts are being uploaded to the S3 location.

            \n
          • \n
          \n
          \n
          Completed
          \n
          \n
            \n
          • \n

            \n Completed - The training job has completed.

            \n
          • \n
          \n
          \n
          Failed
          \n
          \n
            \n
          • \n

            \n Failed - The training job has failed. The reason for\n the failure is returned in the FailureReason field of\n DescribeTrainingJobResponse.

            \n
          • \n
          \n
          \n
          Stopped
          \n
          \n
            \n
          • \n

            \n MaxRuntimeExceeded - The job stopped because it\n exceeded the maximum allowed runtime.

            \n
          • \n
          • \n

            \n MaxWaitTimeExceeded - The job stopped because it\n exceeded the maximum allowed wait time.

            \n
          • \n
          • \n

            \n Stopped - The training job has stopped.

            \n
          • \n
          \n
          \n
          Stopping
          \n
          \n
            \n
          • \n

            \n Stopping - Stopping the training job.

            \n
          • \n
          \n
          \n
          \n \n \n

          Valid values for SecondaryStatus are subject to change.

          \n
          \n

          We no longer support the following secondary statuses:

          \n
            \n
          • \n

            \n LaunchingMLInstances\n

            \n
          • \n
          • \n

            \n PreparingTrainingStack\n

            \n
          • \n
          • \n

            \n DownloadingTrainingImage\n

            \n
          • \n
          ", + "smithy.api#documentation": "

          The date and time that the tuning job started.

          ", "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "HyperParameterTuningEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          If the training job failed, the reason it failed.

          " + "smithy.api#documentation": "

          The date and time that the tuning job ended.

          " } }, - "HyperParameters": { - "target": "com.amazonaws.sagemaker#HyperParameters", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Algorithm-specific parameters.

          " + "smithy.api#documentation": "

          The date and time that the status of the tuning job was modified.

          " } }, - "AlgorithmSpecification": { - "target": "com.amazonaws.sagemaker#AlgorithmSpecification", + "TrainingJobStatusCounters": { + "target": "com.amazonaws.sagemaker#TrainingJobStatusCounters", "traits": { - "smithy.api#documentation": "

          Information about the algorithm used for training, and algorithm metadata.\n

          ", + "smithy.api#documentation": "

          The TrainingJobStatusCounters object that specifies the number of\n training jobs, categorized by status, that this tuning job launched.

          ", "smithy.api#required": {} } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "ObjectiveStatusCounters": { + "target": "com.amazonaws.sagemaker#ObjectiveStatusCounters", "traits": { - "smithy.api#documentation": "

          The AWS Identity and Access Management (IAM) role configured for the training job.

          " + "smithy.api#documentation": "

          The ObjectiveStatusCounters object that specifies the number of\n training jobs, categorized by the status of their final objective metric, that this\n tuning job launched.

          ", + "smithy.api#required": {} } }, - "InputDataConfig": { - "target": "com.amazonaws.sagemaker#InputDataConfig", + "BestTrainingJob": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary", "traits": { - "smithy.api#documentation": "

          An array of Channel objects that describes each data input channel.\n

          " + "smithy.api#documentation": "

          A TrainingJobSummary object that describes the training job that\n completed with the best current HyperParameterTuningJobObjective.

          " } }, - "OutputDataConfig": { - "target": "com.amazonaws.sagemaker#OutputDataConfig", + "OverallBestTrainingJob": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary", "traits": { - "smithy.api#documentation": "

          The S3 path where model artifacts that you configured when creating the job are\n stored. Amazon SageMaker creates subfolders for model artifacts.

          " + "smithy.api#documentation": "

          If the hyperparameter tuning job is an warm start tuning job with a\n WarmStartType of IDENTICAL_DATA_AND_ALGORITHM, this is the\n TrainingJobSummary for the training job with the best objective\n metric value of all training jobs launched by this tuning job and all parent jobs\n specified for the warm start tuning job.

          " } }, - "ResourceConfig": { - "target": "com.amazonaws.sagemaker#ResourceConfig", + "WarmStartConfig": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartConfig", "traits": { - "smithy.api#documentation": "

          Resources, including ML compute instances and ML storage volumes, that are\n configured for model training.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The configuration for starting the hyperparameter parameter tuning job using one or\n more previous tuning jobs as a starting point. The results of previous tuning jobs are\n used to inform which combinations of hyperparameters to search over in the new tuning\n job.

          " } }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that this training job has\n access to. For more information, see Protect Training Jobs by Using an Amazon\n Virtual Private Cloud.

          " + "smithy.api#documentation": "

          If the tuning job failed, the reason it failed.

          " } - }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#StoppingCondition", + } + } + }, + "com.amazonaws.sagemaker#DescribeImage": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeImageRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeImageResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes a SageMaker image.

          " + } + }, + "com.amazonaws.sagemaker#DescribeImageRequest": { + "type": "structure", + "members": { + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          Specifies a limit to how long a model training job can run. It also specifies the\n maximum time to wait for a spot instance. When the job reaches the time limit, Amazon SageMaker ends\n the training job. Use this API to cap model training costs.

          \n

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays\n job termination for 120 seconds. Algorithms can use this 120-second window to save the\n model artifacts, so the results of training are not lost.

          ", + "smithy.api#documentation": "

          The name of the image to describe.

          ", "smithy.api#required": {} } - }, + } + } + }, + "com.amazonaws.sagemaker#DescribeImageResponse": { + "type": "structure", + "members": { "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A timestamp that indicates when the training job was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the image was created.

          " } }, - "TrainingStartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "Description": { + "target": "com.amazonaws.sagemaker#ImageDescription", "traits": { - "smithy.api#documentation": "

          Indicates the time when the training job starts on training instances. You are\n billed for the time interval between this time and the value of\n TrainingEndTime. The start time in CloudWatch Logs might be later than this time.\n The difference is due to the time it takes to download the training data and to the size\n of the training container.

          " + "smithy.api#documentation": "

          The description of the image.

          " } }, - "TrainingEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ImageDisplayName", "traits": { - "smithy.api#documentation": "

          Indicates the time when the training job ends on training instances. You are billed\n for the time interval between the value of TrainingStartTime and this time.\n For successful jobs and stopped jobs, this is the time after model artifacts are\n uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          " + "smithy.api#documentation": "

          The name of the image as displayed.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          A timestamp that indicates when the status of the training job was last\n modified.

          " + "smithy.api#documentation": "

          When a create, update, or delete operation fails, the reason for the failure.

          " } }, - "SecondaryStatusTransitions": { - "target": "com.amazonaws.sagemaker#SecondaryStatusTransitions", + "ImageArn": { + "target": "com.amazonaws.sagemaker#ImageArn", "traits": { - "smithy.api#documentation": "

          A history of all of the secondary statuses that the training job has transitioned\n through.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image.

          " } }, - "FinalMetricDataList": { - "target": "com.amazonaws.sagemaker#FinalMetricDataList", + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          A collection of MetricData objects that specify the names, values, and\n dates and times that the training algorithm emitted to Amazon CloudWatch.

          " + "smithy.api#documentation": "

          The name of the image.

          " } }, - "EnableNetworkIsolation": { - "target": "com.amazonaws.sagemaker#Boolean", + "ImageStatus": { + "target": "com.amazonaws.sagemaker#ImageStatus", "traits": { - "smithy.api#documentation": "

          If you want to allow inbound or outbound network calls, except for calls between peers\n within a training cluster for distributed training, choose True. If you\n enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker\n downloads and uploads customer data and model artifacts through the specified VPC, but\n the training container does not have network access.

          " + "smithy.api#documentation": "

          The status of the image.

          " } }, - "EnableInterContainerTrafficEncryption": { - "target": "com.amazonaws.sagemaker#Boolean", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          To encrypt all communications between ML compute instances in distributed training,\n choose True. Encryption provides greater security for distributed training,\n but training might take longer. How long it takes depends on the amount of communication\n between compute instances, especially if you use a deep learning algorithms in\n distributed training.

          " + "smithy.api#documentation": "

          When the image was last modified.

          " } }, - "EnableManagedSpotTraining": { - "target": "com.amazonaws.sagemaker#Boolean", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          A Boolean indicating whether managed spot training is enabled (True) or\n not (False).

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that enables Amazon SageMaker to perform tasks on your behalf.

          " + } + } + } + }, + "com.amazonaws.sagemaker#DescribeImageVersion": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeImageVersionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeImageVersionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Describes a version of a SageMaker image.

          " + } + }, + "com.amazonaws.sagemaker#DescribeImageVersionRequest": { + "type": "structure", + "members": { + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", + "traits": { + "smithy.api#documentation": "

          The name of the image.

          ", + "smithy.api#required": {} } }, - "CheckpointConfig": { - "target": "com.amazonaws.sagemaker#CheckpointConfig" + "Version": { + "target": "com.amazonaws.sagemaker#ImageVersionNumber", + "traits": { + "smithy.api#documentation": "

          The version of the image. If not specified, the latest version is described.

          " + } + } + } + }, + "com.amazonaws.sagemaker#DescribeImageVersionResponse": { + "type": "structure", + "members": { + "BaseImage": { + "target": "com.amazonaws.sagemaker#ImageBaseImage", + "traits": { + "smithy.api#documentation": "

          The registry path of the container image on which this image version is based.

          " + } }, - "TrainingTimeInSeconds": { - "target": "com.amazonaws.sagemaker#TrainingTimeInSeconds", + "ContainerImage": { + "target": "com.amazonaws.sagemaker#ImageContainerImage", "traits": { - "smithy.api#documentation": "

          The training time in seconds.

          " + "smithy.api#documentation": "

          The registry path of the container image that contains this image version.

          " } }, - "BillableTimeInSeconds": { - "target": "com.amazonaws.sagemaker#BillableTimeInSeconds", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The billable time in seconds.

          \n

          You can calculate the savings from using managed spot training using the formula\n (1 - BillableTimeInSeconds / TrainingTimeInSeconds) * 100. For example,\n if BillableTimeInSeconds is 100 and TrainingTimeInSeconds is\n 500, the savings is 80%.

          " + "smithy.api#documentation": "

          When the version was created.

          " } }, - "DebugHookConfig": { - "target": "com.amazonaws.sagemaker#DebugHookConfig" + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          When a create or delete operation fails, the reason for the failure.

          " + } }, - "ExperimentConfig": { - "target": "com.amazonaws.sagemaker#ExperimentConfig" + "ImageArn": { + "target": "com.amazonaws.sagemaker#ImageArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image the version is based on.

          " + } }, - "DebugRuleConfigurations": { - "target": "com.amazonaws.sagemaker#DebugRuleConfigurations", + "ImageVersionArn": { + "target": "com.amazonaws.sagemaker#ImageVersionArn", "traits": { - "smithy.api#documentation": "

          Configuration information for debugging rules.

          " + "smithy.api#documentation": "

          The ARN of the version.

          " } }, - "TensorBoardOutputConfig": { - "target": "com.amazonaws.sagemaker#TensorBoardOutputConfig" + "ImageVersionStatus": { + "target": "com.amazonaws.sagemaker#ImageVersionStatus", + "traits": { + "smithy.api#documentation": "

          The status of the version.

          " + } }, - "DebugRuleEvaluationStatuses": { - "target": "com.amazonaws.sagemaker#DebugRuleEvaluationStatuses", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Status about the debug rule evaluation.

          " + "smithy.api#documentation": "

          When the version was last modified.

          " + } + }, + "Version": { + "target": "com.amazonaws.sagemaker#ImageVersionNumber", + "traits": { + "smithy.api#documentation": "

          The version number.

          " } } } }, - "com.amazonaws.sagemaker#DescribeTransformJob": { + "com.amazonaws.sagemaker#DescribeLabelingJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeTransformJobRequest" + "target": "com.amazonaws.sagemaker#DescribeLabelingJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeTransformJobResponse" + "target": "com.amazonaws.sagemaker#DescribeLabelingJobResponse" }, "errors": [ { @@ -8751,366 +9958,441 @@ } ], "traits": { - "smithy.api#documentation": "

          Returns information about a transform job.

          " + "smithy.api#documentation": "

          Gets information about a labeling job.

          " } }, - "com.amazonaws.sagemaker#DescribeTransformJobRequest": { + "com.amazonaws.sagemaker#DescribeLabelingJobRequest": { "type": "structure", "members": { - "TransformJobName": { - "target": "com.amazonaws.sagemaker#TransformJobName", + "LabelingJobName": { + "target": "com.amazonaws.sagemaker#LabelingJobName", "traits": { - "smithy.api#documentation": "

          The name of the transform job that you want to view details of.

          ", + "smithy.api#documentation": "

          The name of the labeling job to return information for.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeTransformJobResponse": { + "com.amazonaws.sagemaker#DescribeLabelingJobResponse": { "type": "structure", "members": { - "TransformJobName": { - "target": "com.amazonaws.sagemaker#TransformJobName", - "traits": { - "smithy.api#documentation": "

          The name of the transform job.

          ", - "smithy.api#required": {} - } - }, - "TransformJobArn": { - "target": "com.amazonaws.sagemaker#TransformJobArn", + "LabelingJobStatus": { + "target": "com.amazonaws.sagemaker#LabelingJobStatus", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the transform job.

          ", + "smithy.api#documentation": "

          The processing status of the labeling job.

          ", "smithy.api#required": {} } }, - "TransformJobStatus": { - "target": "com.amazonaws.sagemaker#TransformJobStatus", + "LabelCounters": { + "target": "com.amazonaws.sagemaker#LabelCounters", "traits": { - "smithy.api#documentation": "

          The\n status of the transform job. If the transform job failed, the reason\n is returned in the FailureReason field.

          ", + "smithy.api#documentation": "

          Provides a breakdown of the number of data objects labeled by humans, the number of\n objects labeled by machine, the number of objects than couldn't be labeled, and the\n total number of objects labeled.

          ", "smithy.api#required": {} } }, "FailureReason": { "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          If the transform job failed, FailureReason describes\n why\n it failed. A transform job creates a log file, which includes error\n messages, and stores it\n as\n an Amazon S3 object. For more information, see Log Amazon SageMaker Events with\n Amazon CloudWatch.

          " + "smithy.api#documentation": "

          If the job failed, the reason that it failed.

          " } }, - "ModelName": { - "target": "com.amazonaws.sagemaker#ModelName", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The name of the model used in the transform job.

          ", + "smithy.api#documentation": "

          The date and time that the labeling job was created.

          ", "smithy.api#required": {} } }, - "MaxConcurrentTransforms": { - "target": "com.amazonaws.sagemaker#MaxConcurrentTransforms", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The\n maximum number\n of\n parallel requests on each instance node\n that can be launched in a transform job. The default value is 1.

          " + "smithy.api#documentation": "

          The date and time that the labeling job was last updated.

          ", + "smithy.api#required": {} } }, - "ModelClientConfig": { - "target": "com.amazonaws.sagemaker#ModelClientConfig", + "JobReferenceCode": { + "target": "com.amazonaws.sagemaker#JobReferenceCode", "traits": { - "smithy.api#documentation": "

          The timeout and maximum number of retries for processing a transform job\n invocation.

          " + "smithy.api#documentation": "

          A unique identifier for work done as part of a labeling job.

          ", + "smithy.api#required": {} } }, - "MaxPayloadInMB": { - "target": "com.amazonaws.sagemaker#MaxPayloadInMB", + "LabelingJobName": { + "target": "com.amazonaws.sagemaker#LabelingJobName", "traits": { - "smithy.api#documentation": "

          The\n maximum\n payload size, in MB, used in the\n transform job.

          " + "smithy.api#documentation": "

          The name assigned to the labeling job when it was created.

          ", + "smithy.api#required": {} } }, - "BatchStrategy": { - "target": "com.amazonaws.sagemaker#BatchStrategy", + "LabelingJobArn": { + "target": "com.amazonaws.sagemaker#LabelingJobArn", "traits": { - "smithy.api#documentation": "

          Specifies the number of records to include in a mini-batch for an HTTP inference\n request.\n A record\n is a single unit of input data that inference\n can be made on. For example, a single line in a CSV file is a record.

          \n

          To enable the batch strategy, you must set SplitType\n to\n Line, RecordIO, or\n TFRecord.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the labeling job.

          ", + "smithy.api#required": {} } }, - "Environment": { - "target": "com.amazonaws.sagemaker#TransformEnvironmentMap", + "LabelAttributeName": { + "target": "com.amazonaws.sagemaker#LabelAttributeName", "traits": { - "smithy.api#documentation": "

          The\n environment variables to set in the Docker container. We support up to 16 key and values\n entries in the map.

          " + "smithy.api#documentation": "

          The attribute used as the label in the output manifest file.

          " } }, - "TransformInput": { - "target": "com.amazonaws.sagemaker#TransformInput", + "InputConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobInputConfig", "traits": { - "smithy.api#documentation": "

          Describes the dataset to be transformed and the Amazon S3 location where it is\n stored.

          ", + "smithy.api#documentation": "

          Input configuration information for the labeling job, such as the Amazon S3 location of the\n data objects and the location of the manifest file that describes the data\n objects.

          ", "smithy.api#required": {} } }, - "TransformOutput": { - "target": "com.amazonaws.sagemaker#TransformOutput", + "OutputConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobOutputConfig", "traits": { - "smithy.api#documentation": "

          Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the\n transform job.

          " + "smithy.api#documentation": "

          The location of the job's output data and the AWS Key Management Service key ID for the key used to\n encrypt the output data, if any.

          ", + "smithy.api#required": {} } }, - "TransformResources": { - "target": "com.amazonaws.sagemaker#TransformResources", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          Describes\n the resources, including ML instance types and ML instance count, to\n use for the transform job.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf\n during data labeling.

          ", "smithy.api#required": {} } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "LabelCategoryConfigS3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the transform Job was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The S3 location of the JSON file that defines the categories used to label data\n objects. Please note the following label-category limits:

          \n
            \n
          • \n

            Semantic segmentation labeling jobs using automated labeling: 20 labels

            \n
          • \n
          • \n

            Box bounding labeling jobs (all): 10 labels

            \n
          • \n
          \n

          The file is a JSON structure in the following format:

          \n

          \n {\n

          \n

          \n \"document-version\": \"2018-11-28\"\n

          \n

          \n \"labels\": [\n

          \n

          \n {\n

          \n

          \n \"label\": \"label 1\"\n

          \n

          \n },\n

          \n

          \n {\n

          \n

          \n \"label\": \"label 2\"\n

          \n

          \n },\n

          \n

          \n ...\n

          \n

          \n {\n

          \n

          \n \"label\": \"label n\"\n

          \n

          \n }\n

          \n

          \n ]\n

          \n

          \n }\n

          " } }, - "TransformStartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "StoppingConditions": { + "target": "com.amazonaws.sagemaker#LabelingJobStoppingConditions", "traits": { - "smithy.api#documentation": "

          Indicates when the transform job starts\n on\n ML instances. You are billed for the time interval between this time\n and the value of TransformEndTime.

          " + "smithy.api#documentation": "

          A set of conditions for stopping a labeling job. If any of the conditions are met, the\n job is automatically stopped.

          " } }, - "TransformEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "LabelingJobAlgorithmsConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobAlgorithmsConfig", "traits": { - "smithy.api#documentation": "

          Indicates when the transform job has been\n \n completed, or has stopped or failed. You are billed for the time\n interval between this time and the value of TransformStartTime.

          " + "smithy.api#documentation": "

          Configuration information for automated data labeling.

          " } }, - "LabelingJobArn": { - "target": "com.amazonaws.sagemaker#LabelingJobArn", + "HumanTaskConfig": { + "target": "com.amazonaws.sagemaker#HumanTaskConfig", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the\n transform or training job.

          " + "smithy.api#documentation": "

          Configuration information required for human workers to complete a labeling\n task.

          ", + "smithy.api#required": {} } }, - "AutoMLJobArn": { - "target": "com.amazonaws.sagemaker#AutoMLJobArn", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AutoML transform job.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } }, - "DataProcessing": { - "target": "com.amazonaws.sagemaker#DataProcessing" - }, - "ExperimentConfig": { - "target": "com.amazonaws.sagemaker#ExperimentConfig" + "LabelingJobOutput": { + "target": "com.amazonaws.sagemaker#LabelingJobOutput", + "traits": { + "smithy.api#documentation": "

          The location of the output produced by the labeling job.

          " + } } } }, - "com.amazonaws.sagemaker#DescribeTrial": { + "com.amazonaws.sagemaker#DescribeModel": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeTrialRequest" + "target": "com.amazonaws.sagemaker#DescribeModelInput" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeTrialResponse" + "target": "com.amazonaws.sagemaker#DescribeModelOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], - "traits": { - "smithy.api#documentation": "

          Provides a list of a trial's properties.

          " - } - }, - "com.amazonaws.sagemaker#DescribeTrialComponent": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#DescribeTrialComponentRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#DescribeTrialComponentResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Provides a list of a trials component's properties.

          " + "smithy.api#documentation": "

          Describes a model that you created using the CreateModel\n API.

          " } }, - "com.amazonaws.sagemaker#DescribeTrialComponentRequest": { + "com.amazonaws.sagemaker#DescribeModelInput": { "type": "structure", "members": { - "TrialComponentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelName": { + "target": "com.amazonaws.sagemaker#ModelName", "traits": { - "smithy.api#documentation": "

          The name of the trial component to describe.

          ", + "smithy.api#documentation": "

          The name of the model.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeTrialComponentResponse": { + "com.amazonaws.sagemaker#DescribeModelOutput": { "type": "structure", "members": { - "TrialComponentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelName": { + "target": "com.amazonaws.sagemaker#ModelName", "traits": { - "smithy.api#documentation": "

          The name of the trial component.

          " + "smithy.api#documentation": "

          Name of the Amazon SageMaker model.

          ", + "smithy.api#required": {} } }, - "TrialComponentArn": { - "target": "com.amazonaws.sagemaker#TrialComponentArn", + "PrimaryContainer": { + "target": "com.amazonaws.sagemaker#ContainerDefinition", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial component.

          " + "smithy.api#documentation": "

          The location of the primary inference code, associated artifacts, and custom\n environment map that the inference code uses when it is deployed in production.\n

          " } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "Containers": { + "target": "com.amazonaws.sagemaker#ContainerDefinitionList", "traits": { - "smithy.api#documentation": "

          The name of the component as displayed. If DisplayName isn't specified,\n TrialComponentName is displayed.

          " + "smithy.api#documentation": "

          The containers in the inference pipeline.

          " } }, - "Source": { - "target": "com.amazonaws.sagemaker#TrialComponentSource", + "ExecutionRoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that you specified for the\n model.

          ", + "smithy.api#required": {} } }, - "Status": { - "target": "com.amazonaws.sagemaker#TrialComponentStatus", + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig", "traits": { - "smithy.api#documentation": "

          The status of the component. States include:

          \n
            \n
          • \n

            InProgress

            \n
          • \n
          • \n

            Completed

            \n
          • \n
          • \n

            Failed

            \n
          • \n
          " + "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that this model has access\n to. For more information, see Protect Endpoints by Using an Amazon Virtual\n Private Cloud\n

          " } }, - "StartTime": { + "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the component started.

          " + "smithy.api#documentation": "

          A timestamp that shows when the model was created.

          ", + "smithy.api#required": {} } }, - "EndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelArn": { + "target": "com.amazonaws.sagemaker#ModelArn", "traits": { - "smithy.api#documentation": "

          When the component ended.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          ", + "smithy.api#required": {} } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "EnableNetworkIsolation": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          When the component was created.

          " + "smithy.api#documentation": "

          If True, no inbound or outbound network calls can be made to or from the\n model container.

          " } - }, - "CreatedBy": { - "target": "com.amazonaws.sagemaker#UserContext", + } + } + }, + "com.amazonaws.sagemaker#DescribeModelPackage": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeModelPackageInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeModelPackageOutput" + }, + "traits": { + "smithy.api#documentation": "

          Returns a description of the specified model package, which is used to create Amazon SageMaker\n models or list them on AWS Marketplace.

          \n

          To create models in Amazon SageMaker, buyers can subscribe to model packages listed on AWS\n Marketplace.

          " + } + }, + "com.amazonaws.sagemaker#DescribeModelPackageGroup": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeModelPackageGroupInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeModelPackageGroupOutput" + }, + "traits": { + "smithy.api#documentation": "

          Gets a description for the specified model group.

          " + } + }, + "com.amazonaws.sagemaker#DescribeModelPackageGroupInput": { + "type": "structure", + "members": { + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#ArnOrName", "traits": { - "smithy.api#documentation": "

          Who created the component.

          " + "smithy.api#documentation": "

          The name of the model group to describe.

          ", + "smithy.api#required": {} } - }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + } + } + }, + "com.amazonaws.sagemaker#DescribeModelPackageGroupOutput": { + "type": "structure", + "members": { + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          When the component was last modified.

          " + "smithy.api#documentation": "

          The name of the model group.

          ", + "smithy.api#required": {} } }, - "LastModifiedBy": { - "target": "com.amazonaws.sagemaker#UserContext", + "ModelPackageGroupArn": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupArn", "traits": { - "smithy.api#documentation": "

          Who last modified the component.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model group.

          ", + "smithy.api#required": {} } }, - "Parameters": { - "target": "com.amazonaws.sagemaker#TrialComponentParameters", + "ModelPackageGroupDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The hyperparameters of the component.

          " + "smithy.api#documentation": "

          A description of the model group.

          " } }, - "InputArtifacts": { - "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The input artifacts of the component.

          " + "smithy.api#documentation": "

          The time that the model group was created.

          ", + "smithy.api#required": {} } }, - "OutputArtifacts": { - "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          The output artifacts of the component.

          " + "smithy.api#required": {} } }, - "Metrics": { - "target": "com.amazonaws.sagemaker#TrialComponentMetricSummaries", + "ModelPackageGroupStatus": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupStatus", "traits": { - "smithy.api#documentation": "

          The metrics for the component.

          " + "smithy.api#documentation": "

          The status of the model group.

          ", + "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeTrialRequest": { + "com.amazonaws.sagemaker#DescribeModelPackageInput": { "type": "structure", "members": { - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#VersionedArnOrName", "traits": { - "smithy.api#documentation": "

          The name of the trial to describe.

          ", + "smithy.api#documentation": "

          The name of the model package to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeTrialResponse": { + "com.amazonaws.sagemaker#DescribeModelPackageOutput": { "type": "structure", "members": { - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The name of the trial.

          " + "smithy.api#documentation": "

          The name of the model package being described.

          ", + "smithy.api#required": {} } }, - "TrialArn": { - "target": "com.amazonaws.sagemaker#TrialArn", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " + "smithy.api#documentation": "

          If the model is a versioned model, the name of the model group that the versioned\n model belongs to.

          " } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageVersion": { + "target": "com.amazonaws.sagemaker#ModelPackageVersion", "traits": { - "smithy.api#documentation": "

          The name of the trial as displayed. If DisplayName isn't specified,\n TrialName is displayed.

          " + "smithy.api#documentation": "

          The version of the model package.

          " } }, - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageArn": { + "target": "com.amazonaws.sagemaker#ModelPackageArn", "traits": { - "smithy.api#documentation": "

          The name of the experiment the trial is part of.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package.

          ", + "smithy.api#required": {} } }, - "Source": { - "target": "com.amazonaws.sagemaker#TrialSource", + "ModelPackageDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          " + "smithy.api#documentation": "

          A brief summary of the model package.

          " } }, "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          When the trial was created.

          " + "smithy.api#documentation": "

          A timestamp specifying when the model package was created.

          ", + "smithy.api#required": {} + } + }, + "InferenceSpecification": { + "target": "com.amazonaws.sagemaker#InferenceSpecification", + "traits": { + "smithy.api#documentation": "

          Details about inference jobs that can be run with models based on this model\n package.

          " + } + }, + "SourceAlgorithmSpecification": { + "target": "com.amazonaws.sagemaker#SourceAlgorithmSpecification", + "traits": { + "smithy.api#documentation": "

          Details about the algorithm that was used to create the model package.

          " + } + }, + "ValidationSpecification": { + "target": "com.amazonaws.sagemaker#ModelPackageValidationSpecification", + "traits": { + "smithy.api#documentation": "

          Configurations for one or more transform jobs that Amazon SageMaker runs to test the model\n package.

          " + } + }, + "ModelPackageStatus": { + "target": "com.amazonaws.sagemaker#ModelPackageStatus", + "traits": { + "smithy.api#documentation": "

          The current status of the model package.

          ", + "smithy.api#required": {} + } + }, + "ModelPackageStatusDetails": { + "target": "com.amazonaws.sagemaker#ModelPackageStatusDetails", + "traits": { + "smithy.api#documentation": "

          Details about the current status of the model package.

          ", + "smithy.api#required": {} + } + }, + "CertifyForMarketplace": { + "target": "com.amazonaws.sagemaker#CertifyForMarketplace", + "traits": { + "smithy.api#documentation": "

          Whether the model package is certified for listing on AWS Marketplace.

          " + } + }, + "ModelApprovalStatus": { + "target": "com.amazonaws.sagemaker#ModelApprovalStatus", + "traits": { + "smithy.api#documentation": "

          The approval status of the model package.

          " } }, "CreatedBy": { - "target": "com.amazonaws.sagemaker#UserContext", + "target": "com.amazonaws.sagemaker#UserContext" + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, + "ModelMetrics": { + "target": "com.amazonaws.sagemaker#ModelMetrics", "traits": { - "smithy.api#documentation": "

          Who created the trial.

          " + "smithy.api#documentation": "

          Metrics for the model.

          " } }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the trial was last modified.

          " + "smithy.api#documentation": "

          The last time the model package was modified.

          " } }, "LastModifiedBy": { - "target": "com.amazonaws.sagemaker#UserContext", + "target": "com.amazonaws.sagemaker#UserContext" + }, + "ApprovalDescription": { + "target": "com.amazonaws.sagemaker#ApprovalDescription", "traits": { - "smithy.api#documentation": "

          Who last modified the trial.

          " + "smithy.api#documentation": "

          A description provided for the model approval.

          " } } } }, - "com.amazonaws.sagemaker#DescribeUserProfile": { + "com.amazonaws.sagemaker#DescribeMonitoringSchedule": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeUserProfileRequest" + "target": "com.amazonaws.sagemaker#DescribeMonitoringScheduleRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeUserProfileResponse" + "target": "com.amazonaws.sagemaker#DescribeMonitoringScheduleResponse" }, "errors": [ { @@ -9118,309 +10400,324 @@ } ], "traits": { - "smithy.api#documentation": "

          Describes a user profile. For more information, see CreateUserProfile.

          " + "smithy.api#documentation": "

          Describes the schedule for a monitoring job.

          " } }, - "com.amazonaws.sagemaker#DescribeUserProfileRequest": { + "com.amazonaws.sagemaker#DescribeMonitoringScheduleRequest": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", "traits": { - "smithy.api#documentation": "

          The domain ID.

          ", - "smithy.api#required": {} - } - }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", - "traits": { - "smithy.api#documentation": "

          The user profile name.

          ", + "smithy.api#documentation": "

          Name of a previously created monitoring schedule.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeUserProfileResponse": { + "com.amazonaws.sagemaker#DescribeMonitoringScheduleResponse": { "type": "structure", "members": { - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", - "traits": { - "smithy.api#documentation": "

          The ID of the domain that contains the profile.

          " - } - }, - "UserProfileArn": { - "target": "com.amazonaws.sagemaker#UserProfileArn", - "traits": { - "smithy.api#documentation": "

          The user profile Amazon Resource Name (ARN).

          " - } - }, - "UserProfileName": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "MonitoringScheduleArn": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", "traits": { - "smithy.api#documentation": "

          The user profile name.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          ", + "smithy.api#required": {} } }, - "HomeEfsFileSystemUid": { - "target": "com.amazonaws.sagemaker#EfsUid", + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", "traits": { - "smithy.api#documentation": "

          The ID of the user's profile in the Amazon Elastic File System (EFS) volume.

          " + "smithy.api#documentation": "

          Name of the monitoring schedule.

          ", + "smithy.api#required": {} } }, - "Status": { - "target": "com.amazonaws.sagemaker#UserProfileStatus", + "MonitoringScheduleStatus": { + "target": "com.amazonaws.sagemaker#ScheduleStatus", "traits": { - "smithy.api#documentation": "

          The status.

          " + "smithy.api#documentation": "

          The status of a monitoring job.

          ", + "smithy.api#required": {} } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The last modified time.

          " + "smithy.api#documentation": "

          A string, up to one KB in size, that contains the reason a monitoring job failed, if it\n failed.

          " } }, "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The creation time.

          " + "smithy.api#documentation": "

          The time at which the monitoring job was created.

          ", + "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The failure reason.

          " + "smithy.api#documentation": "

          The time at which the monitoring job was last modified.

          ", + "smithy.api#required": {} } }, - "SingleSignOnUserIdentifier": { - "target": "com.amazonaws.sagemaker#SingleSignOnUserIdentifier", + "MonitoringScheduleConfig": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleConfig", "traits": { - "smithy.api#documentation": "

          The SSO user identifier.

          " + "smithy.api#documentation": "

          The configuration object that specifies the monitoring schedule and defines the\n monitoring job.

          ", + "smithy.api#required": {} } }, - "SingleSignOnUserValue": { - "target": "com.amazonaws.sagemaker#String256", + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", "traits": { - "smithy.api#documentation": "

          The SSO user value.

          " + "smithy.api#documentation": "

          The name of the endpoint for the monitoring job.

          " } }, - "UserSettings": { - "target": "com.amazonaws.sagemaker#UserSettings", + "LastMonitoringExecutionSummary": { + "target": "com.amazonaws.sagemaker#MonitoringExecutionSummary", "traits": { - "smithy.api#documentation": "

          A collection of settings.

          " + "smithy.api#documentation": "

          Describes metadata on the last execution to run, if there was one.

          " } } } }, - "com.amazonaws.sagemaker#DescribeWorkforce": { + "com.amazonaws.sagemaker#DescribeNotebookInstance": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeWorkforceRequest" + "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceInput" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeWorkforceResponse" + "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceOutput" }, "traits": { - "smithy.api#documentation": "

          Lists private workforce information, including workforce name, Amazon Resource Name\n (ARN), and, if applicable, allowed IP address ranges (CIDRs). Allowable IP address\n ranges are the IP addresses that workers can use to access tasks.

          \n \n

          This operation applies only to private workforces.

          \n
          " - } - }, - "com.amazonaws.sagemaker#DescribeWorkforceRequest": { - "type": "structure", - "members": { - "WorkforceName": { - "target": "com.amazonaws.sagemaker#WorkforceName", - "traits": { - "smithy.api#documentation": "

          The name of the private workforce whose access you want to restrict.\n WorkforceName is automatically set to default when a\n workforce is created and cannot be modified.

          ", - "smithy.api#required": {} - } - } + "smithy.api#documentation": "

          Returns information about a notebook instance.

          " } }, - "com.amazonaws.sagemaker#DescribeWorkforceResponse": { + "com.amazonaws.sagemaker#DescribeNotebookInstanceInput": { "type": "structure", "members": { - "Workforce": { - "target": "com.amazonaws.sagemaker#Workforce", + "NotebookInstanceName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceName", "traits": { - "smithy.api#documentation": "

          A single private workforce, which is automatically created when you create your first\n private work team. You can create one private work force in each AWS Region. By default,\n any workforce-related API operation used in a specific region will apply to the\n workforce created in that region. To learn how to create a private workforce, see Create a Private Workforce.

          ", + "smithy.api#documentation": "

          The name of the notebook instance that you want information about.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeWorkteam": { + "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfig": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DescribeWorkteamRequest" + "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigInput" }, "output": { - "target": "com.amazonaws.sagemaker#DescribeWorkteamResponse" + "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigOutput" }, "traits": { - "smithy.api#documentation": "

          Gets information about a specific work team. You can see information such as the\n create date, the last updated date, membership information, and the work team's Amazon\n Resource Name (ARN).

          " + "smithy.api#documentation": "

          Returns a description of a notebook instance lifecycle configuration.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " } }, - "com.amazonaws.sagemaker#DescribeWorkteamRequest": { + "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigInput": { "type": "structure", "members": { - "WorkteamName": { - "target": "com.amazonaws.sagemaker#WorkteamName", + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", "traits": { - "smithy.api#documentation": "

          The name of the work team to return a description of.

          ", + "smithy.api#documentation": "

          The name of the lifecycle configuration to describe.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DescribeWorkteamResponse": { + "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfigOutput": { "type": "structure", "members": { - "Workteam": { - "target": "com.amazonaws.sagemaker#Workteam", + "NotebookInstanceLifecycleConfigArn": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn", "traits": { - "smithy.api#documentation": "

          A Workteam instance that contains information about the work team.\n

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          " + } + }, + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "traits": { + "smithy.api#documentation": "

          The name of the lifecycle configuration.

          " + } + }, + "OnCreate": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "traits": { + "smithy.api#documentation": "

          The shell script that runs only once, when you create a notebook instance.

          " + } + }, + "OnStart": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList", + "traits": { + "smithy.api#documentation": "

          The shell script that runs every time you start a notebook instance, including when\n you create the notebook instance.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", + "traits": { + "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was last modified.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was created.

          " } } } }, - "com.amazonaws.sagemaker#DesiredWeightAndCapacity": { + "com.amazonaws.sagemaker#DescribeNotebookInstanceOutput": { "type": "structure", "members": { - "VariantName": { - "target": "com.amazonaws.sagemaker#VariantName", + "NotebookInstanceArn": { + "target": "com.amazonaws.sagemaker#NotebookInstanceArn", "traits": { - "smithy.api#documentation": "

          The name of the\n variant\n to update.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the notebook instance.

          " } }, - "DesiredWeight": { - "target": "com.amazonaws.sagemaker#VariantWeight", + "NotebookInstanceName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceName", "traits": { - "smithy.api#documentation": "

          The variant's weight.

          " + "smithy.api#documentation": "

          The name of the Amazon SageMaker notebook instance.

          " } }, - "DesiredInstanceCount": { - "target": "com.amazonaws.sagemaker#TaskCount", + "NotebookInstanceStatus": { + "target": "com.amazonaws.sagemaker#NotebookInstanceStatus", "traits": { - "smithy.api#documentation": "

          The variant's capacity.

          " + "smithy.api#documentation": "

          The status of the notebook instance.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies weight and capacity values for a production variant.

          " - } - }, - "com.amazonaws.sagemaker#DesiredWeightAndCapacityList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#DesiredWeightAndCapacity" - }, - "traits": { - "smithy.api#length": { - "min": 1 - } - } - }, - "com.amazonaws.sagemaker#DestinationS3Uri": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 512 }, - "smithy.api#pattern": "^(https|s3)://([^/])/?(.*)$" - } - }, - "com.amazonaws.sagemaker#DetailedAlgorithmStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "NotStarted", - "name": "NOT_STARTED" - }, - { - "value": "InProgress", - "name": "IN_PROGRESS" - }, - { - "value": "Completed", - "name": "COMPLETED" - }, - { - "value": "Failed", - "name": "FAILED" + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          If status is Failed, the reason it failed.

          " } - ] - } - }, - "com.amazonaws.sagemaker#DetailedModelPackageStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "NotStarted", - "name": "NOT_STARTED" - }, - { - "value": "InProgress", - "name": "IN_PROGRESS" - }, - { - "value": "Completed", - "name": "COMPLETED" - }, - { - "value": "Failed", - "name": "FAILED" + }, + "Url": { + "target": "com.amazonaws.sagemaker#NotebookInstanceUrl", + "traits": { + "smithy.api#documentation": "

          The URL that you use to connect to the Jupyter notebook that is running in your\n notebook instance.

          " } - ] - } - }, - "com.amazonaws.sagemaker#DirectInternetAccess": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Enabled", - "name": "ENABLED" - }, - { - "value": "Disabled", - "name": "DISABLED" + }, + "InstanceType": { + "target": "com.amazonaws.sagemaker#InstanceType", + "traits": { + "smithy.api#documentation": "

          The type of ML compute instance running on the notebook instance.

          " } - ] + }, + "SubnetId": { + "target": "com.amazonaws.sagemaker#SubnetId", + "traits": { + "smithy.api#documentation": "

          The ID of the VPC subnet.

          " + } + }, + "SecurityGroups": { + "target": "com.amazonaws.sagemaker#SecurityGroupIds", + "traits": { + "smithy.api#documentation": "

          The IDs of the VPC security groups.

          " + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role associated with the instance.\n

          " + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage\n volume attached to the instance.

          " + } + }, + "NetworkInterfaceId": { + "target": "com.amazonaws.sagemaker#NetworkInterfaceId", + "traits": { + "smithy.api#documentation": "

          The network interface IDs that Amazon SageMaker created at the time of creating the instance.\n

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", + "traits": { + "smithy.api#documentation": "

          A timestamp. Use this parameter to retrieve the time when the notebook instance was\n last modified.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A timestamp. Use this parameter to return the time when the notebook instance was\n created.

          " + } + }, + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "traits": { + "smithy.api#documentation": "

          Returns the name of a notebook instance lifecycle configuration.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance\n

          " + } + }, + "DirectInternetAccess": { + "target": "com.amazonaws.sagemaker#DirectInternetAccess", + "traits": { + "smithy.api#documentation": "

          Describes whether Amazon SageMaker provides internet access to the notebook instance. If this\n value is set to Disabled, the notebook instance does not have\n internet access, and cannot connect to Amazon SageMaker training and endpoint services.

          \n

          For more information, see Notebook Instances Are Internet-Enabled by Default.

          " + } + }, + "VolumeSizeInGB": { + "target": "com.amazonaws.sagemaker#NotebookInstanceVolumeSizeInGB", + "traits": { + "smithy.api#documentation": "

          The size, in GB, of the ML storage volume attached to the notebook instance.

          " + } + }, + "AcceleratorTypes": { + "target": "com.amazonaws.sagemaker#NotebookInstanceAcceleratorTypes", + "traits": { + "smithy.api#documentation": "

          A list of the Elastic Inference (EI) instance types associated with this notebook\n instance. Currently only one EI instance type can be associated with a notebook\n instance. For more information, see Using Elastic Inference in Amazon\n SageMaker.

          " + } + }, + "DefaultCodeRepository": { + "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "traits": { + "smithy.api#documentation": "

          The Git repository associated with the notebook instance as its default code\n repository. This can be either the name of a Git repository stored as a resource in your\n account, or the URL of a Git repository in AWS CodeCommit or in any\n other Git repository. When you open a notebook instance, it opens in the directory that\n contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker\n Notebook Instances.

          " + } + }, + "AdditionalCodeRepositories": { + "target": "com.amazonaws.sagemaker#AdditionalCodeRepositoryNamesOrUrls", + "traits": { + "smithy.api#documentation": "

          An array of up to three Git repositories associated with the notebook instance. These\n can be either the names of Git repositories stored as resources in your account, or the\n URL of Git repositories in AWS CodeCommit or in any\n other Git repository. These repositories are cloned at the same level as the default\n repository of your notebook instance. For more information, see Associating Git\n Repositories with Amazon SageMaker Notebook Instances.

          " + } + }, + "RootAccess": { + "target": "com.amazonaws.sagemaker#RootAccess", + "traits": { + "smithy.api#documentation": "

          Whether root access is enabled or disabled for users of the notebook instance.

          \n \n

          Lifecycle configurations need root access to be able to set up a notebook\n instance. Because of this, lifecycle configurations associated with a notebook\n instance always run with root access even if you disable root access for\n users.

          \n
          " + } + } } }, - "com.amazonaws.sagemaker#DirectoryPath": { - "type": "string", + "com.amazonaws.sagemaker#DescribePipeline": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribePipelineRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribePipelineResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#length": { - "min": 0, - "max": 4096 - }, - "smithy.api#pattern": ".*" + "smithy.api#documentation": "

          Describes the details of a pipeline.

          " } }, - "com.amazonaws.sagemaker#DisassociateAdditionalCodeRepositories": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#DisassociateDefaultCodeRepository": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#DisassociateNotebookInstanceAcceleratorTypes": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#DisassociateNotebookInstanceLifecycleConfig": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#DisassociateTrialComponent": { + "com.amazonaws.sagemaker#DescribePipelineDefinitionForExecution": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#DisassociateTrialComponentRequest" + "target": "com.amazonaws.sagemaker#DescribePipelineDefinitionForExecutionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#DisassociateTrialComponentResponse" + "target": "com.amazonaws.sagemaker#DescribePipelineDefinitionForExecutionResponse" }, "errors": [ { @@ -9428,3673 +10725,7315 @@ } ], "traits": { - "smithy.api#documentation": "

          Disassociates a trial component from a trial. This doesn't effect other trials the\n component is associated with. Before you can delete a component, you must disassociate the\n component from all trials it is associated with. To associate a trial component with a trial,\n call the AssociateTrialComponent API.

          \n

          To get a list of the trials a component is associated with, use the Search API. Specify ExperimentTrialComponent for the Resource parameter.\n The list appears in the response under Results.TrialComponent.Parents.

          " + "smithy.api#documentation": "

          Describes the details of an execution's pipeline definition.

          " } }, - "com.amazonaws.sagemaker#DisassociateTrialComponentRequest": { + "com.amazonaws.sagemaker#DescribePipelineDefinitionForExecutionRequest": { "type": "structure", "members": { - "TrialComponentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", - "traits": { - "smithy.api#documentation": "

          The name of the component to disassociate from the trial.

          ", - "smithy.api#required": {} - } - }, - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          The name of the trial to disassociate from.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          ", "smithy.api#required": {} } } } }, - "com.amazonaws.sagemaker#DisassociateTrialComponentResponse": { + "com.amazonaws.sagemaker#DescribePipelineDefinitionForExecutionResponse": { "type": "structure", "members": { - "TrialComponentArn": { - "target": "com.amazonaws.sagemaker#TrialComponentArn", + "PipelineDefinition": { + "target": "com.amazonaws.sagemaker#PipelineDefinition", "traits": { - "smithy.api#documentation": "

          The ARN of the trial component.

          " + "smithy.api#documentation": "

          The JSON pipeline definition.

          " } }, - "TrialArn": { - "target": "com.amazonaws.sagemaker#TrialArn", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " + "smithy.api#documentation": "

          The time when the pipeline was created.

          " } } } }, - "com.amazonaws.sagemaker#Dollars": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 0, - "max": 2 + "com.amazonaws.sagemaker#DescribePipelineExecution": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribePipelineExecutionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribePipelineExecutionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" } + ], + "traits": { + "smithy.api#documentation": "

          Describes the details of a pipeline execution.

          " } }, - "com.amazonaws.sagemaker#DomainArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:domain/.*" + "com.amazonaws.sagemaker#DescribePipelineExecutionRequest": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          ", + "smithy.api#required": {} + } + } } }, - "com.amazonaws.sagemaker#DomainDetails": { + "com.amazonaws.sagemaker#DescribePipelineExecutionResponse": { "type": "structure", "members": { - "DomainArn": { - "target": "com.amazonaws.sagemaker#DomainArn", + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", "traits": { - "smithy.api#documentation": "

          The domain's Amazon Resource Name (ARN).

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline.

          " } }, - "DomainId": { - "target": "com.amazonaws.sagemaker#DomainId", + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          The domain ID.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " } }, - "DomainName": { - "target": "com.amazonaws.sagemaker#DomainName", + "PipelineExecutionDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineExecutionName", "traits": { - "smithy.api#documentation": "

          The domain name.

          " + "smithy.api#documentation": "

          The display name of the pipeline execution.

          " } }, - "Status": { - "target": "com.amazonaws.sagemaker#DomainStatus", + "PipelineExecutionStatus": { + "target": "com.amazonaws.sagemaker#PipelineExecutionStatus", "traits": { - "smithy.api#documentation": "

          The status.

          " + "smithy.api#documentation": "

          The status of the pipeline execution.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "PipelineExecutionDescription": { + "target": "com.amazonaws.sagemaker#PipelineExecutionDescription", "traits": { - "smithy.api#documentation": "

          The creation time.

          " + "smithy.api#documentation": "

          The description of the pipeline execution.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The last modified time.

          " + "smithy.api#documentation": "

          The time when the pipeline execution was created.

          " } }, - "Url": { - "target": "com.amazonaws.sagemaker#String1024", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The domain's URL.

          " + "smithy.api#documentation": "

          The time when the pipeline execution was modified last.

          " } + }, + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" } - }, - "traits": { - "smithy.api#documentation": "

          The domain's details.

          " } }, - "com.amazonaws.sagemaker#DomainId": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "com.amazonaws.sagemaker#DescribePipelineRequest": { + "type": "structure", + "members": { + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The name of the pipeline to describe.

          ", + "smithy.api#required": {} + } } } }, - "com.amazonaws.sagemaker#DomainList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#DomainDetails" - } - }, - "com.amazonaws.sagemaker#DomainName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "com.amazonaws.sagemaker#DescribePipelineResponse": { + "type": "structure", + "members": { + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline.

          " + } }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#DomainStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Deleting", - "name": "Deleting" - }, - { - "value": "Failed", - "name": "Failed" - }, - { - "value": "InService", - "name": "InService" - }, - { - "value": "Pending", - "name": "Pending" - }, - { - "value": "Updating", - "name": "Updating" - }, - { - "value": "Update_Failed", - "name": "Update_Failed" - }, - { - "value": "Delete_Failed", - "name": "Delete_Failed" + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The name of the pipeline.

          " } - ] - } - }, - "com.amazonaws.sagemaker#DoubleParameterValue": { - "type": "double", - "traits": { - "smithy.api#box": {} - } - }, - "com.amazonaws.sagemaker#EfsUid": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 10 }, - "smithy.api#pattern": "\\d+" - } - }, - "com.amazonaws.sagemaker#EnableCapture": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#EndpointArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 20, - "max": 2048 + "PipelineDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The display name of the pipeline.

          " + } }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint/.*" - } - }, - "com.amazonaws.sagemaker#EndpointConfigArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 20, - "max": 2048 + "PipelineDefinition": { + "target": "com.amazonaws.sagemaker#PipelineDefinition", + "traits": { + "smithy.api#documentation": "

          The JSON pipeline definition.

          " + } }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint-config/.*" - } - }, - "com.amazonaws.sagemaker#EndpointConfigName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "PipelineDescription": { + "target": "com.amazonaws.sagemaker#PipelineDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline.

          " + } }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#EndpointConfigNameContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) that the pipeline uses to execute.

          " + } }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" - } - }, - "com.amazonaws.sagemaker#EndpointConfigSortKey": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "Name" - }, - { - "value": "CreationTime", - "name": "CreationTime" + "PipelineStatus": { + "target": "com.amazonaws.sagemaker#PipelineStatus", + "traits": { + "smithy.api#documentation": "

          The status of the pipeline execution.

          " } - ] - } - }, - "com.amazonaws.sagemaker#EndpointConfigSummary": { - "type": "structure", - "members": { - "EndpointConfigName": { - "target": "com.amazonaws.sagemaker#EndpointConfigName", + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The name of the endpoint configuration.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time when the pipeline was created.

          " } }, - "EndpointConfigArn": { - "target": "com.amazonaws.sagemaker#EndpointConfigArn", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint configuration.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time when the pipeline was last modified.

          " } }, - "CreationTime": { + "LastRunTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the endpoint configuration was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time when the pipeline was last run.

          " } + }, + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" } + } + }, + "com.amazonaws.sagemaker#DescribeProcessingJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeProcessingJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeProcessingJobResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Provides summary information for an endpoint configuration.

          " + "smithy.api#documentation": "

          Returns a description of a processing job.

          " } }, - "com.amazonaws.sagemaker#EndpointConfigSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#EndpointConfigSummary" + "com.amazonaws.sagemaker#DescribeProcessingJobRequest": { + "type": "structure", + "members": { + "ProcessingJobName": { + "target": "com.amazonaws.sagemaker#ProcessingJobName", + "traits": { + "smithy.api#documentation": "

          The name of the processing job. The name must be unique within an AWS Region in the\n AWS account.

          ", + "smithy.api#required": {} + } + } } }, - "com.amazonaws.sagemaker#EndpointInput": { + "com.amazonaws.sagemaker#DescribeProcessingJobResponse": { "type": "structure", "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + "ProcessingInputs": { + "target": "com.amazonaws.sagemaker#ProcessingInputs", "traits": { - "smithy.api#documentation": "

          An endpoint in customer's account which has enabled DataCaptureConfig\n enabled.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The inputs for a processing job.

          " } }, - "LocalPath": { - "target": "com.amazonaws.sagemaker#ProcessingLocalPath", + "ProcessingOutputConfig": { + "target": "com.amazonaws.sagemaker#ProcessingOutputConfig", "traits": { - "smithy.api#documentation": "

          Path to the filesystem where the endpoint data is available to the container.

          ", + "smithy.api#documentation": "

          Output configuration for the processing job.

          " + } + }, + "ProcessingJobName": { + "target": "com.amazonaws.sagemaker#ProcessingJobName", + "traits": { + "smithy.api#documentation": "

          The name of the processing job. The name must be unique within an AWS Region in the\n AWS account.

          ", "smithy.api#required": {} } }, - "S3InputMode": { - "target": "com.amazonaws.sagemaker#ProcessingS3InputMode", + "ProcessingResources": { + "target": "com.amazonaws.sagemaker#ProcessingResources", "traits": { - "smithy.api#documentation": "

          Whether the Pipe or File is used as the input mode for\n transfering data for the monitoring job. Pipe mode is recommended for large\n datasets. File mode is useful for small files that fit in memory. Defaults to\n File.

          " + "smithy.api#documentation": "

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a\n processing job. In distributed training, you specify more than one instance.

          ", + "smithy.api#required": {} } }, - "S3DataDistributionType": { - "target": "com.amazonaws.sagemaker#ProcessingS3DataDistributionType", + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#ProcessingStoppingCondition", "traits": { - "smithy.api#documentation": "

          Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key.\n Defauts to FullyReplicated\n

          " + "smithy.api#documentation": "

          The time limit for how long the processing job is allowed to run.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Input object for the endpoint

          " - } - }, - "com.amazonaws.sagemaker#EndpointName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#EndpointNameContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "AppSpecification": { + "target": "com.amazonaws.sagemaker#AppSpecification", + "traits": { + "smithy.api#documentation": "

          Configures the processing job to run a specified container image.

          ", + "smithy.api#required": {} + } }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" - } - }, - "com.amazonaws.sagemaker#EndpointSortKey": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "Name" - }, - { - "value": "CreationTime", - "name": "CreationTime" - }, - { - "value": "Status", - "name": "Status" + "Environment": { + "target": "com.amazonaws.sagemaker#ProcessingEnvironmentMap", + "traits": { + "smithy.api#documentation": "

          The environment variables set in the Docker container.

          " } - ] - } - }, - "com.amazonaws.sagemaker#EndpointStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "OutOfService", - "name": "OUT_OF_SERVICE" - }, - { - "value": "Creating", - "name": "CREATING" - }, - { - "value": "Updating", - "name": "UPDATING" - }, - { - "value": "SystemUpdating", - "name": "SYSTEM_UPDATING" - }, - { - "value": "RollingBack", - "name": "ROLLING_BACK" - }, - { - "value": "InService", - "name": "IN_SERVICE" - }, - { - "value": "Deleting", - "name": "DELETING" - }, - { - "value": "Failed", - "name": "FAILED" + }, + "NetworkConfig": { + "target": "com.amazonaws.sagemaker#NetworkConfig", + "traits": { + "smithy.api#documentation": "

          Networking options for a processing job.

          " } - ] - } - }, - "com.amazonaws.sagemaker#EndpointSummary": { - "type": "structure", - "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          The name of the endpoint.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on\n your behalf.

          " + } + }, + "ExperimentConfig": { + "target": "com.amazonaws.sagemaker#ExperimentConfig", + "traits": { + "smithy.api#documentation": "

          The configuration information used to create an experiment.

          " + } + }, + "ProcessingJobArn": { + "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the processing job.

          ", "smithy.api#required": {} } }, - "EndpointArn": { - "target": "com.amazonaws.sagemaker#EndpointArn", + "ProcessingJobStatus": { + "target": "com.amazonaws.sagemaker#ProcessingJobStatus", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", + "smithy.api#documentation": "

          Provides the status of a processing job.

          ", "smithy.api#required": {} } }, - "CreationTime": { + "ExitMessage": { + "target": "com.amazonaws.sagemaker#ExitMessage", + "traits": { + "smithy.api#documentation": "

          An optional string, up to one KB in size, that contains metadata from the processing\n container when the processing job exits.

          " + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          A string, up to one KB in size, that contains the reason a processing job failed, if\n it failed.

          " + } + }, + "ProcessingEndTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the endpoint was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time at which the processing job completed.

          " + } + }, + "ProcessingStartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The time at which the processing job started.

          " } }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the endpoint was last modified.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time at which the processing job was last modified.

          " } }, - "EndpointStatus": { - "target": "com.amazonaws.sagemaker#EndpointStatus", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The status of the endpoint.

          \n
            \n
          • \n

            \n OutOfService: Endpoint is not available to take incoming\n requests.

            \n
          • \n
          • \n

            \n Creating: CreateEndpoint is executing.

            \n
          • \n
          • \n

            \n Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            \n
          • \n
          • \n

            \n SystemUpdating: Endpoint is undergoing maintenance and cannot be\n updated or deleted or re-scaled until it has completed. This maintenance\n operation does not change any customer-specified values such as VPC config, KMS\n encryption, model, instance type, or instance count.

            \n
          • \n
          • \n

            \n RollingBack: Endpoint fails to scale up or down or change its\n variant weight and is in the process of rolling back to its previous\n configuration. Once the rollback completes, endpoint returns to an\n InService status. This transitional status only applies to an\n endpoint that has autoscaling enabled and is undergoing variant weight or\n capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called\n explicitly.

            \n
          • \n
          • \n

            \n InService: Endpoint is available to process incoming\n requests.

            \n
          • \n
          • \n

            \n Deleting: DeleteEndpoint is executing.

            \n
          • \n
          • \n

            \n Failed: Endpoint could not be created, updated, or re-scaled. Use\n DescribeEndpointOutput$FailureReason for information about\n the failure. DeleteEndpoint is the only operation that can be\n performed on a failed endpoint.

            \n
          • \n
          \n

          To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

          ", + "smithy.api#documentation": "

          The time at which the processing job was created.

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          Provides summary information for an endpoint.

          " - } - }, - "com.amazonaws.sagemaker#EndpointSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#EndpointSummary" - } - }, - "com.amazonaws.sagemaker#EntityDescription": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 }, - "smithy.api#pattern": "[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*" - } - }, - "com.amazonaws.sagemaker#EntityName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 63 + "MonitoringScheduleArn": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", + "traits": { + "smithy.api#documentation": "

          The ARN of a monitoring schedule for an endpoint associated with this processing\n job.

          " + } }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*$" - } - }, - "com.amazonaws.sagemaker#EnvironmentKey": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 + "AutoMLJobArn": { + "target": "com.amazonaws.sagemaker#AutoMLJobArn", + "traits": { + "smithy.api#documentation": "

          The ARN of an AutoML job associated with this processing job.

          " + } }, - "smithy.api#pattern": "[a-zA-Z_][a-zA-Z0-9_]*" + "TrainingJobArn": { + "target": "com.amazonaws.sagemaker#TrainingJobArn", + "traits": { + "smithy.api#documentation": "

          The ARN of a training job associated with this processing job.

          " + } + } } }, - "com.amazonaws.sagemaker#EnvironmentMap": { - "type": "map", - "key": { - "target": "com.amazonaws.sagemaker#EnvironmentKey" + "com.amazonaws.sagemaker#DescribeProject": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeProjectInput" }, - "value": { - "target": "com.amazonaws.sagemaker#EnvironmentValue" + "output": { + "target": "com.amazonaws.sagemaker#DescribeProjectOutput" }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 16 - } - } - }, - "com.amazonaws.sagemaker#EnvironmentValue": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - }, - "smithy.api#pattern": "[\\S\\s]*" + "smithy.api#documentation": "

          Describes the details of a project.

          " } }, - "com.amazonaws.sagemaker#ExecutionStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Pending", - "name": "PENDING" - }, - { - "value": "Completed", - "name": "COMPLETED" - }, - { - "value": "CompletedWithViolations", - "name": "COMPLETED_WITH_VIOLATIONS" - }, - { - "value": "InProgress", - "name": "IN_PROGRESS" - }, - { - "value": "Failed", - "name": "FAILED" - }, - { - "value": "Stopping", - "name": "STOPPING" - }, - { - "value": "Stopped", - "name": "STOPPED" + "com.amazonaws.sagemaker#DescribeProjectInput": { + "type": "structure", + "members": { + "ProjectName": { + "target": "com.amazonaws.sagemaker#ProjectEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the project to describe.

          ", + "smithy.api#required": {} } - ] - } - }, - "com.amazonaws.sagemaker#ExitMessage": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - }, - "smithy.api#pattern": "[\\S\\s]*" + } } }, - "com.amazonaws.sagemaker#Experiment": { + "com.amazonaws.sagemaker#DescribeProjectOutput": { "type": "structure", "members": { - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ProjectArn": { + "target": "com.amazonaws.sagemaker#ProjectArn", "traits": { - "smithy.api#documentation": "

          The name of the experiment.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the project.

          ", + "smithy.api#required": {} } }, - "ExperimentArn": { - "target": "com.amazonaws.sagemaker#ExperimentArn", + "ProjectName": { + "target": "com.amazonaws.sagemaker#ProjectEntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " + "smithy.api#documentation": "

          The name of the project.

          ", + "smithy.api#required": {} } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ProjectId": { + "target": "com.amazonaws.sagemaker#ProjectId", "traits": { - "smithy.api#documentation": "

          The name of the experiment as displayed. If DisplayName isn't specified,\n ExperimentName is displayed.

          " + "smithy.api#documentation": "

          The ID of the project.

          ", + "smithy.api#required": {} } }, - "Source": { - "target": "com.amazonaws.sagemaker#ExperimentSource" + "ProjectDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", + "traits": { + "smithy.api#documentation": "

          The description of the project.

          " + } }, - "Description": { - "target": "com.amazonaws.sagemaker#ExperimentDescription", + "ServiceCatalogProvisioningDetails": { + "target": "com.amazonaws.sagemaker#ServiceCatalogProvisioningDetails", "traits": { - "smithy.api#documentation": "

          The description of the experiment.

          " + "smithy.api#documentation": "

          Information used to provision a service catalog product. For information, see What is AWS Service\n Catalog.

          ", + "smithy.api#required": {} } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ServiceCatalogProvisionedProductDetails": { + "target": "com.amazonaws.sagemaker#ServiceCatalogProvisionedProductDetails", "traits": { - "smithy.api#documentation": "

          When the experiment was created.

          " + "smithy.api#documentation": "

          Information about a provisioned service catalog product.

          " + } + }, + "ProjectStatus": { + "target": "com.amazonaws.sagemaker#ProjectStatus", + "traits": { + "smithy.api#documentation": "

          The status of the project.

          ", + "smithy.api#required": {} } }, "CreatedBy": { "target": "com.amazonaws.sagemaker#UserContext" }, - "LastModifiedTime": { + "CreationTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the experiment was last modified.

          " - } - }, - "LastModifiedBy": { - "target": "com.amazonaws.sagemaker#UserContext" - }, - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", - "traits": { - "smithy.api#documentation": "

          The list of tags that are associated with the experiment. You can use Search API to search on the tags.

          " + "smithy.api#documentation": "

          The time when the project was created.

          ", + "smithy.api#required": {} } } - }, - "traits": { - "smithy.api#documentation": "

          The properties of an experiment as returned by the Search API.

          " } }, - "com.amazonaws.sagemaker#ExperimentArn": { - "type": "string", + "com.amazonaws.sagemaker#DescribeSubscribedWorkteam": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeSubscribedWorkteamRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeSubscribedWorkteamResponse" + }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:experiment/.*" + "smithy.api#documentation": "

          Gets information about a work team provided by a vendor. It returns details about the\n subscription with a vendor in the AWS Marketplace.

          " } }, - "com.amazonaws.sagemaker#ExperimentConfig": { + "com.amazonaws.sagemaker#DescribeSubscribedWorkteamRequest": { "type": "structure", "members": { - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", - "traits": { - "smithy.api#documentation": "

          The name of an existing experiment to associate the trial component with.

          " - } - }, - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", - "traits": { - "smithy.api#documentation": "

          The name of an existing trial to associate the trial component with. If not specified, a\n new trial is created.

          " - } - }, - "TrialComponentDisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "WorkteamArn": { + "target": "com.amazonaws.sagemaker#WorkteamArn", "traits": { - "smithy.api#documentation": "

          The display name for the trial component. If this key isn't specified, the display name is\n the trial component name.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the subscribed work team to describe.

          ", + "smithy.api#required": {} } } - }, - "traits": { - "smithy.api#documentation": "

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when\n you call the following APIs:

          \n " - } - }, - "com.amazonaws.sagemaker#ExperimentDescription": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 3072 - }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#ExperimentEntityName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 120 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" } }, - "com.amazonaws.sagemaker#ExperimentSource": { + "com.amazonaws.sagemaker#DescribeSubscribedWorkteamResponse": { "type": "structure", "members": { - "SourceArn": { - "target": "com.amazonaws.sagemaker#ExperimentSourceArn", + "SubscribedWorkteam": { + "target": "com.amazonaws.sagemaker#SubscribedWorkteam", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the source.

          ", + "smithy.api#documentation": "

          A Workteam instance that contains information about the work team.

          ", "smithy.api#required": {} } - }, - "SourceType": { - "target": "com.amazonaws.sagemaker#SourceType", - "traits": { - "smithy.api#documentation": "

          The source type.

          " - } } - }, - "traits": { - "smithy.api#documentation": "

          The source of the experiment.

          " } }, - "com.amazonaws.sagemaker#ExperimentSourceArn": { - "type": "string", + "com.amazonaws.sagemaker#DescribeTrainingJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeTrainingJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeTrainingJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:.*" + "smithy.api#documentation": "

          Returns information about a training job.

          " } }, - "com.amazonaws.sagemaker#ExperimentSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ExperimentSummary" + "com.amazonaws.sagemaker#DescribeTrainingJobRequest": { + "type": "structure", + "members": { + "TrainingJobName": { + "target": "com.amazonaws.sagemaker#TrainingJobName", + "traits": { + "smithy.api#documentation": "

          The name of the training job.

          ", + "smithy.api#required": {} + } + } } }, - "com.amazonaws.sagemaker#ExperimentSummary": { + "com.amazonaws.sagemaker#DescribeTrainingJobResponse": { "type": "structure", "members": { - "ExperimentArn": { - "target": "com.amazonaws.sagemaker#ExperimentArn", + "TrainingJobName": { + "target": "com.amazonaws.sagemaker#TrainingJobName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " + "smithy.api#documentation": "

          Name of the model training job.

          ", + "smithy.api#required": {} } }, - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "TrainingJobArn": { + "target": "com.amazonaws.sagemaker#TrainingJobArn", "traits": { - "smithy.api#documentation": "

          The name of the experiment.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the training job.

          ", + "smithy.api#required": {} } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "TuningJobArn": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", "traits": { - "smithy.api#documentation": "

          The name of the experiment as displayed. If DisplayName isn't specified,\n ExperimentName is displayed.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the\n training job was launched by a hyperparameter tuning job.

          " } }, - "ExperimentSource": { - "target": "com.amazonaws.sagemaker#ExperimentSource" - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "LabelingJobArn": { + "target": "com.amazonaws.sagemaker#LabelingJobArn", "traits": { - "smithy.api#documentation": "

          When the experiment was created.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the\n transform or training job.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          When the experiment was last modified.

          " - } - } - }, - "traits": { - "smithy.api#documentation": "

          A summary of the properties of an experiment. To get the complete set of properties, call\n the DescribeExperiment API and provide the\n ExperimentName.

          " - } - }, - "com.amazonaws.sagemaker#FailureReason": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - } - } - }, - "com.amazonaws.sagemaker#FileSystemAccessMode": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "rw", - "name": "RW" - }, - { - "value": "ro", - "name": "RO" - } - ] - } - }, - "com.amazonaws.sagemaker#FileSystemConfig": { - "type": "structure", - "members": { - "MountPath": { - "target": "com.amazonaws.sagemaker#MountPath", + "AutoMLJobArn": { + "target": "com.amazonaws.sagemaker#AutoMLJobArn", "traits": { - "smithy.api#documentation": "

          The path within the image to mount the user's EFS home directory. The directory\n should be empty. If not specified, defaults to /home/sagemaker-user.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an AutoML job.

          " } }, - "DefaultUid": { - "target": "com.amazonaws.sagemaker#DefaultUid", + "ModelArtifacts": { + "target": "com.amazonaws.sagemaker#ModelArtifacts", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The default POSIX user ID (UID). If not specified, defaults to 1000.

          " + "smithy.api#documentation": "

          Information about the Amazon S3 location that is configured for storing model artifacts.\n

          ", + "smithy.api#required": {} } }, - "DefaultGid": { - "target": "com.amazonaws.sagemaker#DefaultGid", + "TrainingJobStatus": { + "target": "com.amazonaws.sagemaker#TrainingJobStatus", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The default POSIX group ID (GID). If not specified, defaults to 100.

          " + "smithy.api#documentation": "

          The status of the\n training\n job.

          \n

          Amazon SageMaker provides the following training job statuses:

          \n
            \n
          • \n

            \n InProgress - The training is in progress.

            \n
          • \n
          • \n

            \n Completed - The training job has completed.

            \n
          • \n
          • \n

            \n Failed - The training job has failed. To see the reason for the\n failure, see the FailureReason field in the response to a\n DescribeTrainingJobResponse call.

            \n
          • \n
          • \n

            \n Stopping - The training job is stopping.

            \n
          • \n
          • \n

            \n Stopped - The training job has stopped.

            \n
          • \n
          \n

          For\n more detailed information, see SecondaryStatus.

          ", + "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          The Amazon Elastic File System (EFS) storage configuration for a SageMaker image.

          " - } - }, - "com.amazonaws.sagemaker#FileSystemDataSource": { - "type": "structure", - "members": { - "FileSystemId": { - "target": "com.amazonaws.sagemaker#FileSystemId", + }, + "SecondaryStatus": { + "target": "com.amazonaws.sagemaker#SecondaryStatus", "traits": { - "smithy.api#documentation": "

          The file system id.

          ", + "smithy.api#documentation": "

          Provides detailed information about the state of the training job. For detailed\n information on the secondary status of the training job, see StatusMessage\n under SecondaryStatusTransition.

          \n

          Amazon SageMaker provides primary statuses and secondary statuses that apply to each of\n them:

          \n
          \n
          InProgress
          \n
          \n
            \n
          • \n

            \n Starting\n - Starting the training job.

            \n
          • \n
          • \n

            \n Downloading - An optional stage for algorithms that\n support File training input mode. It indicates that\n data is being downloaded to the ML storage volumes.

            \n
          • \n
          • \n

            \n Training - Training is in progress.

            \n
          • \n
          • \n

            \n Interrupted - The job stopped because the managed\n spot training instances were interrupted.

            \n
          • \n
          • \n

            \n Uploading - Training is complete and the model\n artifacts are being uploaded to the S3 location.

            \n
          • \n
          \n
          \n
          Completed
          \n
          \n
            \n
          • \n

            \n Completed - The training job has completed.

            \n
          • \n
          \n
          \n
          Failed
          \n
          \n
            \n
          • \n

            \n Failed - The training job has failed. The reason for\n the failure is returned in the FailureReason field of\n DescribeTrainingJobResponse.

            \n
          • \n
          \n
          \n
          Stopped
          \n
          \n
            \n
          • \n

            \n MaxRuntimeExceeded - The job stopped because it\n exceeded the maximum allowed runtime.

            \n
          • \n
          • \n

            \n MaxWaitTimeExceeded - The job stopped because it\n exceeded the maximum allowed wait time.

            \n
          • \n
          • \n

            \n Stopped - The training job has stopped.

            \n
          • \n
          \n
          \n
          Stopping
          \n
          \n
            \n
          • \n

            \n Stopping - Stopping the training job.

            \n
          • \n
          \n
          \n
          \n \n \n

          Valid values for SecondaryStatus are subject to change.

          \n
          \n

          We no longer support the following secondary statuses:

          \n
            \n
          • \n

            \n LaunchingMLInstances\n

            \n
          • \n
          • \n

            \n PreparingTrainingStack\n

            \n
          • \n
          • \n

            \n DownloadingTrainingImage\n

            \n
          • \n
          ", "smithy.api#required": {} } }, - "FileSystemAccessMode": { - "target": "com.amazonaws.sagemaker#FileSystemAccessMode", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The access mode of the mount of the directory associated with the channel. A directory\n can be mounted either in ro (read-only) or rw (read-write)\n mode.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If the training job failed, the reason it failed.

          " } }, - "FileSystemType": { - "target": "com.amazonaws.sagemaker#FileSystemType", + "HyperParameters": { + "target": "com.amazonaws.sagemaker#HyperParameters", "traits": { - "smithy.api#documentation": "

          The file system type.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Algorithm-specific parameters.

          " } }, - "DirectoryPath": { - "target": "com.amazonaws.sagemaker#DirectoryPath", + "AlgorithmSpecification": { + "target": "com.amazonaws.sagemaker#AlgorithmSpecification", "traits": { - "smithy.api#documentation": "

          The full path to the directory to associate with the channel.

          ", + "smithy.api#documentation": "

          Information about the algorithm used for training, and algorithm metadata.\n

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies a file system data source for a channel.

          " - } - }, - "com.amazonaws.sagemaker#FileSystemId": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 11 }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#FileSystemType": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "EFS", - "name": "EFS" - }, - { - "value": "FSxLustre", - "name": "FSXLUSTRE" - } - ] - } - }, - "com.amazonaws.sagemaker#Filter": { - "type": "structure", - "members": { - "Name": { - "target": "com.amazonaws.sagemaker#ResourcePropertyName", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          A resource property name. For example, TrainingJobName. For\n valid property names, see SearchRecord.\n You must specify a valid property for the resource.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The AWS Identity and Access Management (IAM) role configured for the training job.

          " } }, - "Operator": { - "target": "com.amazonaws.sagemaker#Operator", + "InputDataConfig": { + "target": "com.amazonaws.sagemaker#InputDataConfig", "traits": { - "smithy.api#documentation": "

          A Boolean binary operator that is used to evaluate the filter. The operator field\n contains one of the following values:

          \n
          \n
          Equals
          \n
          \n

          The value of Name equals Value.

          \n
          \n
          NotEquals
          \n
          \n

          The value of Name doesn't equal Value.

          \n
          \n
          Exists
          \n
          \n

          The Name property exists.

          \n
          \n
          NotExists
          \n
          \n

          The Name property does not exist.

          \n
          \n
          GreaterThan
          \n
          \n

          The value of Name is greater than Value.\n Not supported for text properties.

          \n
          \n
          GreaterThanOrEqualTo
          \n
          \n

          The value of Name is greater than or equal to Value.\n Not supported for text properties.

          \n
          \n
          LessThan
          \n
          \n

          The value of Name is less than Value.\n Not supported for text properties.

          \n
          \n
          LessThanOrEqualTo
          \n
          \n

          The value of Name is less than or equal to Value.\n Not supported for text properties.

          \n
          \n
          In
          \n
          \n

          The value of Name is one of the comma delimited strings in\n Value. Only supported for text properties.

          \n
          \n
          Contains
          \n
          \n

          The value of Name contains the string Value.\n Only supported for text properties.

          \n

          A SearchExpression can include the Contains operator\n multiple times when the value of Name is one of the following:

          \n
            \n
          • \n

            \n Experiment.DisplayName\n

            \n
          • \n
          • \n

            \n Experiment.ExperimentName\n

            \n
          • \n
          • \n

            \n Experiment.Tags\n

            \n
          • \n
          • \n

            \n Trial.DisplayName\n

            \n
          • \n
          • \n

            \n Trial.TrialName\n

            \n
          • \n
          • \n

            \n Trial.Tags\n

            \n
          • \n
          • \n

            \n TrialComponent.DisplayName\n

            \n
          • \n
          • \n

            \n TrialComponent.TrialComponentName\n

            \n
          • \n
          • \n

            \n TrialComponent.Tags\n

            \n
          • \n
          • \n

            \n TrialComponent.InputArtifacts\n

            \n
          • \n
          • \n

            \n TrialComponent.OutputArtifacts\n

            \n
          • \n
          \n

          A SearchExpression can include only one Contains operator\n for all other values of Name. In these cases, if you include multiple\n Contains operators in the SearchExpression, the result is\n the following error message: \"'CONTAINS' operator usage limit of 1\n exceeded.\"

          \n
          \n
          " + "smithy.api#documentation": "

          An array of Channel objects that describes each data input channel.\n

          " } }, - "Value": { - "target": "com.amazonaws.sagemaker#FilterValue", + "OutputDataConfig": { + "target": "com.amazonaws.sagemaker#OutputDataConfig", "traits": { - "smithy.api#documentation": "

          A value used with Name and Operator to determine which\n resources satisfy the filter's condition. For numerical properties, Value\n must be an integer or floating-point decimal. For timestamp properties,\n Value must be an ISO 8601 date-time string of the following format:\n YYYY-mm-dd'T'HH:MM:SS.

          " + "smithy.api#documentation": "

          The S3 path where model artifacts that you configured when creating the job are\n stored. Amazon SageMaker creates subfolders for model artifacts.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          A conditional statement for a search expression that includes a resource property, a\n Boolean operator, and a value. Resources that match the statement are returned in the\n results from the Search API.

          \n \n

          If you specify a Value, but not an Operator, Amazon SageMaker uses the\n equals operator.

          \n

          In search, there are several property types:

          \n
          \n
          Metrics
          \n
          \n

          To define a metric filter, enter a value using the form\n \"Metrics.\", where is\n a metric name. For example, the following filter searches for training jobs\n with an \"accuracy\" metric greater than\n \"0.9\":

          \n

          \n {\n

          \n

          \n \"Name\": \"Metrics.accuracy\",\n

          \n

          \n \"Operator\": \"GreaterThan\",\n

          \n

          \n \"Value\": \"0.9\"\n

          \n

          \n }\n

          \n
          \n
          HyperParameters
          \n
          \n

          To define a hyperparameter filter, enter a value with the form\n \"HyperParameters.\". Decimal hyperparameter\n values are treated as a decimal in a comparison if the specified\n Value is also a decimal value. If the specified\n Value is an integer, the decimal hyperparameter values are\n treated as integers. For example, the following filter is satisfied by\n training jobs with a \"learning_rate\" hyperparameter that is\n less than \"0.5\":

          \n

          \n {\n

          \n

          \n \"Name\": \"HyperParameters.learning_rate\",\n

          \n

          \n \"Operator\": \"LessThan\",\n

          \n

          \n \"Value\": \"0.5\"\n

          \n

          \n }\n

          \n
          \n
          Tags
          \n
          \n

          To define a tag filter, enter a value with the form\n Tags..

          \n
          \n
          " - } - }, - "com.amazonaws.sagemaker#FilterList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#Filter" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#FilterValue": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 1024 }, - "smithy.api#pattern": ".+" - } - }, - "com.amazonaws.sagemaker#FinalAutoMLJobObjectiveMetric": { - "type": "structure", - "members": { - "Type": { - "target": "com.amazonaws.sagemaker#AutoMLJobObjectiveType", + "ResourceConfig": { + "target": "com.amazonaws.sagemaker#ResourceConfig", "traits": { - "smithy.api#documentation": "

          The type of metric with the best result.

          " + "smithy.api#documentation": "

          Resources, including ML compute instances and ML storage volumes, that are\n configured for model training.

          ", + "smithy.api#required": {} } }, - "MetricName": { - "target": "com.amazonaws.sagemaker#AutoMLMetricEnum", + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig", "traits": { - "smithy.api#documentation": "

          The name of the metric with the best result. For a description of the possible objective\n metrics, see AutoMLJobObjective$MetricName.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A VpcConfig object that specifies the VPC that this training job has\n access to. For more information, see Protect Training Jobs by Using an Amazon\n Virtual Private Cloud.

          " } }, - "Value": { - "target": "com.amazonaws.sagemaker#MetricValue", + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#StoppingCondition", "traits": { - "smithy.api#documentation": "

          The value of the metric with the best result.

          ", + "smithy.api#documentation": "

          Specifies a limit to how long a model training job can run. It also specifies the\n maximum time to wait for a spot instance. When the job reaches the time limit, Amazon SageMaker ends\n the training job. Use this API to cap model training costs.

          \n

          To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays\n job termination for 120 seconds. Algorithms can use this 120-second window to save the\n model artifacts, so the results of training are not lost.

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          The best candidate result from an AutoML training job.

          " - } - }, - "com.amazonaws.sagemaker#FinalHyperParameterTuningJobObjectiveMetric": { - "type": "structure", - "members": { - "Type": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjectiveType", - "traits": { - "smithy.api#documentation": "

          Whether to\n minimize\n or maximize the objective metric. Valid values are Minimize and\n Maximize.

          " - } }, - "MetricName": { - "target": "com.amazonaws.sagemaker#MetricName", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The name of the\n objective\n metric.

          ", + "smithy.api#documentation": "

          A timestamp that indicates when the training job was created.

          ", "smithy.api#required": {} } }, - "Value": { - "target": "com.amazonaws.sagemaker#MetricValue", + "TrainingStartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The value of the objective metric.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Indicates the time when the training job starts on training instances. You are\n billed for the time interval between this time and the value of\n TrainingEndTime. The start time in CloudWatch Logs might be later than this time.\n The difference is due to the time it takes to download the training data and to the size\n of the training container.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Shows the final value for the\n objective\n metric for a training job that was launched by a hyperparameter\n tuning job. You define the objective metric in the\n HyperParameterTuningJobObjective parameter of HyperParameterTuningJobConfig.

          " - } - }, - "com.amazonaws.sagemaker#FinalMetricDataList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#MetricData" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 40 - } - } - }, - "com.amazonaws.sagemaker#Float": { - "type": "float" - }, - "com.amazonaws.sagemaker#FlowDefinitionArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:flow-definition/.*" - } - }, - "com.amazonaws.sagemaker#FlowDefinitionName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 63 }, - "smithy.api#pattern": "^[a-z0-9](-*[a-z0-9])*" - } - }, - "com.amazonaws.sagemaker#FlowDefinitionOutputConfig": { - "type": "structure", - "members": { - "S3OutputPath": { - "target": "com.amazonaws.sagemaker#S3Uri", + "TrainingEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon S3 path where the object containing human output will be made available.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Indicates the time when the training job ends on training instances. You are billed\n for the time interval between the value of TrainingStartTime and this time.\n For successful jobs and stopped jobs, this is the time after model artifacts are\n uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          " } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Key Management Service (KMS) key ID for server-side encryption.

          " - } - } - }, - "traits": { - "smithy.api#documentation": "

          Contains information about where human output will be stored.

          " - } - }, - "com.amazonaws.sagemaker#FlowDefinitionStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Initializing", - "name": "INITIALIZING" - }, - { - "value": "Active", - "name": "ACTIVE" - }, - { - "value": "Failed", - "name": "FAILED" - }, - { - "value": "Deleting", - "name": "DELETING" + "smithy.api#documentation": "

          A timestamp that indicates when the status of the training job was last\n modified.

          " } - ] - } - }, - "com.amazonaws.sagemaker#FlowDefinitionSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#FlowDefinitionSummary" - } - }, - "com.amazonaws.sagemaker#FlowDefinitionSummary": { - "type": "structure", - "members": { - "FlowDefinitionName": { - "target": "com.amazonaws.sagemaker#FlowDefinitionName", + }, + "SecondaryStatusTransitions": { + "target": "com.amazonaws.sagemaker#SecondaryStatusTransitions", "traits": { - "smithy.api#documentation": "

          The name of the flow definition.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A history of all of the secondary statuses that the training job has transitioned\n through.

          " } }, - "FlowDefinitionArn": { - "target": "com.amazonaws.sagemaker#FlowDefinitionArn", + "FinalMetricDataList": { + "target": "com.amazonaws.sagemaker#FinalMetricDataList", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A collection of MetricData objects that specify the names, values, and\n dates and times that the training algorithm emitted to Amazon CloudWatch.

          " } }, - "FlowDefinitionStatus": { - "target": "com.amazonaws.sagemaker#FlowDefinitionStatus", + "EnableNetworkIsolation": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The status of the flow definition. Valid values:

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If you want to allow inbound or outbound network calls, except for calls between peers\n within a training cluster for distributed training, choose True. If you\n enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker\n downloads and uploads customer data and model artifacts through the specified VPC, but\n the training container does not have network access.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "EnableInterContainerTrafficEncryption": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The timestamp when SageMaker created the flow definition.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          To encrypt all communications between ML compute instances in distributed training,\n choose True. Encryption provides greater security for distributed training,\n but training might take longer. How long it takes depends on the amount of communication\n between compute instances, especially if you use a deep learning algorithms in\n distributed training.

          " } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "EnableManagedSpotTraining": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The reason why the flow definition creation failed. A failure reason is returned only when the flow definition status is Failed.

          " + "smithy.api#documentation": "

          A Boolean indicating whether managed spot training is enabled (True) or\n not (False).

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Contains summary information about the flow definition.

          " - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskAvailabilityLifetimeInSeconds": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1, - "max": 864000 - } - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskCount": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1, - "max": 3 - } - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskDescription": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 255 }, - "smithy.api#pattern": ".+" - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskKeyword": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 30 + "CheckpointConfig": { + "target": "com.amazonaws.sagemaker#CheckpointConfig" }, - "smithy.api#pattern": "^[A-Za-z0-9]+( [A-Za-z0-9]+)*$" - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskKeywords": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskKeyword" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 5 - } - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskTimeLimitInSeconds": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 30, - "max": 28800 - } - } - }, - "com.amazonaws.sagemaker#FlowDefinitionTaskTitle": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 128 + "TrainingTimeInSeconds": { + "target": "com.amazonaws.sagemaker#TrainingTimeInSeconds", + "traits": { + "smithy.api#documentation": "

          The training time in seconds.

          " + } }, - "smithy.api#pattern": "^[\\t\\n\\r -\\uD7FF\\uE000-\\uFFFD]*$" - } - }, - "com.amazonaws.sagemaker#Framework": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "TENSORFLOW", - "name": "TENSORFLOW" - }, - { - "value": "KERAS", - "name": "KERAS" - }, - { - "value": "MXNET", - "name": "MXNET" - }, - { - "value": "ONNX", - "name": "ONNX" - }, - { - "value": "PYTORCH", - "name": "PYTORCH" - }, - { - "value": "XGBOOST", - "name": "XGBOOST" - }, - { - "value": "TFLITE", - "name": "TFLITE" - }, - { - "value": "DARKNET", - "name": "DARKNET" + "BillableTimeInSeconds": { + "target": "com.amazonaws.sagemaker#BillableTimeInSeconds", + "traits": { + "smithy.api#documentation": "

          The billable time in seconds.

          \n

          You can calculate the savings from using managed spot training using the formula\n (1 - BillableTimeInSeconds / TrainingTimeInSeconds) * 100. For example,\n if BillableTimeInSeconds is 100 and TrainingTimeInSeconds is\n 500, the savings is 80%.

          " } - ] + }, + "DebugHookConfig": { + "target": "com.amazonaws.sagemaker#DebugHookConfig" + }, + "ExperimentConfig": { + "target": "com.amazonaws.sagemaker#ExperimentConfig" + }, + "DebugRuleConfigurations": { + "target": "com.amazonaws.sagemaker#DebugRuleConfigurations", + "traits": { + "smithy.api#documentation": "

          Configuration information for debugging rules.

          " + } + }, + "TensorBoardOutputConfig": { + "target": "com.amazonaws.sagemaker#TensorBoardOutputConfig" + }, + "DebugRuleEvaluationStatuses": { + "target": "com.amazonaws.sagemaker#DebugRuleEvaluationStatuses", + "traits": { + "smithy.api#documentation": "

          Status about the debug rule evaluation.

          " + } + } } }, - "com.amazonaws.sagemaker#GenerateCandidateDefinitionsOnly": { - "type": "boolean" - }, - "com.amazonaws.sagemaker#GetSearchSuggestions": { + "com.amazonaws.sagemaker#DescribeTransformJob": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#GetSearchSuggestionsRequest" + "target": "com.amazonaws.sagemaker#DescribeTransformJobRequest" }, "output": { - "target": "com.amazonaws.sagemaker#GetSearchSuggestionsResponse" + "target": "com.amazonaws.sagemaker#DescribeTransformJobResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          An auto-complete API for the search functionality in the Amazon SageMaker console. It returns\n suggestions of possible matches for the property name to use in Search\n queries. Provides suggestions for HyperParameters, Tags, and\n Metrics.

          " + "smithy.api#documentation": "

          Returns information about a transform job.

          " } }, - "com.amazonaws.sagemaker#GetSearchSuggestionsRequest": { + "com.amazonaws.sagemaker#DescribeTransformJobRequest": { "type": "structure", "members": { - "Resource": { - "target": "com.amazonaws.sagemaker#ResourceType", + "TransformJobName": { + "target": "com.amazonaws.sagemaker#TransformJobName", "traits": { - "smithy.api#documentation": "

          The name of the Amazon SageMaker resource to search for.

          ", + "smithy.api#documentation": "

          The name of the transform job that you want to view details of.

          ", "smithy.api#required": {} } - }, - "SuggestionQuery": { - "target": "com.amazonaws.sagemaker#SuggestionQuery", - "traits": { - "smithy.api#documentation": "

          Limits the property names that are included in the response.

          " - } } } }, - "com.amazonaws.sagemaker#GetSearchSuggestionsResponse": { + "com.amazonaws.sagemaker#DescribeTransformJobResponse": { "type": "structure", "members": { - "PropertyNameSuggestions": { - "target": "com.amazonaws.sagemaker#PropertyNameSuggestionList", + "TransformJobName": { + "target": "com.amazonaws.sagemaker#TransformJobName", "traits": { - "smithy.api#documentation": "

          A list of property names for a Resource that match a\n SuggestionQuery.

          " + "smithy.api#documentation": "

          The name of the transform job.

          ", + "smithy.api#required": {} } - } - } - }, - "com.amazonaws.sagemaker#GitConfig": { - "type": "structure", - "members": { - "RepositoryUrl": { - "target": "com.amazonaws.sagemaker#GitConfigUrl", + }, + "TransformJobArn": { + "target": "com.amazonaws.sagemaker#TransformJobArn", "traits": { - "smithy.api#documentation": "

          The URL where the Git repository is located.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the transform job.

          ", "smithy.api#required": {} } }, - "Branch": { - "target": "com.amazonaws.sagemaker#Branch", + "TransformJobStatus": { + "target": "com.amazonaws.sagemaker#TransformJobStatus", "traits": { - "smithy.api#documentation": "

          The default branch for the Git repository.

          " + "smithy.api#documentation": "

          The\n status of the transform job. If the transform job failed, the reason\n is returned in the FailureReason field.

          ", + "smithy.api#required": {} } }, - "SecretArn": { - "target": "com.amazonaws.sagemaker#SecretArn", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the\n credentials used to access the git repository. The secret must have a staging label of\n AWSCURRENT and must be in the following format:

          \n

          \n {\"username\": UserName, \"password\":\n Password}\n

          " + "smithy.api#documentation": "

          If the transform job failed, FailureReason describes\n why\n it failed. A transform job creates a log file, which includes error\n messages, and stores it\n as\n an Amazon S3 object. For more information, see Log Amazon SageMaker Events with\n Amazon CloudWatch.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies configuration details for a Git repository in your AWS account.

          " - } - }, - "com.amazonaws.sagemaker#GitConfigForUpdate": { - "type": "structure", - "members": { - "SecretArn": { - "target": "com.amazonaws.sagemaker#SecretArn", + }, + "ModelName": { + "target": "com.amazonaws.sagemaker#ModelName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the\n credentials used to access the git repository. The secret must have a staging label of\n AWSCURRENT and must be in the following format:

          \n

          \n {\"username\": UserName, \"password\":\n Password}\n

          " + "smithy.api#documentation": "

          The name of the model used in the transform job.

          ", + "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies configuration details for a Git repository when the repository is\n updated.

          " - } - }, - "com.amazonaws.sagemaker#GitConfigUrl": { - "type": "string", - "traits": { - "smithy.api#pattern": "^https://([^/]+)/?(.*)$" - } - }, - "com.amazonaws.sagemaker#Group": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 63 }, - "smithy.api#pattern": "[\\p{L}\\p{M}\\p{S}\\p{N}\\p{P}]+" - } - }, - "com.amazonaws.sagemaker#Groups": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#Group" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 10 - } - } - }, - "com.amazonaws.sagemaker#HookParameters": { - "type": "map", - "key": { - "target": "com.amazonaws.sagemaker#ConfigKey" - }, - "value": { - "target": "com.amazonaws.sagemaker#ConfigValue" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#HumanLoopActivationConditionsConfig": { - "type": "structure", - "members": { - "HumanLoopActivationConditions": { - "target": "com.amazonaws.sagemaker#SynthesizedJsonHumanLoopActivationConditions", + "MaxConcurrentTransforms": { + "target": "com.amazonaws.sagemaker#MaxConcurrentTransforms", "traits": { - "smithy.api#documentation": "

          JSON expressing use-case specific conditions declaratively. If any condition is matched, atomic tasks are created against the configured work team. \n The set of conditions is different for Rekognition and Textract. For more information about how to structure the JSON, see \n JSON Schema for Human Loop Activation Conditions in Amazon Augmented AI \n in the Amazon SageMaker Developer Guide.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The\n maximum number\n of\n parallel requests on each instance node\n that can be launched in a transform job. The default value is 1.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Defines under what conditions SageMaker creates a human loop. Used within . See for the required\n format of activation conditions.

          " - } - }, - "com.amazonaws.sagemaker#HumanLoopActivationConfig": { - "type": "structure", - "members": { - "HumanLoopActivationConditionsConfig": { - "target": "com.amazonaws.sagemaker#HumanLoopActivationConditionsConfig", + }, + "ModelClientConfig": { + "target": "com.amazonaws.sagemaker#ModelClientConfig", "traits": { - "smithy.api#documentation": "

          Container structure for defining under what conditions SageMaker creates a human loop.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The timeout and maximum number of retries for processing a transform job\n invocation.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Provides information about how and under what conditions SageMaker creates a human loop. If HumanLoopActivationConfig is not given, then all requests go to humans.

          " - } - }, - "com.amazonaws.sagemaker#HumanLoopConfig": { - "type": "structure", - "members": { - "WorkteamArn": { - "target": "com.amazonaws.sagemaker#WorkteamArn", + }, + "MaxPayloadInMB": { + "target": "com.amazonaws.sagemaker#MaxPayloadInMB", "traits": { - "smithy.api#documentation": "

          Amazon Resource Name (ARN) of a team of workers.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The\n maximum\n payload size, in MB, used in the\n transform job.

          " } }, - "HumanTaskUiArn": { - "target": "com.amazonaws.sagemaker#HumanTaskUiArn", + "BatchStrategy": { + "target": "com.amazonaws.sagemaker#BatchStrategy", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human task user interface.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Specifies the number of records to include in a mini-batch for an HTTP inference\n request.\n A record\n is a single unit of input data that inference\n can be made on. For example, a single line in a CSV file is a record.

          \n

          To enable the batch strategy, you must set SplitType\n to\n Line, RecordIO, or\n TFRecord.

          " } }, - "TaskTitle": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskTitle", + "Environment": { + "target": "com.amazonaws.sagemaker#TransformEnvironmentMap", "traits": { - "smithy.api#documentation": "

          A title for the human worker task.

          ", + "smithy.api#documentation": "

          The\n environment variables to set in the Docker container. We support up to 16 key and values\n entries in the map.

          " + } + }, + "TransformInput": { + "target": "com.amazonaws.sagemaker#TransformInput", + "traits": { + "smithy.api#documentation": "

          Describes the dataset to be transformed and the Amazon S3 location where it is\n stored.

          ", "smithy.api#required": {} } }, - "TaskDescription": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskDescription", + "TransformOutput": { + "target": "com.amazonaws.sagemaker#TransformOutput", "traits": { - "smithy.api#documentation": "

          A description for the human worker task.

          ", + "smithy.api#documentation": "

          Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the\n transform job.

          " + } + }, + "TransformResources": { + "target": "com.amazonaws.sagemaker#TransformResources", + "traits": { + "smithy.api#documentation": "

          Describes\n the resources, including ML instance types and ML instance count, to\n use for the transform job.

          ", "smithy.api#required": {} } }, - "TaskCount": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskCount", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The number of distinct workers who will perform the same task on each object.\n For example, if TaskCount is set to 3 for an image classification \n labeling job, three workers will classify each input image. \n Increasing TaskCount can improve label accuracy.

          ", + "smithy.api#documentation": "

          A timestamp that shows when the transform Job was created.

          ", "smithy.api#required": {} } }, - "TaskAvailabilityLifetimeInSeconds": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskAvailabilityLifetimeInSeconds", + "TransformStartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The length of time that a task remains available for review by human workers.

          " + "smithy.api#documentation": "

          Indicates when the transform job starts\n on\n ML instances. You are billed for the time interval between this time\n and the value of TransformEndTime.

          " } }, - "TaskTimeLimitInSeconds": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskTimeLimitInSeconds", + "TransformEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The amount of time that a worker has to complete a task. The default value is 3,600 seconds (1 hour)

          " + "smithy.api#documentation": "

          Indicates when the transform job has been\n \n completed, or has stopped or failed. You are billed for the time\n interval between this time and the value of TransformStartTime.

          " } }, - "TaskKeywords": { - "target": "com.amazonaws.sagemaker#FlowDefinitionTaskKeywords", + "LabelingJobArn": { + "target": "com.amazonaws.sagemaker#LabelingJobArn", "traits": { - "smithy.api#documentation": "

          Keywords used to describe the task so that workers can discover the task.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the\n transform or training job.

          " } }, - "PublicWorkforceTaskPrice": { - "target": "com.amazonaws.sagemaker#PublicWorkforceTaskPrice" + "AutoMLJobArn": { + "target": "com.amazonaws.sagemaker#AutoMLJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AutoML transform job.

          " + } + }, + "DataProcessing": { + "target": "com.amazonaws.sagemaker#DataProcessing" + }, + "ExperimentConfig": { + "target": "com.amazonaws.sagemaker#ExperimentConfig" } + } + }, + "com.amazonaws.sagemaker#DescribeTrial": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeTrialRequest" }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeTrialResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Describes the work to be performed by human workers.

          " + "smithy.api#documentation": "

          Provides a list of a trial's properties.

          " } }, - "com.amazonaws.sagemaker#HumanLoopRequestSource": { + "com.amazonaws.sagemaker#DescribeTrialComponent": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeTrialComponentRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeTrialComponentResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Provides a list of a trials component's properties.

          " + } + }, + "com.amazonaws.sagemaker#DescribeTrialComponentRequest": { "type": "structure", "members": { - "AwsManagedHumanLoopRequestSource": { - "target": "com.amazonaws.sagemaker#AwsManagedHumanLoopRequestSource", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          Specifies whether Amazon Rekognition or Amazon Textract are used as the integration source. \n The default field settings and JSON parsing rules are different based on the integration source. Valid values:

          ", + "smithy.api#documentation": "

          The name of the trial component to describe.

          ", "smithy.api#required": {} } } - }, - "traits": { - "smithy.api#documentation": "

          Container for configuring the source of human task requests.

          " } }, - "com.amazonaws.sagemaker#HumanTaskConfig": { + "com.amazonaws.sagemaker#DescribeTrialComponentResponse": { "type": "structure", "members": { - "WorkteamArn": { - "target": "com.amazonaws.sagemaker#WorkteamArn", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team assigned to complete the tasks.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the trial component.

          " } }, - "UiConfig": { - "target": "com.amazonaws.sagemaker#UiConfig", + "TrialComponentArn": { + "target": "com.amazonaws.sagemaker#TrialComponentArn", "traits": { - "smithy.api#documentation": "

          Information about the user interface that workers use to complete the labeling\n task.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial component.

          " } }, - "PreHumanTaskLambdaArn": { - "target": "com.amazonaws.sagemaker#LambdaFunctionArn", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a Lambda function that is run before a data object\n is sent to a human worker. Use this function to provide input to a custom labeling\n job.

          \n

          For built-in\n task types, use one of the following Amazon SageMaker Ground Truth Lambda function ARNs for\n PreHumanTaskLambdaArn. For custom labeling workflows, see Pre-annotation Lambda.

          \n \n \n\n

          \n Bounding box - Finds the most similar boxes from\n different workers based on the Jaccard index of the boxes.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-BoundingBox\n

            \n
          • \n
          \n \n\n

          \n Image classification - Uses a variant of the Expectation\n Maximization approach to estimate the true class of an image based on\n annotations from individual workers.

          \n\n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-ImageMultiClass\n

            \n
          • \n
          \n \n

          \n Multi-label image classification - Uses a variant of the Expectation\n Maximization approach to estimate the true classes of an image based on\n annotations from individual workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          \n \n

          \n Semantic segmentation - Treats each pixel in an image as\n a multi-class classification and treats pixel annotations from workers as\n \"votes\" for the correct label.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-SemanticSegmentation\n

            \n
          • \n
          \n \n

          \n Text classification - Uses a variant of the Expectation\n Maximization approach to estimate the true class of text based on annotations\n from individual workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-TextMultiClass\n

            \n
          • \n
          \n \n

          \n Multi-label text classification - Uses a variant of the\n Expectation Maximization approach to estimate the true classes of text based on\n annotations from individual workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          \n \n

          \n Named entity recognition - Groups similar selections and\n calculates aggregate boundaries, resolving to most-assigned label.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          \n \n \n \n \n

          \n Video Classification - Use this task type when you need workers to classify videos using\n predefined labels that you specify. Workers are shown videos and are asked to choose one\n label for each video.

          \n \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VideoMultiClass\n

            \n
          • \n
          \n \n

          \n Video Frame Object Detection - Use this task type to\n have workers identify and locate objects in a sequence of video frames (images extracted\n from a video) using bounding boxes. For example, you can use this task to ask workers to\n identify and localize various objects in a series of video frames, such as cars, bikes,\n and pedestrians.

          \n \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VideoObjectDetection\n

            \n
          • \n
          \n \n

          \n Video Frame Object Tracking - Use this task type to\n have workers track the movement of objects in a sequence of video frames (images\n extracted from a video) using bounding boxes. For example, you can use this task to ask\n workers to track the movement of objects, such as cars, bikes, and pedestrians.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VideoObjectTracking\n

            \n
          • \n
          \n \n \n \n\n

          \n 3D Point Cloud Modalities\n

          \n

          Use the following pre-annotation lambdas for 3D point cloud labeling modality tasks.\n See 3D Point Cloud Task types\n to learn more.

          \n\n \n

          \n 3D Point Cloud Object Detection - \n Use this task type when you want workers to classify objects in a 3D point cloud by \n drawing 3D cuboids around objects. For example, you can use this task type to ask workers \n to identify different types of objects in a point cloud, such as cars, bikes, and pedestrians.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          \n \n

          \n 3D Point Cloud Object Tracking - \n Use this task type when you want workers to draw 3D cuboids around objects\n that appear in a sequence of 3D point cloud frames. \n For example, you can use this task type to ask workers to track \n the movement of vehicles across multiple point cloud frames.\n

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          \n \n \n \n

          \n 3D Point Cloud Semantic Segmentation - \n Use this task type when you want workers to create point-level semantic segmentation masks by \n painting objects in a 3D point cloud using different colors where each color is assigned to one of \n the classes you specify.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          \n \n

          \n Use the following ARNs for Label Verification and Adjustment Jobs\n

          \n

          Use label verification and adjustment jobs to review and adjust labels. To learn more,\n see Verify and Adjust Labels .

          \n \n

          \n Bounding box verification - Uses a variant of the\n Expectation Maximization approach to estimate the true class of verification\n judgment for bounding box labels based on annotations from individual\n workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VerificationBoundingBox\n

            \n
          • \n
          \n \n

          \n Bounding box adjustment - Finds the most similar boxes\n from different workers based on the Jaccard index of the adjusted\n annotations.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          \n \n

          \n Semantic segmentation verification - Uses a variant of\n the Expectation Maximization approach to estimate the true class of verification\n judgment for semantic segmentation labels based on annotations from individual\n workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          \n \n

          \n Semantic segmentation adjustment - Treats each pixel in\n an image as a multi-class classification and treats pixel adjusted annotations\n from workers as \"votes\" for the correct label.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          \n \n \n \n \n

          \n Video Frame Object Detection Adjustment - \n Use this task type when you want workers to adjust bounding boxes that workers have added \n to video frames to classify and localize objects in a sequence of video frames.

          \n \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          \n \n

          \n Video Frame Object Tracking Adjustment - \n Use this task type when you want workers to adjust bounding boxes that workers have added \n to video frames to track object movement across a sequence of video frames.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          \n \n \n \n \n

          \n 3D point cloud object detection adjustment - Adjust\n 3D cuboids in a point cloud frame.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          \n \n

          \n 3D point cloud object tracking adjustment - Adjust 3D\n cuboids across a sequence of point cloud frames.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          \n \n

          \n 3D point cloud semantic segmentation adjustment -\n Adjust semantic segmentation masks in a 3D point cloud.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the component as displayed. If DisplayName isn't specified,\n TrialComponentName is displayed.

          " } }, - "TaskKeywords": { - "target": "com.amazonaws.sagemaker#TaskKeywords", + "Source": { + "target": "com.amazonaws.sagemaker#TrialComponentSource", "traits": { - "smithy.api#documentation": "

          Keywords used to describe the task so that workers on Amazon Mechanical Turk can\n discover the task.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          " } }, - "TaskTitle": { - "target": "com.amazonaws.sagemaker#TaskTitle", + "Status": { + "target": "com.amazonaws.sagemaker#TrialComponentStatus", "traits": { - "smithy.api#documentation": "

          A title for the task for your human workers.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status of the component. States include:

          \n
            \n
          • \n

            InProgress

            \n
          • \n
          • \n

            Completed

            \n
          • \n
          • \n

            Failed

            \n
          • \n
          " } }, - "TaskDescription": { - "target": "com.amazonaws.sagemaker#TaskDescription", + "StartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A description of the task for your human workers.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the component started.

          " } }, - "NumberOfHumanWorkersPerDataObject": { - "target": "com.amazonaws.sagemaker#NumberOfHumanWorkersPerDataObject", + "EndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The number of human workers that will label an object.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the component ended.

          " } }, - "TaskTimeLimitInSeconds": { - "target": "com.amazonaws.sagemaker#TaskTimeLimitInSeconds", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The amount of time that a worker has to complete a task.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          When the component was created.

          " } }, - "TaskAvailabilityLifetimeInSeconds": { - "target": "com.amazonaws.sagemaker#TaskAvailabilityLifetimeInSeconds", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          The length of time that a task remains available for labeling by human workers.\n If you choose the Amazon Mechanical Turk workforce, the maximum is 12 hours\n (43200). The default value is 864000 seconds (10 days). For private and vendor workforces, the maximum is as\n listed.

          " + "smithy.api#documentation": "

          Who created the component.

          " } }, - "MaxConcurrentTaskCount": { - "target": "com.amazonaws.sagemaker#MaxConcurrentTaskCount", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Defines the maximum number of data objects that can be labeled by human workers at the\n same time. Also referred to as batch size. Each object may have more than one worker at one time.\n The default value is 1000 objects.

          " + "smithy.api#documentation": "

          When the component was last modified.

          " } }, - "AnnotationConsolidationConfig": { - "target": "com.amazonaws.sagemaker#AnnotationConsolidationConfig", + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          Configures how labels are consolidated across human workers.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Who last modified the component.

          " } }, - "PublicWorkforceTaskPrice": { - "target": "com.amazonaws.sagemaker#PublicWorkforceTaskPrice", + "Parameters": { + "target": "com.amazonaws.sagemaker#TrialComponentParameters", "traits": { - "smithy.api#documentation": "

          The price that you pay for each task performed by an Amazon Mechanical Turk worker.

          " + "smithy.api#documentation": "

          The hyperparameters of the component.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Information required for human workers to complete a labeling task.

          " - } - }, - "com.amazonaws.sagemaker#HumanTaskUiArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:human-task-ui/.*" - } - }, - "com.amazonaws.sagemaker#HumanTaskUiName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 63 }, - "smithy.api#pattern": "^[a-z0-9](-*[a-z0-9])*" - } - }, - "com.amazonaws.sagemaker#HumanTaskUiStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Active", - "name": "ACTIVE" - }, - { - "value": "Deleting", - "name": "DELETING" - } - ] - } - }, - "com.amazonaws.sagemaker#HumanTaskUiSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#HumanTaskUiSummary" - } - }, - "com.amazonaws.sagemaker#HumanTaskUiSummary": { - "type": "structure", - "members": { - "HumanTaskUiName": { - "target": "com.amazonaws.sagemaker#HumanTaskUiName", + "InputArtifacts": { + "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", "traits": { - "smithy.api#documentation": "

          The name of the human task user interface.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The input artifacts of the component.

          " } }, - "HumanTaskUiArn": { - "target": "com.amazonaws.sagemaker#HumanTaskUiArn", + "OutputArtifacts": { + "target": "com.amazonaws.sagemaker#TrialComponentArtifacts", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human task user interface.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The output artifacts of the component.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, + "Metrics": { + "target": "com.amazonaws.sagemaker#TrialComponentMetricSummaries", "traits": { - "smithy.api#documentation": "

          A timestamp when SageMaker created the human task user interface.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The metrics for the component.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Container for human task user interface information.

          " } }, - "com.amazonaws.sagemaker#HyperParameterAlgorithmSpecification": { + "com.amazonaws.sagemaker#DescribeTrialRequest": { "type": "structure", "members": { - "TrainingImage": { - "target": "com.amazonaws.sagemaker#AlgorithmImage", - "traits": { - "smithy.api#documentation": "

          The registry path of the Docker image that contains the training algorithm. For\n information about Docker registry paths for built-in algorithms, see Algorithms\n Provided by Amazon SageMaker: Common Parameters. Amazon SageMaker supports both\n registry/repository[:tag] and registry/repository[@digest]\n image path formats. For more information, see Using Your Own Algorithms with Amazon\n SageMaker.

          " - } - }, - "TrainingInputMode": { - "target": "com.amazonaws.sagemaker#TrainingInputMode", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The input mode that the algorithm supports:\n File\n or Pipe. In File input mode, Amazon SageMaker downloads the training data from\n Amazon S3 to the\n storage\n volume that is attached to the training instance and mounts the directory to the Docker\n volume for the training container. In Pipe input mode, Amazon SageMaker streams\n data directly from Amazon S3 to the container.

          \n

          If you specify File mode, make sure that\n you\n provision the storage volume that is attached to the training instance with enough\n capacity to accommodate the training data downloaded from Amazon S3, the model artifacts, and\n intermediate\n information.

          \n

          \n

          For more information about input modes, see Algorithms.

          ", + "smithy.api#documentation": "

          The name of the trial to describe.

          ", "smithy.api#required": {} } - }, - "AlgorithmName": { - "target": "com.amazonaws.sagemaker#ArnOrName", + } + } + }, + "com.amazonaws.sagemaker#DescribeTrialResponse": { + "type": "structure", + "members": { + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the resource algorithm to use for the hyperparameter tuning job. If you\n specify a value for this parameter, do not specify a value for\n TrainingImage.

          " + "smithy.api#documentation": "

          The name of the trial.

          " } }, - "MetricDefinitions": { - "target": "com.amazonaws.sagemaker#MetricDefinitionList", + "TrialArn": { + "target": "com.amazonaws.sagemaker#TrialArn", "traits": { - "smithy.api#documentation": "

          An array of MetricDefinition objects that specify the\n metrics\n that the algorithm emits.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies\n which\n training algorithm to use for training jobs that a hyperparameter\n tuning job launches and the metrics to monitor.

          " - } - }, - "com.amazonaws.sagemaker#HyperParameterKey": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#HyperParameterScalingType": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Auto", - "name": "AUTO" - }, - { - "value": "Linear", - "name": "LINEAR" - }, - { - "value": "Logarithmic", - "name": "LOGARITHMIC" - }, - { - "value": "ReverseLogarithmic", - "name": "REVERSE_LOGARITHMIC" - } - ] - } - }, - "com.amazonaws.sagemaker#HyperParameterSpecification": { - "type": "structure", - "members": { - "Name": { - "target": "com.amazonaws.sagemaker#ParameterName", + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of this hyperparameter. The name must be unique.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the trial as displayed. If DisplayName isn't specified,\n TrialName is displayed.

          " } }, - "Description": { - "target": "com.amazonaws.sagemaker#EntityDescription", + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          A brief description of the hyperparameter.

          " + "smithy.api#documentation": "

          The name of the experiment the trial is part of.

          " } }, - "Type": { - "target": "com.amazonaws.sagemaker#ParameterType", + "Source": { + "target": "com.amazonaws.sagemaker#TrialSource", "traits": { - "smithy.api#documentation": "

          The type of this hyperparameter. The valid types are Integer,\n Continuous, Categorical, and FreeText.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the source and, optionally, the job type.

          " } }, - "Range": { - "target": "com.amazonaws.sagemaker#ParameterRange", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The allowed range for this hyperparameter.

          " + "smithy.api#documentation": "

          When the trial was created.

          " } }, - "IsTunable": { - "target": "com.amazonaws.sagemaker#Boolean", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          Indicates whether this hyperparameter is tunable in a hyperparameter tuning\n job.

          " + "smithy.api#documentation": "

          Who created the trial.

          " } }, - "IsRequired": { - "target": "com.amazonaws.sagemaker#Boolean", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Indicates whether this hyperparameter is required.

          " + "smithy.api#documentation": "

          When the trial was last modified.

          " } }, - "DefaultValue": { - "target": "com.amazonaws.sagemaker#HyperParameterValue", + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext", "traits": { - "smithy.api#documentation": "

          The default value for this hyperparameter. If a default value is specified, a\n hyperparameter cannot be required.

          " + "smithy.api#documentation": "

          Who last modified the trial.

          " } + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" } - }, - "traits": { - "smithy.api#documentation": "

          Defines a hyperparameter to be used by an algorithm.

          " } }, - "com.amazonaws.sagemaker#HyperParameterSpecifications": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#HyperParameterSpecification" + "com.amazonaws.sagemaker#DescribeUserProfile": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeUserProfileRequest" }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 100 + "output": { + "target": "com.amazonaws.sagemaker#DescribeUserProfileResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" } + ], + "traits": { + "smithy.api#documentation": "

          Describes a user profile. For more information, see CreateUserProfile.

          " } }, - "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition": { + "com.amazonaws.sagemaker#DescribeUserProfileRequest": { "type": "structure", "members": { - "DefinitionName": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitionName", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The job definition name.

          " + "smithy.api#documentation": "

          The domain ID.

          ", + "smithy.api#required": {} } }, - "TuningObjective": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjective" - }, - "HyperParameterRanges": { - "target": "com.amazonaws.sagemaker#ParameterRanges" - }, - "StaticHyperParameters": { - "target": "com.amazonaws.sagemaker#HyperParameters", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          Specifies the values of hyperparameters\n that\n do not change for the tuning job.

          " + "smithy.api#documentation": "

          The user profile name.

          ", + "smithy.api#required": {} } - }, - "AlgorithmSpecification": { - "target": "com.amazonaws.sagemaker#HyperParameterAlgorithmSpecification", + } + } + }, + "com.amazonaws.sagemaker#DescribeUserProfileResponse": { + "type": "structure", + "members": { + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The HyperParameterAlgorithmSpecification object that\n specifies\n the resource algorithm to use for the training jobs that the tuning\n job launches.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The ID of the domain that contains the profile.

          " } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the\n IAM\n role associated with the training jobs that the tuning job\n launches.

          ", - "smithy.api#required": {} + "UserProfileArn": { + "target": "com.amazonaws.sagemaker#UserProfileArn", + "traits": { + "smithy.api#documentation": "

          The user profile Amazon Resource Name (ARN).

          " } }, - "InputDataConfig": { - "target": "com.amazonaws.sagemaker#InputDataConfig", + "UserProfileName": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          An array of Channel objects that specify\n the\n input for the training jobs that the tuning job launches.

          " + "smithy.api#documentation": "

          The user profile name.

          " } }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig", + "HomeEfsFileSystemUid": { + "target": "com.amazonaws.sagemaker#EfsUid", "traits": { - "smithy.api#documentation": "

          The VpcConfig object that\n specifies\n the VPC that you want the training jobs that this hyperparameter\n tuning job launches to connect to. Control access to and from your\n training\n container by configuring the VPC. For more information, see Protect Training Jobs\n by Using an Amazon Virtual Private Cloud.

          " + "smithy.api#documentation": "

          The ID of the user's profile in the Amazon Elastic File System (EFS) volume.

          " } }, - "OutputDataConfig": { - "target": "com.amazonaws.sagemaker#OutputDataConfig", + "Status": { + "target": "com.amazonaws.sagemaker#UserProfileStatus", "traits": { - "smithy.api#documentation": "

          Specifies the path to the Amazon S3 bucket where you\n store\n model artifacts from the training jobs that the tuning job\n launches.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status.

          " } }, - "ResourceConfig": { - "target": "com.amazonaws.sagemaker#ResourceConfig", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          The resources,\n including\n the compute instances and storage volumes, to use for the training\n jobs that the tuning job launches.

          \n

          Storage\n volumes store model artifacts and\n incremental\n states. Training algorithms might also use storage volumes for\n scratch\n space. If you want Amazon SageMaker to use the storage volume\n to store the training data, choose File as the\n TrainingInputMode in the algorithm specification. For distributed\n training algorithms, specify an instance count greater than 1.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The last modified time.

          " } }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#StoppingCondition", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          Specifies a limit to how long a model hyperparameter training job can run. It also\n specifies how long you are willing to wait for a managed spot training job to complete.\n When the job reaches a limit, Amazon SageMaker ends the training job. Use this API to cap model\n training costs.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The creation time.

          " } }, - "EnableNetworkIsolation": { - "target": "com.amazonaws.sagemaker#Boolean", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          Isolates the training container. No inbound or outbound network calls can be made,\n except for calls between peers within a training cluster for distributed training. If\n network isolation is used for training jobs that are configured to use a VPC, Amazon SageMaker\n downloads and uploads customer data and model artifacts through the specified VPC, but\n the training container does not have network access.

          " + "smithy.api#documentation": "

          The failure reason.

          " } }, - "EnableInterContainerTrafficEncryption": { - "target": "com.amazonaws.sagemaker#Boolean", + "SingleSignOnUserIdentifier": { + "target": "com.amazonaws.sagemaker#SingleSignOnUserIdentifier", "traits": { - "smithy.api#documentation": "

          To encrypt all communications between ML compute instances in distributed training,\n choose True. Encryption provides greater security for distributed training,\n but training might take longer. How long it takes depends on the amount of communication\n between compute instances, especially if you use a deep learning algorithm in\n distributed training.

          " + "smithy.api#documentation": "

          The SSO user identifier.

          " } }, - "EnableManagedSpotTraining": { - "target": "com.amazonaws.sagemaker#Boolean", + "SingleSignOnUserValue": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          A Boolean indicating whether managed spot training is enabled (True) or\n not (False).

          " + "smithy.api#documentation": "

          The SSO user value.

          " } }, - "CheckpointConfig": { - "target": "com.amazonaws.sagemaker#CheckpointConfig" + "UserSettings": { + "target": "com.amazonaws.sagemaker#UserSettings", + "traits": { + "smithy.api#documentation": "

          A collection of settings.

          " + } } - }, - "traits": { - "smithy.api#documentation": "

          Defines\n the training jobs launched by a hyperparameter tuning job.

          " - } - }, - "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitionName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 64 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" } }, - "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitions": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition" + "com.amazonaws.sagemaker#DescribeWorkforce": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeWorkforceRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeWorkforceResponse" }, "traits": { - "smithy.api#length": { - "min": 1, - "max": 10 - } - } - }, - "com.amazonaws.sagemaker#HyperParameterTrainingJobSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary" + "smithy.api#documentation": "

          Lists private workforce information, including workforce name, Amazon Resource Name\n (ARN), and, if applicable, allowed IP address ranges (CIDRs). Allowable IP address\n ranges are the IP addresses that workers can use to access tasks.

          \n \n

          This operation applies only to private workforces.

          \n
          " } }, - "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary": { + "com.amazonaws.sagemaker#DescribeWorkforceRequest": { "type": "structure", "members": { - "TrainingJobDefinitionName": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitionName", - "traits": { - "smithy.api#documentation": "

          The training job definition name.

          " - } - }, - "TrainingJobName": { - "target": "com.amazonaws.sagemaker#TrainingJobName", - "traits": { - "smithy.api#documentation": "

          The name of the training job.

          ", - "smithy.api#required": {} - } - }, - "TrainingJobArn": { - "target": "com.amazonaws.sagemaker#TrainingJobArn", + "WorkforceName": { + "target": "com.amazonaws.sagemaker#WorkforceName", "traits": { - "smithy.api#documentation": "

          The\n Amazon\n Resource Name (ARN) of the training job.

          ", + "smithy.api#documentation": "

          The name of the private workforce whose access you want to restrict.\n WorkforceName is automatically set to default when a\n workforce is created and cannot be modified.

          ", "smithy.api#required": {} } - }, - "TuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", - "traits": { - "smithy.api#documentation": "

          The HyperParameter tuning job that launched the training job.

          " - } - }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + } + } + }, + "com.amazonaws.sagemaker#DescribeWorkforceResponse": { + "type": "structure", + "members": { + "Workforce": { + "target": "com.amazonaws.sagemaker#Workforce", "traits": { - "smithy.api#documentation": "

          The date and time that the training job was created.

          ", + "smithy.api#documentation": "

          A single private workforce, which is automatically created when you create your first\n private work team. You can create one private workforce in each AWS Region. By default,\n any workforce-related API operation used in a specific region will apply to the\n workforce created in that region. To learn how to create a private workforce, see Create a Private Workforce.

          ", "smithy.api#required": {} } - }, - "TrainingStartTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          The date and time that the training job started.

          " - } - }, - "TrainingEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          Specifies the time when the training job ends on training instances. You are billed\n for the time interval between the value of TrainingStartTime and this time.\n For successful jobs and stopped jobs, this is the time after model artifacts are\n uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          " - } - }, - "TrainingJobStatus": { - "target": "com.amazonaws.sagemaker#TrainingJobStatus", + } + } + }, + "com.amazonaws.sagemaker#DescribeWorkteam": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DescribeWorkteamRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DescribeWorkteamResponse" + }, + "traits": { + "smithy.api#documentation": "

          Gets information about a specific work team. You can see information such as the\n create date, the last updated date, membership information, and the work team's Amazon\n Resource Name (ARN).

          " + } + }, + "com.amazonaws.sagemaker#DescribeWorkteamRequest": { + "type": "structure", + "members": { + "WorkteamName": { + "target": "com.amazonaws.sagemaker#WorkteamName", "traits": { - "smithy.api#documentation": "

          The\n status\n of the training job.

          ", + "smithy.api#documentation": "

          The name of the work team to return a description of.

          ", "smithy.api#required": {} } - }, - "TunedHyperParameters": { - "target": "com.amazonaws.sagemaker#HyperParameters", + } + } + }, + "com.amazonaws.sagemaker#DescribeWorkteamResponse": { + "type": "structure", + "members": { + "Workteam": { + "target": "com.amazonaws.sagemaker#Workteam", "traits": { - "smithy.api#documentation": "

          A\n list of the hyperparameters for which you specified ranges to\n search.

          ", + "smithy.api#documentation": "

          A Workteam instance that contains information about the work team.\n

          ", "smithy.api#required": {} } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", - "traits": { - "smithy.api#documentation": "

          The\n reason that the training job failed.\n

          " - } - }, - "FinalHyperParameterTuningJobObjectiveMetric": { - "target": "com.amazonaws.sagemaker#FinalHyperParameterTuningJobObjectiveMetric", - "traits": { - "smithy.api#documentation": "

          The FinalHyperParameterTuningJobObjectiveMetric object that\n specifies the\n value\n of the\n objective\n metric of the tuning job that launched this training job.

          " - } - }, - "ObjectiveStatus": { - "target": "com.amazonaws.sagemaker#ObjectiveStatus", - "traits": { - "smithy.api#documentation": "

          The status of the objective metric for the training job:

          \n
            \n
          • \n

            Succeeded: The\n final\n objective metric for the training job was evaluated by the\n hyperparameter tuning job and\n used\n in the hyperparameter tuning process.

            \n
          • \n
          \n
            \n
          • \n

            Pending: The training job is in progress and evaluation of its final objective\n metric is pending.

            \n
          • \n
          \n
            \n
          • \n

            Failed:\n The final objective metric for the training job was not evaluated, and was not\n used in the hyperparameter tuning process. This typically occurs when the\n training job failed or did not emit an objective\n metric.

            \n
          • \n
          " - } } - }, - "traits": { - "smithy.api#documentation": "

          Specifies\n summary information about a training job.

          " } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobArn": { + "com.amazonaws.sagemaker#Description": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 256 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:hyper-parameter-tuning-job/.*" + "max": 128 + } } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobConfig": { + "com.amazonaws.sagemaker#DesiredWeightAndCapacity": { "type": "structure", "members": { - "Strategy": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStrategyType", - "traits": { - "smithy.api#documentation": "

          Specifies how hyperparameter tuning chooses the combinations of hyperparameter values\n to use for the training job it launches. To use the Bayesian search strategy, set this\n to Bayesian. To randomly search, set it to Random. For\n information about search strategies, see How\n Hyperparameter Tuning Works.

          ", - "smithy.api#required": {} - } - }, - "HyperParameterTuningJobObjective": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjective", - "traits": { - "smithy.api#documentation": "

          The HyperParameterTuningJobObjective object that specifies the\n objective\n metric for this tuning job.

          " - } - }, - "ResourceLimits": { - "target": "com.amazonaws.sagemaker#ResourceLimits", + "VariantName": { + "target": "com.amazonaws.sagemaker#VariantName", "traits": { - "smithy.api#documentation": "

          The ResourceLimits object that specifies the\n maximum\n number of training jobs and parallel training jobs for this tuning\n job.

          ", + "smithy.api#documentation": "

          The name of the\n variant\n to update.

          ", "smithy.api#required": {} } }, - "ParameterRanges": { - "target": "com.amazonaws.sagemaker#ParameterRanges", - "traits": { - "smithy.api#documentation": "

          The ParameterRanges object that specifies the ranges of\n hyperparameters\n that this tuning job searches.

          " - } - }, - "TrainingJobEarlyStoppingType": { - "target": "com.amazonaws.sagemaker#TrainingJobEarlyStoppingType", + "DesiredWeight": { + "target": "com.amazonaws.sagemaker#VariantWeight", "traits": { - "smithy.api#documentation": "

          Specifies whether to use early stopping for training jobs launched by the\n hyperparameter tuning job. This can be one of the following values (the default value is\n OFF):

          \n
          \n
          OFF
          \n
          \n

          Training jobs launched by the hyperparameter tuning job do not use early\n stopping.

          \n
          \n
          AUTO
          \n
          \n

          Amazon SageMaker stops training jobs launched by the hyperparameter tuning job when\n they are unlikely to perform better than previously completed training jobs.\n For more information, see Stop Training Jobs Early.

          \n
          \n
          " + "smithy.api#documentation": "

          The variant's weight.

          " } }, - "TuningJobCompletionCriteria": { - "target": "com.amazonaws.sagemaker#TuningJobCompletionCriteria", + "DesiredInstanceCount": { + "target": "com.amazonaws.sagemaker#TaskCount", "traits": { - "smithy.api#documentation": "

          The tuning job's completion criteria.

          " + "smithy.api#documentation": "

          The variant's capacity.

          " } } }, "traits": { - "smithy.api#documentation": "

          Configures a hyperparameter tuning job.

          " + "smithy.api#documentation": "

          Specifies weight and capacity values for a production variant.

          " } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobName": { - "type": "string", + "com.amazonaws.sagemaker#DesiredWeightAndCapacityList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#DesiredWeightAndCapacity" + }, "traits": { "smithy.api#length": { - "min": 1, - "max": 32 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#HyperParameterTuningJobObjective": { - "type": "structure", - "members": { - "Type": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjectiveType", - "traits": { - "smithy.api#documentation": "

          Whether to\n minimize\n or maximize the objective metric.

          ", - "smithy.api#required": {} - } - }, - "MetricName": { - "target": "com.amazonaws.sagemaker#MetricName", - "traits": { - "smithy.api#documentation": "

          The\n name of the metric to use for the objective metric.

          ", - "smithy.api#required": {} - } + "min": 1 } - }, - "traits": { - "smithy.api#documentation": "

          Defines the objective metric for a hyperparameter tuning job.\n Hyperparameter\n tuning uses the value of this metric to evaluate the training jobs it launches, and\n returns the training job that results in either the highest or lowest value for this\n metric, depending on the value you specify for the Type\n parameter.

          " } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobObjectiveType": { + "com.amazonaws.sagemaker#DestinationS3Uri": { "type": "string", "traits": { - "smithy.api#enum": [ - { - "value": "Maximize", - "name": "MAXIMIZE" - }, - { - "value": "Minimize", - "name": "MINIMIZE" - } - ] - } - }, - "com.amazonaws.sagemaker#HyperParameterTuningJobObjectives": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjective" + "smithy.api#length": { + "min": 0, + "max": 512 + }, + "smithy.api#pattern": "^(https|s3)://([^/])/?(.*)$" } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobSortByOptions": { + "com.amazonaws.sagemaker#DetailedAlgorithmStatus": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Name", - "name": "Name" + "value": "NotStarted", + "name": "NOT_STARTED" }, { - "value": "Status", - "name": "Status" + "value": "InProgress", + "name": "IN_PROGRESS" }, { - "value": "CreationTime", - "name": "CreationTime" + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "Failed", + "name": "FAILED" } ] } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobStatus": { + "com.amazonaws.sagemaker#DetailedModelPackageStatus": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Completed", - "name": "COMPLETED" + "value": "NotStarted", + "name": "NOT_STARTED" }, { "value": "InProgress", "name": "IN_PROGRESS" }, { - "value": "Failed", - "name": "FAILED" - }, - { - "value": "Stopped", - "name": "STOPPED" + "value": "Completed", + "name": "COMPLETED" }, { - "value": "Stopping", - "name": "STOPPING" + "value": "Failed", + "name": "FAILED" } ] } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobStrategyType": { + "com.amazonaws.sagemaker#DirectInternetAccess": { "type": "string", "traits": { - "smithy.api#documentation": "

          The strategy hyperparameter tuning uses to\n find\n the best combination of hyperparameters for your model. Currently,\n the only\n supported\n value is Bayesian.

          ", "smithy.api#enum": [ { - "value": "Bayesian", - "name": "BAYESIAN" + "value": "Enabled", + "name": "ENABLED" }, { - "value": "Random", - "name": "RANDOM" + "value": "Disabled", + "name": "DISABLED" } ] } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobSummaries": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobSummary" + "com.amazonaws.sagemaker#DirectoryPath": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 4096 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobSummary": { + "com.amazonaws.sagemaker#DisableSagemakerServicecatalogPortfolio": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DisableSagemakerServicecatalogPortfolioInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#DisableSagemakerServicecatalogPortfolioOutput" + }, + "traits": { + "smithy.api#documentation": "

          Disables using Service Catalog in SageMaker. Service Catalog is used to create\n SageMaker projects.

          " + } + }, + "com.amazonaws.sagemaker#DisableSagemakerServicecatalogPortfolioInput": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DisableSagemakerServicecatalogPortfolioOutput": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#DisassociateAdditionalCodeRepositories": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#DisassociateDefaultCodeRepository": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#DisassociateNotebookInstanceAcceleratorTypes": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#DisassociateNotebookInstanceLifecycleConfig": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#DisassociateTrialComponent": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#DisassociateTrialComponentRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#DisassociateTrialComponentResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Disassociates a trial component from a trial. This doesn't affect other trials the\n component is associated with. Before you can delete a component, you must disassociate the\n component from all trials it is associated with. To associate a trial component with a trial,\n call the AssociateTrialComponent API.

          \n

          To get a list of the trials a component is associated with, use the Search API. Specify ExperimentTrialComponent for the Resource parameter.\n The list appears in the response under Results.TrialComponent.Parents.

          " + } + }, + "com.amazonaws.sagemaker#DisassociateTrialComponentRequest": { "type": "structure", "members": { - "HyperParameterTuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The name of the tuning job.

          ", + "smithy.api#documentation": "

          The name of the component to disassociate from the trial.

          ", "smithy.api#required": {} } }, - "HyperParameterTuningJobArn": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The\n Amazon\n Resource Name (ARN) of the tuning job.

          ", + "smithy.api#documentation": "

          The name of the trial to disassociate from.

          ", "smithy.api#required": {} } - }, - "HyperParameterTuningJobStatus": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStatus", + } + } + }, + "com.amazonaws.sagemaker#DisassociateTrialComponentResponse": { + "type": "structure", + "members": { + "TrialComponentArn": { + "target": "com.amazonaws.sagemaker#TrialComponentArn", "traits": { - "smithy.api#documentation": "

          The status of the\n tuning\n job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The ARN of the trial component.

          " } }, - "Strategy": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStrategyType", + "TrialArn": { + "target": "com.amazonaws.sagemaker#TrialArn", "traits": { - "smithy.api#documentation": "

          Specifies the search strategy hyperparameter tuning uses to choose which\n hyperparameters to\n use\n for each iteration. Currently, the only valid value is\n Bayesian.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the trial.

          " } + } + } + }, + "com.amazonaws.sagemaker#Dollars": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0, + "max": 2 + } + } + }, + "com.amazonaws.sagemaker#DomainArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:domain/.*" + } + }, + "com.amazonaws.sagemaker#DomainDetails": { + "type": "structure", + "members": { + "DomainArn": { + "target": "com.amazonaws.sagemaker#DomainArn", "traits": { - "smithy.api#documentation": "

          The date and time that the tuning job was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The domain's Amazon Resource Name (ARN).

          " } }, - "HyperParameterTuningEndTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "DomainId": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          The date and time that the tuning job ended.

          " + "smithy.api#documentation": "

          The domain ID.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "DomainName": { + "target": "com.amazonaws.sagemaker#DomainName", "traits": { - "smithy.api#documentation": "

          The date and time that the tuning job was\n modified.

          " + "smithy.api#documentation": "

          The domain name.

          " } }, - "TrainingJobStatusCounters": { - "target": "com.amazonaws.sagemaker#TrainingJobStatusCounters", + "Status": { + "target": "com.amazonaws.sagemaker#DomainStatus", "traits": { - "smithy.api#documentation": "

          The TrainingJobStatusCounters object that specifies the numbers of\n training jobs, categorized by status, that this tuning job launched.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status.

          " } }, - "ObjectiveStatusCounters": { - "target": "com.amazonaws.sagemaker#ObjectiveStatusCounters", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The ObjectiveStatusCounters object that specifies the numbers of\n training jobs, categorized by objective metric status, that this tuning job\n launched.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The creation time.

          " } }, - "ResourceLimits": { - "target": "com.amazonaws.sagemaker#ResourceLimits", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          The ResourceLimits object that specifies the maximum number of\n training jobs and parallel training jobs allowed for this tuning job.

          " + "smithy.api#documentation": "

          The last modified time.

          " + } + }, + "Url": { + "target": "com.amazonaws.sagemaker#String1024", + "traits": { + "smithy.api#documentation": "

          The domain's URL.

          " } } }, "traits": { - "smithy.api#documentation": "

          Provides summary information about a hyperparameter tuning job.

          " + "smithy.api#documentation": "

          The domain's details.

          " } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartConfig": { - "type": "structure", - "members": { - "ParentHyperParameterTuningJobs": { - "target": "com.amazonaws.sagemaker#ParentHyperParameterTuningJobs", - "traits": { - "smithy.api#documentation": "

          An array of hyperparameter tuning jobs that are used as the starting point for the new\n hyperparameter tuning job. For more information about warm starting a hyperparameter\n tuning job, see Using a Previous\n Hyperparameter Tuning Job as a Starting Point.

          \n

          Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent\n jobs for warm start tuning jobs.

          ", - "smithy.api#required": {} - } - }, - "WarmStartType": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartType", - "traits": { - "smithy.api#documentation": "

          Specifies one of the following:

          \n
          \n
          IDENTICAL_DATA_AND_ALGORITHM
          \n
          \n

          The new hyperparameter tuning job uses the same input data and training\n image as the parent tuning jobs. You can change the hyperparameter ranges to\n search and the maximum number of training jobs that the hyperparameter\n tuning job launches. You cannot use a new version of the training algorithm,\n unless the changes in the new version do not affect the algorithm itself.\n For example, changes that improve logging or adding support for a different\n data format are allowed. You can also change hyperparameters from tunable to\n static, and from static to tunable, but the total number of static plus\n tunable hyperparameters must remain the same as it is in all parent jobs.\n The objective metric for the new tuning job must be the same as for all\n parent jobs.

          \n
          \n
          TRANSFER_LEARNING
          \n
          \n

          The new hyperparameter tuning job can include input data, hyperparameter\n ranges, maximum number of concurrent training jobs, and maximum number of\n training jobs that are different than those of its parent hyperparameter\n tuning jobs. The training image can also be a different version from the\n version used in the parent hyperparameter tuning job. You can also change\n hyperparameters from tunable to static, and from static to tunable, but the\n total number of static plus tunable hyperparameters must remain the same as\n it is in all parent jobs. The objective metric for the new tuning job must\n be the same as for all parent jobs.

          \n
          \n
          ", - "smithy.api#required": {} - } + "com.amazonaws.sagemaker#DomainId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 } - }, + } + }, + "com.amazonaws.sagemaker#DomainList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#DomainDetails" + } + }, + "com.amazonaws.sagemaker#DomainName": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Specifies the configuration for a hyperparameter tuning job that uses one or more\n previous hyperparameter tuning jobs as a starting point. The results of previous tuning\n jobs are used to inform which combinations of hyperparameters to search over in the new\n tuning job.

          \n

          All training jobs launched by the new hyperparameter tuning job are evaluated by using\n the objective metric, and the training job that performs the best is compared to the\n best training jobs from the parent tuning jobs. From these, the training job that\n performs the best as measured by the objective metric is returned as the overall best\n training job.

          \n \n

          All training jobs launched by parent hyperparameter tuning jobs and the new\n hyperparameter tuning jobs count against the limit of training jobs for the tuning\n job.

          \n
          " + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, - "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartType": { + "com.amazonaws.sagemaker#DomainStatus": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "IdenticalDataAndAlgorithm", - "name": "IDENTICAL_DATA_AND_ALGORITHM" + "value": "Deleting", + "name": "Deleting" }, { - "value": "TransferLearning", - "name": "TRANSFER_LEARNING" + "value": "Failed", + "name": "Failed" + }, + { + "value": "InService", + "name": "InService" + }, + { + "value": "Pending", + "name": "Pending" + }, + { + "value": "Updating", + "name": "Updating" + }, + { + "value": "Update_Failed", + "name": "Update_Failed" + }, + { + "value": "Delete_Failed", + "name": "Delete_Failed" } ] } }, - "com.amazonaws.sagemaker#HyperParameterValue": { + "com.amazonaws.sagemaker#DoubleParameterValue": { + "type": "double", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.sagemaker#EfsUid": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 2500 + "max": 10 }, - "smithy.api#pattern": ".*" + "smithy.api#pattern": "\\d+" } }, - "com.amazonaws.sagemaker#HyperParameters": { - "type": "map", - "key": { - "target": "com.amazonaws.sagemaker#HyperParameterKey" + "com.amazonaws.sagemaker#EnableCapture": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#EnableSagemakerServicecatalogPortfolio": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#EnableSagemakerServicecatalogPortfolioInput" }, - "value": { - "target": "com.amazonaws.sagemaker#HyperParameterValue" + "output": { + "target": "com.amazonaws.sagemaker#EnableSagemakerServicecatalogPortfolioOutput" }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 100 - } + "smithy.api#documentation": "

          Enables using Service Catalog in SageMaker. Service Catalog is used to create\n SageMaker projects.

          " } }, - "com.amazonaws.sagemaker#Image": { + "com.amazonaws.sagemaker#EnableSagemakerServicecatalogPortfolioInput": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#EnableSagemakerServicecatalogPortfolioOutput": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#Endpoint": { "type": "structure", "members": { - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", "traits": { - "smithy.api#documentation": "

          When the image was created.

          ", + "smithy.api#documentation": "

          The name of the endpoint.

          ", "smithy.api#required": {} } }, - "Description": { - "target": "com.amazonaws.sagemaker#ImageDescription", + "EndpointArn": { + "target": "com.amazonaws.sagemaker#EndpointArn", "traits": { - "smithy.api#documentation": "

          The description of the image.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", + "smithy.api#required": {} } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#ImageDisplayName", + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", "traits": { - "smithy.api#documentation": "

          The name of the image as displayed.

          " + "smithy.api#documentation": "

          The endpoint configuration associated with the endpoint.

          ", + "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "ProductionVariants": { + "target": "com.amazonaws.sagemaker#ProductionVariantSummaryList", "traits": { - "smithy.api#documentation": "

          When a create, update, or delete operation fails, the reason for the failure.

          " + "smithy.api#documentation": "

          A list of the production variants hosted on the endpoint. Each production variant is a\n model.

          " } }, - "ImageArn": { - "target": "com.amazonaws.sagemaker#ImageArn", + "DataCaptureConfig": { + "target": "com.amazonaws.sagemaker#DataCaptureConfigSummary" + }, + "EndpointStatus": { + "target": "com.amazonaws.sagemaker#EndpointStatus", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image.

          ", + "smithy.api#documentation": "

          The status of the endpoint.

          ", "smithy.api#required": {} } }, - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          The name of the image.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If the endpoint failed, the reason it failed.

          " } }, - "ImageStatus": { - "target": "com.amazonaws.sagemaker#ImageStatus", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The status of the image.

          ", + "smithy.api#documentation": "

          The time that the endpoint was created.

          ", "smithy.api#required": {} } }, "LastModifiedTime": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the image was last modified.

          ", + "smithy.api#documentation": "

          The last time the endpoint was modified.

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          A SageMaker image. A SageMaker image represents a set of container images that are derived from\n a common base container image. Each of these container images is represented by a SageMaker\n ImageVersion.

          " - } - }, - "com.amazonaws.sagemaker#ImageArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 }, - "smithy.api#pattern": "^arn:aws(-[\\w]+)*:sagemaker:.+:[0-9]{12}:image/[a-z0-9]([-.]?[a-z0-9])*$" - } - }, - "com.amazonaws.sagemaker#ImageBaseImage": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 255 + "MonitoringSchedules": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleList", + "traits": { + "smithy.api#documentation": "

          A list of monitoring schedules for the endpoint. For information about model\n monitoring, see Amazon SageMaker Model Monitor.

          " + } }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#ImageConfig": { - "type": "structure", - "members": { - "RepositoryAccessMode": { - "target": "com.amazonaws.sagemaker#RepositoryAccessMode", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          Set this to one of the following values:

          \n
            \n
          • \n

            \n Platform - The model image is hosted in Amazon ECR.

            \n
          • \n
          • \n

            \n Vpc - The model image is hosted in a private Docker registry in\n your VPC.

            \n
          • \n
          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of the tags associated with the endpoint. For more information, see Tagging AWS\n resources in the AWS General Reference Guide.

          " } } }, "traits": { - "smithy.api#documentation": "

          Specifies whether the model container is in Amazon ECR or a private Docker registry\n accessible from your\n Amazon Virtual Private Cloud (VPC).

          " - } - }, - "com.amazonaws.sagemaker#ImageContainerImage": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 255 - } + "smithy.api#documentation": "

          A hosted endpoint for real-time inference.

          " } }, - "com.amazonaws.sagemaker#ImageDeleteProperty": { + "com.amazonaws.sagemaker#EndpointArn": { "type": "string", "traits": { "smithy.api#length": { - "min": 1, - "max": 11 + "min": 20, + "max": 2048 }, - "smithy.api#pattern": "(^DisplayName$)|(^Description$)" - } - }, - "com.amazonaws.sagemaker#ImageDeletePropertyList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ImageDeleteProperty" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 2 - } + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint/.*" } }, - "com.amazonaws.sagemaker#ImageDescription": { + "com.amazonaws.sagemaker#EndpointConfigArn": { "type": "string", "traits": { "smithy.api#length": { - "min": 1, - "max": 512 + "min": 20, + "max": 2048 }, - "smithy.api#pattern": "^\\S(.*\\S)?$" + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint-config/.*" } }, - "com.amazonaws.sagemaker#ImageDigest": { + "com.amazonaws.sagemaker#EndpointConfigName": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 72 - }, - "smithy.api#pattern": "^[Ss][Hh][Aa]256:[0-9a-fA-F]{64}$" - } - }, - "com.amazonaws.sagemaker#ImageDisplayName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 128 - }, - "smithy.api#pattern": "^\\S(.*\\S)?$" - } - }, - "com.amazonaws.sagemaker#ImageName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9]([-.]?[a-zA-Z0-9])*$" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, - "com.amazonaws.sagemaker#ImageNameContains": { + "com.amazonaws.sagemaker#EndpointConfigNameContains": { "type": "string", "traits": { "smithy.api#length": { "min": 0, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9\\-.]+$" + "smithy.api#pattern": "[a-zA-Z0-9-]+" } }, - "com.amazonaws.sagemaker#ImageSortBy": { + "com.amazonaws.sagemaker#EndpointConfigSortKey": { "type": 
"string", "traits": { "smithy.api#enum": [ { - "value": "CREATION_TIME", - "name": "CREATION_TIME" - }, - { - "value": "LAST_MODIFIED_TIME", - "name": "LAST_MODIFIED_TIME" + "value": "Name", + "name": "Name" }, { - "value": "IMAGE_NAME", - "name": "IMAGE_NAME" + "value": "CreationTime", + "name": "CreationTime" } ] } }, - "com.amazonaws.sagemaker#ImageSortOrder": { + "com.amazonaws.sagemaker#EndpointConfigSummary": { + "type": "structure", + "members": { + "EndpointConfigName": { + "target": "com.amazonaws.sagemaker#EndpointConfigName", + "traits": { + "smithy.api#documentation": "

          The name of the endpoint configuration.

          ", + "smithy.api#required": {} + } + }, + "EndpointConfigArn": { + "target": "com.amazonaws.sagemaker#EndpointConfigArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint configuration.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that shows when the endpoint configuration was created.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information for an endpoint configuration.

          " + } + }, + "com.amazonaws.sagemaker#EndpointConfigSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#EndpointConfigSummary" + } + }, + "com.amazonaws.sagemaker#EndpointInput": { + "type": "structure", + "members": { + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          An endpoint in customer's account which has DataCaptureConfig\n enabled.

          ", + "smithy.api#required": {} + } + }, + "LocalPath": { + "target": "com.amazonaws.sagemaker#ProcessingLocalPath", + "traits": { + "smithy.api#documentation": "

          Path to the filesystem where the endpoint data is available to the container.

          ", + "smithy.api#required": {} + } + }, + "S3InputMode": { + "target": "com.amazonaws.sagemaker#ProcessingS3InputMode", + "traits": { + "smithy.api#documentation": "

          Whether the Pipe or File is used as the input mode for\n transferring data for the monitoring job. Pipe mode is recommended for large\n datasets. File mode is useful for small files that fit in memory. Defaults to\n File.

          " + } + }, + "S3DataDistributionType": { + "target": "com.amazonaws.sagemaker#ProcessingS3DataDistributionType", + "traits": { + "smithy.api#documentation": "

          Whether input data distributed in Amazon S3 is fully replicated or sharded by an S3 key.\n Defaults to FullyReplicated\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Input object for the endpoint

          " + } + }, + "com.amazonaws.sagemaker#EndpointName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" + } + }, + "com.amazonaws.sagemaker#EndpointNameContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "[a-zA-Z0-9-]+" + } + }, + "com.amazonaws.sagemaker#EndpointSortKey": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "ASCENDING", - "name": "ASCENDING" + "value": "Name", + "name": "Name" }, { - "value": "DESCENDING", - "name": "DESCENDING" + "value": "CreationTime", + "name": "CreationTime" + }, + { + "value": "Status", + "name": "Status" } ] } }, - "com.amazonaws.sagemaker#ImageStatus": { + "com.amazonaws.sagemaker#EndpointStatus": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "CREATING", + "value": "OutOfService", + "name": "OUT_OF_SERVICE" + }, + { + "value": "Creating", "name": "CREATING" }, { - "value": "CREATED", - "name": "CREATED" + "value": "Updating", + "name": "UPDATING" }, { - "value": "CREATE_FAILED", - "name": "CREATE_FAILED" + "value": "SystemUpdating", + "name": "SYSTEM_UPDATING" }, { - "value": "UPDATING", - "name": "UPDATING" + "value": "RollingBack", + "name": "ROLLING_BACK" }, { - "value": "UPDATE_FAILED", - "name": "UPDATE_FAILED" + "value": "InService", + "name": "IN_SERVICE" }, { - "value": "DELETING", + "value": "Deleting", "name": "DELETING" }, { - "value": "DELETE_FAILED", - "name": "DELETE_FAILED" + "value": "Failed", + "name": "FAILED" + } + ] + } + }, + "com.amazonaws.sagemaker#EndpointSummary": { + "type": "structure", + "members": { + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          The name of the endpoint.

          ", + "smithy.api#required": {} + } + }, + "EndpointArn": { + "target": "com.amazonaws.sagemaker#EndpointArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the endpoint.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that shows when the endpoint was created.

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that shows when the endpoint was last modified.

          ", + "smithy.api#required": {} + } + }, + "EndpointStatus": { + "target": "com.amazonaws.sagemaker#EndpointStatus", + "traits": { + "smithy.api#documentation": "

          The status of the endpoint.

          \n
            \n
          • \n

            \n OutOfService: Endpoint is not available to take incoming\n requests.

            \n
          • \n
          • \n

            \n Creating: CreateEndpoint is executing.

            \n
          • \n
          • \n

            \n Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

            \n
          • \n
          • \n

            \n SystemUpdating: Endpoint is undergoing maintenance and cannot be\n updated or deleted or re-scaled until it has completed. This maintenance\n operation does not change any customer-specified values such as VPC config, KMS\n encryption, model, instance type, or instance count.

            \n
          • \n
          • \n

            \n RollingBack: Endpoint fails to scale up or down or change its\n variant weight and is in the process of rolling back to its previous\n configuration. Once the rollback completes, endpoint returns to an\n InService status. This transitional status only applies to an\n endpoint that has autoscaling enabled and is undergoing variant weight or\n capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called\n explicitly.

            \n
          • \n
          • \n

            \n InService: Endpoint is available to process incoming\n requests.

            \n
          • \n
          • \n

            \n Deleting: DeleteEndpoint is executing.

            \n
          • \n
          • \n

            \n Failed: Endpoint could not be created, updated, or re-scaled. Use\n DescribeEndpointOutput$FailureReason for information about\n the failure. DeleteEndpoint is the only operation that can be\n performed on a failed endpoint.

            \n
          • \n
          \n

          To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information for an endpoint.

          " + } + }, + "com.amazonaws.sagemaker#EndpointSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#EndpointSummary" + } + }, + "com.amazonaws.sagemaker#EntityDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*" + } + }, + "com.amazonaws.sagemaker#EntityName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}$" + } + }, + "com.amazonaws.sagemaker#EnvironmentKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "[a-zA-Z_][a-zA-Z0-9_]*" + } + }, + "com.amazonaws.sagemaker#EnvironmentMap": { + "type": "map", + "key": { + "target": "com.amazonaws.sagemaker#EnvironmentKey" + }, + "value": { + "target": "com.amazonaws.sagemaker#EnvironmentValue" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 16 + } + } + }, + "com.amazonaws.sagemaker#EnvironmentValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "[\\S\\s]*" + } + }, + "com.amazonaws.sagemaker#ExecutionStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Pending", + "name": "PENDING" + }, + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "CompletedWithViolations", + "name": "COMPLETED_WITH_VIOLATIONS" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Stopping", + "name": "STOPPING" + }, + { + "value": "Stopped", + "name": "STOPPED" + } + ] + } + }, + "com.amazonaws.sagemaker#ExitMessage": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "[\\S\\s]*" + } + }, + "com.amazonaws.sagemaker#Experiment": { + "type": "structure", 
+ "members": { + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment.

          " + } + }, + "ExperimentArn": { + "target": "com.amazonaws.sagemaker#ExperimentArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " + } + }, + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment as displayed. If DisplayName isn't specified,\n ExperimentName is displayed.

          " + } + }, + "Source": { + "target": "com.amazonaws.sagemaker#ExperimentSource" + }, + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", + "traits": { + "smithy.api#documentation": "

          The description of the experiment.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the experiment was created.

          " + } + }, + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the experiment was last modified.

          " + } + }, + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          The list of tags that are associated with the experiment. You can use Search API to search on the tags.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The properties of an experiment as returned by the Search API.

          " + } + }, + "com.amazonaws.sagemaker#ExperimentArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:experiment/.*" + } + }, + "com.amazonaws.sagemaker#ExperimentConfig": { + "type": "structure", + "members": { + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of an existing experiment to associate the trial component with.

          " + } + }, + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of an existing trial to associate the trial component with. If not specified, a\n new trial is created.

          " + } + }, + "TrialComponentDisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The display name for the trial component. If this key isn't specified, the display name is\n the trial component name.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Associates a SageMaker job as a trial component with an experiment and trial. Specified when\n you call the following APIs:

          \n " + } + }, + "com.amazonaws.sagemaker#ExperimentDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 3072 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ExperimentEntityName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 120 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,119}" + } + }, + "com.amazonaws.sagemaker#ExperimentSource": { + "type": "structure", + "members": { + "SourceArn": { + "target": "com.amazonaws.sagemaker#ExperimentSourceArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the source.

          ", + "smithy.api#required": {} + } + }, + "SourceType": { + "target": "com.amazonaws.sagemaker#SourceType", + "traits": { + "smithy.api#documentation": "

          The source type.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The source of the experiment.

          " + } + }, + "com.amazonaws.sagemaker#ExperimentSourceArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:.*" + } + }, + "com.amazonaws.sagemaker#ExperimentSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ExperimentSummary" + } + }, + "com.amazonaws.sagemaker#ExperimentSummary": { + "type": "structure", + "members": { + "ExperimentArn": { + "target": "com.amazonaws.sagemaker#ExperimentArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the experiment.

          " + } + }, + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment.

          " + } + }, + "DisplayName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment as displayed. If DisplayName isn't specified,\n ExperimentName is displayed.

          " + } + }, + "ExperimentSource": { + "target": "com.amazonaws.sagemaker#ExperimentSource" + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the experiment was created.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the experiment was last modified.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A summary of the properties of an experiment. To get the complete set of properties, call\n the DescribeExperiment API and provide the\n ExperimentName.

          " + } + }, + "com.amazonaws.sagemaker#Explainability": { + "type": "structure", + "members": { + "Report": { + "target": "com.amazonaws.sagemaker#MetricsSource", + "traits": { + "smithy.api#documentation": "

          The explainability report for a model.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains explainability metrics for a model.

          " + } + }, + "com.amazonaws.sagemaker#FailureReason": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + } + } + }, + "com.amazonaws.sagemaker#FeatureDefinition": { + "type": "structure", + "members": { + "FeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", + "traits": { + "smithy.api#documentation": "

          The name of a feature. The type must be a string. FeatureName cannot be any\n of the following: is_deleted, write_time,\n api_invocation_time.

          " + } + }, + "FeatureType": { + "target": "com.amazonaws.sagemaker#FeatureType", + "traits": { + "smithy.api#documentation": "

          The value type of a feature. Valid values are Integral, Fractional, or String.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A list of features. You must include FeatureName and\n FeatureType. Valid feature FeatureTypes are\n Integral, Fractional and String.

          " + } + }, + "com.amazonaws.sagemaker#FeatureDefinitions": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#FeatureDefinition" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2500 + } + } + }, + "com.amazonaws.sagemaker#FeatureGroup": { + "type": "structure", + "members": { + "FeatureGroupArn": { + "target": "com.amazonaws.sagemaker#FeatureGroupArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a FeatureGroup.

          " + } + }, + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", + "traits": { + "smithy.api#documentation": "

          The name of the FeatureGroup.

          " + } + }, + "RecordIdentifierFeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", + "traits": { + "smithy.api#documentation": "

          The name of the Feature whose value uniquely identifies a\n Record defined in the FeatureGroup \n FeatureDefinitions.

          " + } + }, + "EventTimeFeatureName": { + "target": "com.amazonaws.sagemaker#FeatureName", + "traits": { + "smithy.api#documentation": "

          The name of the feature that stores the EventTime of a Record in a\n FeatureGroup.

          \n

          An EventTime is a point in time when a new event\n occurs that corresponds to the creation or update of a Record in\n FeatureGroup. All Records in the FeatureGroup\n must have a corresponding EventTime.

          " + } + }, + "FeatureDefinitions": { + "target": "com.amazonaws.sagemaker#FeatureDefinitions", + "traits": { + "smithy.api#documentation": "

          A list of Features. Each Feature must include a\n FeatureName and a FeatureType.

          \n

          Valid FeatureTypes are Integral, Fractional and\n String.

          \n

          \n FeatureNames cannot be any of the following: is_deleted,\n write_time, api_invocation_time.

          \n

          You can create up to 2,500 FeatureDefinitions per\n FeatureGroup.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          The time a FeatureGroup was created.

          " + } + }, + "OnlineStoreConfig": { + "target": "com.amazonaws.sagemaker#OnlineStoreConfig" + }, + "OfflineStoreConfig": { + "target": "com.amazonaws.sagemaker#OfflineStoreConfig" + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM execution role used to create the feature\n group.

          " + } + }, + "FeatureGroupStatus": { + "target": "com.amazonaws.sagemaker#FeatureGroupStatus", + "traits": { + "smithy.api#documentation": "

          A FeatureGroup status.

          " + } + }, + "OfflineStoreStatus": { + "target": "com.amazonaws.sagemaker#OfflineStoreStatus" + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          The reason that the FeatureGroup failed to \n be replicated in the OfflineStore. This \n failure may be due to a failure to create a FeatureGroup in\n or delete a FeatureGroup from the OfflineStore.

          " + } + }, + "Description": { + "target": "com.amazonaws.sagemaker#Description", + "traits": { + "smithy.api#documentation": "

          A free form description of a FeatureGroup.

          " + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          Tags used to define a FeatureGroup.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Amazon SageMaker Feature Store stores features in a collection called Feature Group. \n A Feature Group can be visualized as a table which has rows, \n with a unique identifier for each row where each column in the table is a feature. \n In principle, a Feature Group is composed of features and values per features.

          " + } + }, + "com.amazonaws.sagemaker#FeatureGroupArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:feature-group/.*" + } + }, + "com.amazonaws.sagemaker#FeatureGroupMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#FeatureGroupName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,63}" + } + }, + "com.amazonaws.sagemaker#FeatureGroupNameContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + } + } + }, + "com.amazonaws.sagemaker#FeatureGroupSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "FeatureGroupStatus", + "name": "FEATURE_GROUP_STATUS" + }, + { + "value": "OfflineStoreStatus", + "name": "OFFLINE_STORE_STATUS" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, + "com.amazonaws.sagemaker#FeatureGroupSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Ascending", + "name": "ASCENDING" + }, + { + "value": "Descending", + "name": "DESCENDING" + } + ] + } + }, + "com.amazonaws.sagemaker#FeatureGroupStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Creating", + "name": "CREATING" + }, + { + "value": "Created", + "name": "CREATED" + }, + { + "value": "CreateFailed", + "name": "CREATE_FAILED" + }, + { + "value": "Deleting", + "name": "DELETING" + }, + { + "value": "DeleteFailed", + "name": "DELETE_FAILED" + } + ] + } + }, + "com.amazonaws.sagemaker#FeatureGroupSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#FeatureGroupSummary" + } + }, + 
"com.amazonaws.sagemaker#FeatureGroupSummary": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", + "traits": { + "smithy.api#documentation": "

          The name of FeatureGroup.

          ", + "smithy.api#required": {} + } + }, + "FeatureGroupArn": { + "target": "com.amazonaws.sagemaker#FeatureGroupArn", + "traits": { + "smithy.api#documentation": "

          Unique identifier for the FeatureGroup.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp indicating the time of creation of the FeatureGroup.

          ", + "smithy.api#required": {} + } + }, + "FeatureGroupStatus": { + "target": "com.amazonaws.sagemaker#FeatureGroupStatus", + "traits": { + "smithy.api#documentation": "

          The status of a FeatureGroup. The status can be any of the following:\n Creating, Created, CreateFailed,\n Deleting or DeleteFailed.

          " + } + }, + "OfflineStoreStatus": { + "target": "com.amazonaws.sagemaker#OfflineStoreStatus", + "traits": { + "smithy.api#documentation": "

          Notifies you if replicating data into the OfflineStore has failed. Returns\n either: Active or Blocked.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The name, Arn, CreationTime, FeatureGroup values,\n LastUpdatedTime and EnableOnlineStorage status of a\n FeatureGroup.

          " + } + }, + "com.amazonaws.sagemaker#FeatureName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^[a-zA-Z0-9]([-_]*[a-zA-Z0-9]){0,63}" + } + }, + "com.amazonaws.sagemaker#FeatureType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Integral", + "name": "INTEGRAL" + }, + { + "value": "Fractional", + "name": "FRACTIONAL" + }, + { + "value": "String", + "name": "STRING" + } + ] + } + }, + "com.amazonaws.sagemaker#FileSystemAccessMode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "rw", + "name": "RW" + }, + { + "value": "ro", + "name": "RO" + } + ] + } + }, + "com.amazonaws.sagemaker#FileSystemConfig": { + "type": "structure", + "members": { + "MountPath": { + "target": "com.amazonaws.sagemaker#MountPath", + "traits": { + "smithy.api#documentation": "

          The path within the image to mount the user's EFS home directory. The directory\n should be empty. If not specified, defaults to /home/sagemaker-user.

          " + } + }, + "DefaultUid": { + "target": "com.amazonaws.sagemaker#DefaultUid", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The default POSIX user ID (UID). If not specified, defaults to 1000.

          " + } + }, + "DefaultGid": { + "target": "com.amazonaws.sagemaker#DefaultGid", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The default POSIX group ID (GID). If not specified, defaults to 100.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Amazon Elastic File System (EFS) storage configuration for a SageMaker image.

          " + } + }, + "com.amazonaws.sagemaker#FileSystemDataSource": { + "type": "structure", + "members": { + "FileSystemId": { + "target": "com.amazonaws.sagemaker#FileSystemId", + "traits": { + "smithy.api#documentation": "

          The file system id.

          ", + "smithy.api#required": {} + } + }, + "FileSystemAccessMode": { + "target": "com.amazonaws.sagemaker#FileSystemAccessMode", + "traits": { + "smithy.api#documentation": "

          The access mode of the mount of the directory associated with the channel. A directory\n can be mounted either in ro (read-only) or rw (read-write)\n mode.

          ", + "smithy.api#required": {} + } + }, + "FileSystemType": { + "target": "com.amazonaws.sagemaker#FileSystemType", + "traits": { + "smithy.api#documentation": "

          The file system type.

          ", + "smithy.api#required": {} + } + }, + "DirectoryPath": { + "target": "com.amazonaws.sagemaker#DirectoryPath", + "traits": { + "smithy.api#documentation": "

          The full path to the directory to associate with the channel.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies a file system data source for a channel.

          " + } + }, + "com.amazonaws.sagemaker#FileSystemId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 11 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#FileSystemType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "EFS", + "name": "EFS" + }, + { + "value": "FSxLustre", + "name": "FSXLUSTRE" + } + ] + } + }, + "com.amazonaws.sagemaker#Filter": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.sagemaker#ResourcePropertyName", + "traits": { + "smithy.api#documentation": "

          A resource property name. For example, TrainingJobName. For\n valid property names, see SearchRecord.\n You must specify a valid property for the resource.

          ", + "smithy.api#required": {} + } + }, + "Operator": { + "target": "com.amazonaws.sagemaker#Operator", + "traits": { + "smithy.api#documentation": "

          A Boolean binary operator that is used to evaluate the filter. The operator field\n contains one of the following values:

          \n
          \n
          Equals
          \n
          \n

          The value of Name equals Value.

          \n
          \n
          NotEquals
          \n
          \n

          The value of Name doesn't equal Value.

          \n
          \n
          Exists
          \n
          \n

          The Name property exists.

          \n
          \n
          NotExists
          \n
          \n

          The Name property does not exist.

          \n
          \n
          GreaterThan
          \n
          \n

          The value of Name is greater than Value.\n Not supported for text properties.

          \n
          \n
          GreaterThanOrEqualTo
          \n
          \n

          The value of Name is greater than or equal to Value.\n Not supported for text properties.

          \n
          \n
          LessThan
          \n
          \n

          The value of Name is less than Value.\n Not supported for text properties.

          \n
          \n
          LessThanOrEqualTo
          \n
          \n

          The value of Name is less than or equal to Value.\n Not supported for text properties.

          \n
          \n
          In
          \n
          \n

          The value of Name is one of the comma delimited strings in\n Value. Only supported for text properties.

          \n
          \n
          Contains
          \n
          \n

          The value of Name contains the string Value.\n Only supported for text properties.

          \n

          A SearchExpression can include the Contains operator\n multiple times when the value of Name is one of the following:

          \n
            \n
          • \n

            \n Experiment.DisplayName\n

            \n
          • \n
          • \n

            \n Experiment.ExperimentName\n

            \n
          • \n
          • \n

            \n Experiment.Tags\n

            \n
          • \n
          • \n

            \n Trial.DisplayName\n

            \n
          • \n
          • \n

            \n Trial.TrialName\n

            \n
          • \n
          • \n

            \n Trial.Tags\n

            \n
          • \n
          • \n

            \n TrialComponent.DisplayName\n

            \n
          • \n
          • \n

            \n TrialComponent.TrialComponentName\n

            \n
          • \n
          • \n

            \n TrialComponent.Tags\n

            \n
          • \n
          • \n

            \n TrialComponent.InputArtifacts\n

            \n
          • \n
          • \n

            \n TrialComponent.OutputArtifacts\n

            \n
          • \n
          \n

          A SearchExpression can include only one Contains operator\n for all other values of Name. In these cases, if you include multiple\n Contains operators in the SearchExpression, the result is\n the following error message: \"'CONTAINS' operator usage limit of 1\n exceeded.\"

          \n
          \n
          " + } + }, + "Value": { + "target": "com.amazonaws.sagemaker#FilterValue", + "traits": { + "smithy.api#documentation": "

          A value used with Name and Operator to determine which\n resources satisfy the filter's condition. For numerical properties, Value\n must be an integer or floating-point decimal. For timestamp properties,\n Value must be an ISO 8601 date-time string of the following format:\n YYYY-mm-dd'T'HH:MM:SS.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A conditional statement for a search expression that includes a resource property, a\n Boolean operator, and a value. Resources that match the statement are returned in the\n results from the Search API.

          \n \n

          If you specify a Value, but not an Operator, Amazon SageMaker uses the\n equals operator.

          \n

          In search, there are several property types:

          \n
          \n
          Metrics
          \n
          \n

          To define a metric filter, enter a value using the form\n \"Metrics.\", where is\n a metric name. For example, the following filter searches for training jobs\n with an \"accuracy\" metric greater than\n \"0.9\":

          \n

          \n {\n

          \n

          \n \"Name\": \"Metrics.accuracy\",\n

          \n

          \n \"Operator\": \"GreaterThan\",\n

          \n

          \n \"Value\": \"0.9\"\n

          \n

          \n }\n

          \n
          \n
          HyperParameters
          \n
          \n

          To define a hyperparameter filter, enter a value with the form\n \"HyperParameters.\". Decimal hyperparameter\n values are treated as a decimal in a comparison if the specified\n Value is also a decimal value. If the specified\n Value is an integer, the decimal hyperparameter values are\n treated as integers. For example, the following filter is satisfied by\n training jobs with a \"learning_rate\" hyperparameter that is\n less than \"0.5\":

          \n

          \n {\n

          \n

          \n \"Name\": \"HyperParameters.learning_rate\",\n

          \n

          \n \"Operator\": \"LessThan\",\n

          \n

          \n \"Value\": \"0.5\"\n

          \n

          \n }\n

          \n
          \n
          Tags
          \n
          \n

          To define a tag filter, enter a value with the form\n Tags..

          \n
          \n
          " + } + }, + "com.amazonaws.sagemaker#FilterList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Filter" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#FilterValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1024 + }, + "smithy.api#pattern": ".+" + } + }, + "com.amazonaws.sagemaker#FinalAutoMLJobObjectiveMetric": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.sagemaker#AutoMLJobObjectiveType", + "traits": { + "smithy.api#documentation": "

          The type of metric with the best result.

          " + } + }, + "MetricName": { + "target": "com.amazonaws.sagemaker#AutoMLMetricEnum", + "traits": { + "smithy.api#documentation": "

          The name of the metric with the best result. For a description of the possible objective\n metrics, see AutoMLJobObjective$MetricName.

          ", + "smithy.api#required": {} + } + }, + "Value": { + "target": "com.amazonaws.sagemaker#MetricValue", + "traits": { + "smithy.api#documentation": "

          The value of the metric with the best result.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The best candidate result from an AutoML training job.

          " + } + }, + "com.amazonaws.sagemaker#FinalHyperParameterTuningJobObjectiveMetric": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjectiveType", + "traits": { + "smithy.api#documentation": "

          Whether to\n minimize\n or maximize the objective metric. Valid values are Minimize and\n Maximize.

          " + } + }, + "MetricName": { + "target": "com.amazonaws.sagemaker#MetricName", + "traits": { + "smithy.api#documentation": "

          The name of the\n objective\n metric.

          ", + "smithy.api#required": {} + } + }, + "Value": { + "target": "com.amazonaws.sagemaker#MetricValue", + "traits": { + "smithy.api#documentation": "

          The value of the objective metric.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Shows the final value for the\n objective\n metric for a training job that was launched by a hyperparameter\n tuning job. You define the objective metric in the\n HyperParameterTuningJobObjective parameter of HyperParameterTuningJobConfig.

          " + } + }, + "com.amazonaws.sagemaker#FinalMetricDataList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#MetricData" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 40 + } + } + }, + "com.amazonaws.sagemaker#Float": { + "type": "float" + }, + "com.amazonaws.sagemaker#FlowDefinitionArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:flow-definition/.*" + } + }, + "com.amazonaws.sagemaker#FlowDefinitionName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": "^[a-z0-9](-*[a-z0-9]){0,62}" + } + }, + "com.amazonaws.sagemaker#FlowDefinitionOutputConfig": { + "type": "structure", + "members": { + "S3OutputPath": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 path where the object containing human output will be made available.

          ", + "smithy.api#required": {} + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The Amazon Key Management Service (KMS) key ID for server-side encryption.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains information about where human output will be stored.

          " + } + }, + "com.amazonaws.sagemaker#FlowDefinitionStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Initializing", + "name": "INITIALIZING" + }, + { + "value": "Active", + "name": "ACTIVE" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Deleting", + "name": "DELETING" + } + ] + } + }, + "com.amazonaws.sagemaker#FlowDefinitionSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#FlowDefinitionSummary" + } + }, + "com.amazonaws.sagemaker#FlowDefinitionSummary": { + "type": "structure", + "members": { + "FlowDefinitionName": { + "target": "com.amazonaws.sagemaker#FlowDefinitionName", + "traits": { + "smithy.api#documentation": "

          The name of the flow definition.

          ", + "smithy.api#required": {} + } + }, + "FlowDefinitionArn": { + "target": "com.amazonaws.sagemaker#FlowDefinitionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the flow definition.

          ", + "smithy.api#required": {} + } + }, + "FlowDefinitionStatus": { + "target": "com.amazonaws.sagemaker#FlowDefinitionStatus", + "traits": { + "smithy.api#documentation": "

          The status of the flow definition. Valid values:

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The timestamp when SageMaker created the flow definition.

          ", + "smithy.api#required": {} + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          The reason why the flow definition creation failed. A failure reason is returned only when the flow definition status is Failed.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains summary information about the flow definition.

          " + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskAvailabilityLifetimeInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 864000 + } + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskCount": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 3 + } + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".+" + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskKeyword": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 30 + }, + "smithy.api#pattern": "^[A-Za-z0-9]+( [A-Za-z0-9]+)*$" + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskKeywords": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskKeyword" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 5 + } + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskTimeLimitInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 30, + "max": 28800 + } + } + }, + "com.amazonaws.sagemaker#FlowDefinitionTaskTitle": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 128 + }, + "smithy.api#pattern": "^[\\t\\n\\r -\\uD7FF\\uE000-\\uFFFD]*$" + } + }, + "com.amazonaws.sagemaker#Framework": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "TENSORFLOW", + "name": "TENSORFLOW" + }, + { + "value": "KERAS", + "name": "KERAS" + }, + { + "value": "MXNET", + "name": "MXNET" + }, + { + "value": "ONNX", + "name": "ONNX" + }, + { + "value": "PYTORCH", + "name": "PYTORCH" + }, + { + "value": "XGBOOST", + "name": "XGBOOST" + }, + { + "value": "TFLITE", + "name": "TFLITE" + }, + { + "value": "DARKNET", + "name": "DARKNET" + }, + { + "value": "SKLEARN", + "name": "SKLEARN" + } + ] + } + }, 
+ "com.amazonaws.sagemaker#GenerateCandidateDefinitionsOnly": { + "type": "boolean" + }, + "com.amazonaws.sagemaker#GetModelPackageGroupPolicy": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#GetModelPackageGroupPolicyInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#GetModelPackageGroupPolicyOutput" + }, + "traits": { + "smithy.api#documentation": "

          Gets a resource policy that manages access for a model group. For information about\n resource policies, see Identity-based\n policies and resource-based policies in the AWS Identity and\n Access Management User Guide.

          " + } + }, + "com.amazonaws.sagemaker#GetModelPackageGroupPolicyInput": { + "type": "structure", + "members": { + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", + "traits": { + "smithy.api#documentation": "

          The name of the model group for which to get the resource policy.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#GetModelPackageGroupPolicyOutput": { + "type": "structure", + "members": { + "ResourcePolicy": { + "target": "com.amazonaws.sagemaker#PolicyString", + "traits": { + "smithy.api#documentation": "

          The resource policy for the model group.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#GetSagemakerServicecatalogPortfolioStatus": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#GetSagemakerServicecatalogPortfolioStatusInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#GetSagemakerServicecatalogPortfolioStatusOutput" + }, + "traits": { + "smithy.api#documentation": "

          Gets the status of Service Catalog in SageMaker. Service Catalog is used to create\n SageMaker projects.

          " + } + }, + "com.amazonaws.sagemaker#GetSagemakerServicecatalogPortfolioStatusInput": { + "type": "structure", + "members": {} + }, + "com.amazonaws.sagemaker#GetSagemakerServicecatalogPortfolioStatusOutput": { + "type": "structure", + "members": { + "Status": { + "target": "com.amazonaws.sagemaker#SagemakerServicecatalogStatus", + "traits": { + "smithy.api#documentation": "

          Whether Service Catalog is enabled or disabled in SageMaker.

          " + } + } + } + }, + "com.amazonaws.sagemaker#GetSearchSuggestions": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#GetSearchSuggestionsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#GetSearchSuggestionsResponse" + }, + "traits": { + "smithy.api#documentation": "

          An auto-complete API for the search functionality in the Amazon SageMaker console. It returns\n suggestions of possible matches for the property name to use in Search\n queries. Provides suggestions for HyperParameters, Tags, and\n Metrics.

          " + } + }, + "com.amazonaws.sagemaker#GetSearchSuggestionsRequest": { + "type": "structure", + "members": { + "Resource": { + "target": "com.amazonaws.sagemaker#ResourceType", + "traits": { + "smithy.api#documentation": "

          The name of the Amazon SageMaker resource to search for.

          ", + "smithy.api#required": {} + } + }, + "SuggestionQuery": { + "target": "com.amazonaws.sagemaker#SuggestionQuery", + "traits": { + "smithy.api#documentation": "

          Limits the property names that are included in the response.

          " + } + } + } + }, + "com.amazonaws.sagemaker#GetSearchSuggestionsResponse": { + "type": "structure", + "members": { + "PropertyNameSuggestions": { + "target": "com.amazonaws.sagemaker#PropertyNameSuggestionList", + "traits": { + "smithy.api#documentation": "

          A list of property names for a Resource that match a\n SuggestionQuery.

          " + } + } + } + }, + "com.amazonaws.sagemaker#GitConfig": { + "type": "structure", + "members": { + "RepositoryUrl": { + "target": "com.amazonaws.sagemaker#GitConfigUrl", + "traits": { + "smithy.api#documentation": "

          The URL where the Git repository is located.

          ", + "smithy.api#required": {} + } + }, + "Branch": { + "target": "com.amazonaws.sagemaker#Branch", + "traits": { + "smithy.api#documentation": "

          The default branch for the Git repository.

          " + } + }, + "SecretArn": { + "target": "com.amazonaws.sagemaker#SecretArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the\n credentials used to access the git repository. The secret must have a staging label of\n AWSCURRENT and must be in the following format:

          \n

          \n {\"username\": UserName, \"password\":\n Password}\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies configuration details for a Git repository in your AWS account.

          " + } + }, + "com.amazonaws.sagemaker#GitConfigForUpdate": { + "type": "structure", + "members": { + "SecretArn": { + "target": "com.amazonaws.sagemaker#SecretArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the\n credentials used to access the git repository. The secret must have a staging label of\n AWSCURRENT and must be in the following format:

          \n

          \n {\"username\": UserName, \"password\":\n Password}\n

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies configuration details for a Git repository when the repository is\n updated.

          " + } + }, + "com.amazonaws.sagemaker#GitConfigUrl": { + "type": "string", + "traits": { + "smithy.api#pattern": "^https://([^/]+)/?(.*)$" + } + }, + "com.amazonaws.sagemaker#Group": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": "[\\p{L}\\p{M}\\p{S}\\p{N}\\p{P}]+" + } + }, + "com.amazonaws.sagemaker#Groups": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Group" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + } + } + }, + "com.amazonaws.sagemaker#HookParameters": { + "type": "map", + "key": { + "target": "com.amazonaws.sagemaker#ConfigKey" + }, + "value": { + "target": "com.amazonaws.sagemaker#ConfigValue" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#HumanLoopActivationConditionsConfig": { + "type": "structure", + "members": { + "HumanLoopActivationConditions": { + "target": "com.amazonaws.sagemaker#SynthesizedJsonHumanLoopActivationConditions", + "traits": { + "smithy.api#documentation": "

          JSON expressing use-case specific conditions declaratively. If any condition is matched, atomic tasks are created against the configured work team. \n The set of conditions is different for Rekognition and Textract. For more information about how to structure the JSON, see \n JSON Schema for Human Loop Activation Conditions in Amazon Augmented AI \n in the Amazon SageMaker Developer Guide.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines under what conditions SageMaker creates a human loop. Used within . See for the required\n format of activation conditions.

          " + } + }, + "com.amazonaws.sagemaker#HumanLoopActivationConfig": { + "type": "structure", + "members": { + "HumanLoopActivationConditionsConfig": { + "target": "com.amazonaws.sagemaker#HumanLoopActivationConditionsConfig", + "traits": { + "smithy.api#documentation": "

          Container structure for defining under what conditions SageMaker creates a human loop.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides information about how and under what conditions SageMaker creates a human loop. If HumanLoopActivationConfig is not given, then all requests go to humans.

          " + } + }, + "com.amazonaws.sagemaker#HumanLoopConfig": { + "type": "structure", + "members": { + "WorkteamArn": { + "target": "com.amazonaws.sagemaker#WorkteamArn", + "traits": { + "smithy.api#documentation": "

          Amazon Resource Name (ARN) of a team of workers.

          ", + "smithy.api#required": {} + } + }, + "HumanTaskUiArn": { + "target": "com.amazonaws.sagemaker#HumanTaskUiArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human task user interface.

          ", + "smithy.api#required": {} + } + }, + "TaskTitle": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskTitle", + "traits": { + "smithy.api#documentation": "

          A title for the human worker task.

          ", + "smithy.api#required": {} + } + }, + "TaskDescription": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskDescription", + "traits": { + "smithy.api#documentation": "

          A description for the human worker task.

          ", + "smithy.api#required": {} + } + }, + "TaskCount": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskCount", + "traits": { + "smithy.api#documentation": "

          The number of distinct workers who will perform the same task on each object.\n For example, if TaskCount is set to 3 for an image classification \n labeling job, three workers will classify each input image. \n Increasing TaskCount can improve label accuracy.

          ", + "smithy.api#required": {} + } + }, + "TaskAvailabilityLifetimeInSeconds": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskAvailabilityLifetimeInSeconds", + "traits": { + "smithy.api#documentation": "

          The length of time that a task remains available for review by human workers.

          " + } + }, + "TaskTimeLimitInSeconds": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskTimeLimitInSeconds", + "traits": { + "smithy.api#documentation": "

          The amount of time that a worker has to complete a task. The default value is 3,600 seconds (1 hour)

          " + } + }, + "TaskKeywords": { + "target": "com.amazonaws.sagemaker#FlowDefinitionTaskKeywords", + "traits": { + "smithy.api#documentation": "

          Keywords used to describe the task so that workers can discover the task.

          " + } + }, + "PublicWorkforceTaskPrice": { + "target": "com.amazonaws.sagemaker#PublicWorkforceTaskPrice" + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the work to be performed by human workers.

          " + } + }, + "com.amazonaws.sagemaker#HumanLoopRequestSource": { + "type": "structure", + "members": { + "AwsManagedHumanLoopRequestSource": { + "target": "com.amazonaws.sagemaker#AwsManagedHumanLoopRequestSource", + "traits": { + "smithy.api#documentation": "

          Specifies whether Amazon Rekognition or Amazon Textract are used as the integration source. \n The default field settings and JSON parsing rules are different based on the integration source. Valid values:

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Container for configuring the source of human task requests.

          " + } + }, + "com.amazonaws.sagemaker#HumanTaskConfig": { + "type": "structure", + "members": { + "WorkteamArn": { + "target": "com.amazonaws.sagemaker#WorkteamArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team assigned to complete the tasks.

          ", + "smithy.api#required": {} + } + }, + "UiConfig": { + "target": "com.amazonaws.sagemaker#UiConfig", + "traits": { + "smithy.api#documentation": "

          Information about the user interface that workers use to complete the labeling\n task.

          ", + "smithy.api#required": {} + } + }, + "PreHumanTaskLambdaArn": { + "target": "com.amazonaws.sagemaker#LambdaFunctionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a Lambda function that is run before a data object\n is sent to a human worker. Use this function to provide input to a custom labeling\n job.

          \n

          For built-in\n task types, use one of the following Amazon SageMaker Ground Truth Lambda function ARNs for\n PreHumanTaskLambdaArn. For custom labeling workflows, see Pre-annotation Lambda.

          \n \n \n\n

          \n Bounding box - Finds the most similar boxes from\n different workers based on the Jaccard index of the boxes.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-BoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-BoundingBox\n

            \n
          • \n
          \n \n\n

          \n Image classification - Uses a variant of the Expectation\n Maximization approach to estimate the true class of an image based on\n annotations from individual workers.

          \n\n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-ImageMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-ImageMultiClass\n

            \n
          • \n
          \n \n

          \n Multi-label image classification - Uses a variant of the Expectation\n Maximization approach to estimate the true classes of an image based on\n annotations from individual workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-ImageMultiClassMultiLabel\n

            \n
          • \n
          \n \n

          \n Semantic segmentation - Treats each pixel in an image as\n a multi-class classification and treats pixel annotations from workers as\n \"votes\" for the correct label.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-SemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-SemanticSegmentation\n

            \n
          • \n
          \n \n

          \n Text classification - Uses a variant of the Expectation\n Maximization approach to estimate the true class of text based on annotations\n from individual workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-TextMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-TextMultiClass\n

            \n
          • \n
          \n \n

          \n Multi-label text classification - Uses a variant of the\n Expectation Maximization approach to estimate the true classes of text based on\n annotations from individual workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-TextMultiClassMultiLabel\n

            \n
          • \n
          \n \n

          \n Named entity recognition - Groups similar selections and\n calculates aggregate boundaries, resolving to most-assigned label.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-NamedEntityRecognition\n

            \n
          • \n
          \n \n \n \n \n

          \n Video Classification - Use this task type when you need workers to classify videos using\n predefined labels that you specify. Workers are shown videos and are asked to choose one\n label for each video.

          \n \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VideoMultiClass\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VideoMultiClass\n

            \n
          • \n
          \n \n

          \n Video Frame Object Detection - Use this task type to\n have workers identify and locate objects in a sequence of video frames (images extracted\n from a video) using bounding boxes. For example, you can use this task to ask workers to\n identify and localize various objects in a series of video frames, such as cars, bikes,\n and pedestrians.

          \n \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VideoObjectDetection\n

            \n
          • \n
          \n \n

          \n Video Frame Object Tracking - Use this task type to\n have workers track the movement of objects in a sequence of video frames (images\n extracted from a video) using bounding boxes. For example, you can use this task to ask\n workers to track the movement of objects, such as cars, bikes, and pedestrians.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VideoObjectTracking\n

            \n
          • \n
          \n \n \n \n\n

          \n 3D Point Cloud Modalities\n

          \n

          Use the following pre-annotation lambdas for 3D point cloud labeling modality tasks.\n See 3D Point Cloud Task types\n to learn more.

          \n\n \n

          \n 3D Point Cloud Object Detection - \n Use this task type when you want workers to classify objects in a 3D point cloud by \n drawing 3D cuboids around objects. For example, you can use this task type to ask workers \n to identify different types of objects in a point cloud, such as cars, bikes, and pedestrians.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-3DPointCloudObjectDetection\n

            \n
          • \n
          \n \n

          \n 3D Point Cloud Object Tracking - \n Use this task type when you want workers to draw 3D cuboids around objects\n that appear in a sequence of 3D point cloud frames. \n For example, you can use this task type to ask workers to track \n the movement of vehicles across multiple point cloud frames.\n

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-3DPointCloudObjectTracking\n

            \n
          • \n
          \n \n \n \n

          \n 3D Point Cloud Semantic Segmentation - \n Use this task type when you want workers to create a point-level semantic segmentation masks by \n painting objects in a 3D point cloud using different colors where each color is assigned to one of \n the classes you specify.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-3DPointCloudSemanticSegmentation\n

            \n
          • \n
          \n \n

          \n Use the following ARNs for Label Verification and Adjustment Jobs\n

          \n

          Use label verification and adjustment jobs to review and adjust labels. To learn more,\n see Verify and Adjust Labels .

          \n \n

          \n Bounding box verification - Uses a variant of the\n Expectation Maximization approach to estimate the true class of verification\n judgement for bounding box labels based on annotations from individual\n workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          \n \n

          \n Bounding box adjustment - Finds the most similar boxes\n from different workers based on the Jaccard index of the adjusted\n annotations.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentBoundingBox\n

            \n
          • \n
          \n \n

          \n Semantic segmentation verification - Uses a variant of\n the Expectation Maximization approach to estimate the true class of verification\n judgment for semantic segmentation labels based on annotations from individual\n workers.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-VerificationSemanticSegmentation\n

            \n
          • \n
          \n \n

          \n Semantic segmentation adjustment - Treats each pixel in\n an image as a multi-class classification and treats pixel adjusted annotations\n from workers as \"votes\" for the correct label.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentSemanticSegmentation\n

            \n
          • \n
          \n \n \n \n \n

          \n Video Frame Object Detection Adjustment - \n Use this task type when you want workers to adjust bounding boxes that workers have added \n to video frames to classify and localize objects in a sequence of video frames.

          \n \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentVideoObjectDetection\n

            \n
          • \n
          \n \n

          \n Video Frame Object Tracking Adjustment - \n Use this task type when you want workers to adjust bounding boxes that workers have added \n to video frames to track object movement across a sequence of video frames.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-AdjustmentVideoObjectTracking\n

            \n
          • \n
          \n \n \n \n \n

          \n 3D point cloud object detection adjustment - Adjust\n 3D cuboids in a point cloud frame.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudObjectDetection\n

            \n
          • \n
          \n \n

          \n 3D point cloud object tracking adjustment - Adjust 3D\n cuboids across a sequence of point cloud frames.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudObjectTracking\n

            \n
          • \n
          \n \n

          \n 3D point cloud semantic segmentation adjustment -\n Adjust semantic segmentation masks in a 3D point cloud.

          \n
            \n
          • \n

            \n arn:aws:lambda:us-east-1:432418664414:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-east-2:266458841044:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:us-west-2:081040173940:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-1:568282634449:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-2:454466003867:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-south-1:565803892007:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-central-1:203001061592:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-northeast-2:845288260483:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:eu-west-2:487402164563:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ap-southeast-1:377565633583:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          • \n

            \n arn:aws:lambda:ca-central-1:918755190332:function:PRE-Adjustment3DPointCloudSemanticSegmentation\n

            \n
          • \n
          ", + "smithy.api#required": {} + } + }, + "TaskKeywords": { + "target": "com.amazonaws.sagemaker#TaskKeywords", + "traits": { + "smithy.api#documentation": "

          Keywords used to describe the task so that workers on Amazon Mechanical Turk can\n discover the task.

          " + } + }, + "TaskTitle": { + "target": "com.amazonaws.sagemaker#TaskTitle", + "traits": { + "smithy.api#documentation": "

          A title for the task for your human workers.

          ", + "smithy.api#required": {} + } + }, + "TaskDescription": { + "target": "com.amazonaws.sagemaker#TaskDescription", + "traits": { + "smithy.api#documentation": "

          A description of the task for your human workers.

          ", + "smithy.api#required": {} + } + }, + "NumberOfHumanWorkersPerDataObject": { + "target": "com.amazonaws.sagemaker#NumberOfHumanWorkersPerDataObject", + "traits": { + "smithy.api#documentation": "

          The number of human workers that will label an object.

          ", + "smithy.api#required": {} + } + }, + "TaskTimeLimitInSeconds": { + "target": "com.amazonaws.sagemaker#TaskTimeLimitInSeconds", + "traits": { + "smithy.api#documentation": "

          The amount of time that a worker has to complete a task.

          ", + "smithy.api#required": {} + } + }, + "TaskAvailabilityLifetimeInSeconds": { + "target": "com.amazonaws.sagemaker#TaskAvailabilityLifetimeInSeconds", + "traits": { + "smithy.api#documentation": "

          The length of time that a task remains available for labeling by human workers.\n If you choose the Amazon Mechanical Turk workforce, the maximum is 12 hours\n (43200). The default value is 864000 seconds (10 days). For private and vendor workforces, the maximum is as\n listed.

          " + } + }, + "MaxConcurrentTaskCount": { + "target": "com.amazonaws.sagemaker#MaxConcurrentTaskCount", + "traits": { + "smithy.api#documentation": "

          Defines the maximum number of data objects that can be labeled by human workers at the\n same time. Also referred to as batch size. Each object may have more than one worker at one time.\n The default value is 1000 objects.

          " + } + }, + "AnnotationConsolidationConfig": { + "target": "com.amazonaws.sagemaker#AnnotationConsolidationConfig", + "traits": { + "smithy.api#documentation": "

          Configures how labels are consolidated across human workers.

          ", + "smithy.api#required": {} + } + }, + "PublicWorkforceTaskPrice": { + "target": "com.amazonaws.sagemaker#PublicWorkforceTaskPrice", + "traits": { + "smithy.api#documentation": "

          The price that you pay for each task performed by an Amazon Mechanical Turk worker.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Information required for human workers to complete a labeling task.

          " + } + }, + "com.amazonaws.sagemaker#HumanTaskUiArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:human-task-ui/.*" + } + }, + "com.amazonaws.sagemaker#HumanTaskUiName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": "^[a-z0-9](-*[a-z0-9])*" + } + }, + "com.amazonaws.sagemaker#HumanTaskUiStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Active", + "name": "ACTIVE" + }, + { + "value": "Deleting", + "name": "DELETING" + } + ] + } + }, + "com.amazonaws.sagemaker#HumanTaskUiSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#HumanTaskUiSummary" + } + }, + "com.amazonaws.sagemaker#HumanTaskUiSummary": { + "type": "structure", + "members": { + "HumanTaskUiName": { + "target": "com.amazonaws.sagemaker#HumanTaskUiName", + "traits": { + "smithy.api#documentation": "

          The name of the human task user interface.

          ", + "smithy.api#required": {} + } + }, + "HumanTaskUiArn": { + "target": "com.amazonaws.sagemaker#HumanTaskUiArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the human task user interface.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp when SageMaker created the human task user interface.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Container for human task user interface information.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterAlgorithmSpecification": { + "type": "structure", + "members": { + "TrainingImage": { + "target": "com.amazonaws.sagemaker#AlgorithmImage", + "traits": { + "smithy.api#documentation": "

          The registry path of the Docker image that contains the training algorithm. For\n information about Docker registry paths for built-in algorithms, see Algorithms\n Provided by Amazon SageMaker: Common Parameters. Amazon SageMaker supports both\n registry/repository[:tag] and registry/repository[@digest]\n image path formats. For more information, see Using Your Own Algorithms with Amazon\n SageMaker.

          " + } + }, + "TrainingInputMode": { + "target": "com.amazonaws.sagemaker#TrainingInputMode", + "traits": { + "smithy.api#documentation": "

          The input mode that the algorithm supports:\n File\n or Pipe. In File input mode, Amazon SageMaker downloads the training data from\n Amazon S3 to the\n storage\n volume that is attached to the training instance and mounts the directory to the Docker\n volume for the training container. In Pipe input mode, Amazon SageMaker streams\n data directly from Amazon S3 to the container.

          \n

          If you specify File mode, make sure that\n you\n provision the storage volume that is attached to the training instance with enough\n capacity to accommodate the training data downloaded from Amazon S3, the model artifacts, and\n intermediate\n information.

          \n

          \n

          For more information about input modes, see Algorithms.

          ", + "smithy.api#required": {} + } + }, + "AlgorithmName": { + "target": "com.amazonaws.sagemaker#ArnOrName", + "traits": { + "smithy.api#documentation": "

          The name of the resource algorithm to use for the hyperparameter tuning job. If you\n specify a value for this parameter, do not specify a value for\n TrainingImage.

          " + } + }, + "MetricDefinitions": { + "target": "com.amazonaws.sagemaker#MetricDefinitionList", + "traits": { + "smithy.api#documentation": "

          An array of MetricDefinition objects that specify the\n metrics\n that the algorithm emits.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies\n which\n training algorithm to use for training jobs that a hyperparameter\n tuning job launches and the metrics to monitor.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#HyperParameterScalingType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Auto", + "name": "AUTO" + }, + { + "value": "Linear", + "name": "LINEAR" + }, + { + "value": "Logarithmic", + "name": "LOGARITHMIC" + }, + { + "value": "ReverseLogarithmic", + "name": "REVERSE_LOGARITHMIC" + } + ] + } + }, + "com.amazonaws.sagemaker#HyperParameterSpecification": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.sagemaker#ParameterName", + "traits": { + "smithy.api#documentation": "

          The name of this hyperparameter. The name must be unique.

          ", + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.sagemaker#EntityDescription", + "traits": { + "smithy.api#documentation": "

          A brief description of the hyperparameter.

          " + } + }, + "Type": { + "target": "com.amazonaws.sagemaker#ParameterType", + "traits": { + "smithy.api#documentation": "

          The type of this hyperparameter. The valid types are Integer,\n Continuous, Categorical, and FreeText.

          ", + "smithy.api#required": {} + } + }, + "Range": { + "target": "com.amazonaws.sagemaker#ParameterRange", + "traits": { + "smithy.api#documentation": "

          The allowed range for this hyperparameter.

          " + } + }, + "IsTunable": { + "target": "com.amazonaws.sagemaker#Boolean", + "traits": { + "smithy.api#documentation": "

          Indicates whether this hyperparameter is tunable in a hyperparameter tuning\n job.

          " + } + }, + "IsRequired": { + "target": "com.amazonaws.sagemaker#Boolean", + "traits": { + "smithy.api#documentation": "

          Indicates whether this hyperparameter is required.

          " + } + }, + "DefaultValue": { + "target": "com.amazonaws.sagemaker#HyperParameterValue", + "traits": { + "smithy.api#documentation": "

          The default value for this hyperparameter. If a default value is specified, a\n hyperparameter cannot be required.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines a hyperparameter to be used by an algorithm.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterSpecifications": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#HyperParameterSpecification" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition": { + "type": "structure", + "members": { + "DefinitionName": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitionName", + "traits": { + "smithy.api#documentation": "

          The job definition name.

          " + } + }, + "TuningObjective": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjective" + }, + "HyperParameterRanges": { + "target": "com.amazonaws.sagemaker#ParameterRanges" + }, + "StaticHyperParameters": { + "target": "com.amazonaws.sagemaker#HyperParameters", + "traits": { + "smithy.api#documentation": "

          Specifies the values of hyperparameters\n that\n do not change for the tuning job.

          " + } + }, + "AlgorithmSpecification": { + "target": "com.amazonaws.sagemaker#HyperParameterAlgorithmSpecification", + "traits": { + "smithy.api#documentation": "

          The HyperParameterAlgorithmSpecification object that\n specifies\n the resource algorithm to use for the training jobs that the tuning\n job launches.

          ", + "smithy.api#required": {} + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the\n IAM\n role associated with the training jobs that the tuning job\n launches.

          ", + "smithy.api#required": {} + } + }, + "InputDataConfig": { + "target": "com.amazonaws.sagemaker#InputDataConfig", + "traits": { + "smithy.api#documentation": "

          An array of Channel objects that specify\n the\n input for the training jobs that the tuning job launches.

          " + } + }, + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig", + "traits": { + "smithy.api#documentation": "

          The VpcConfig object that\n specifies\n the VPC that you want the training jobs that this hyperparameter\n tuning job launches to connect to. Control access to and from your\n training\n container by configuring the VPC. For more information, see Protect Training Jobs\n by Using an Amazon Virtual Private Cloud.

          " + } + }, + "OutputDataConfig": { + "target": "com.amazonaws.sagemaker#OutputDataConfig", + "traits": { + "smithy.api#documentation": "

          Specifies the path to the Amazon S3 bucket where you\n store\n model artifacts from the training jobs that the tuning job\n launches.

          ", + "smithy.api#required": {} + } + }, + "ResourceConfig": { + "target": "com.amazonaws.sagemaker#ResourceConfig", + "traits": { + "smithy.api#documentation": "

          The resources,\n including\n the compute instances and storage volumes, to use for the training\n jobs that the tuning job launches.

          \n

          Storage\n volumes store model artifacts and\n incremental\n states. Training algorithms might also use storage volumes for\n scratch\n space. If you want Amazon SageMaker to use the storage volume\n to store the training data, choose File as the\n TrainingInputMode in the algorithm specification. For distributed\n training algorithms, specify an instance count greater than 1.

          ", + "smithy.api#required": {} + } + }, + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#StoppingCondition", + "traits": { + "smithy.api#documentation": "

          Specifies a limit to how long a model hyperparameter training job can run. It also\n specifies how long you are willing to wait for a managed spot training job to complete.\n When the job reaches the limit, Amazon SageMaker ends the training job. Use this API to cap model\n training costs.

          ", + "smithy.api#required": {} + } + }, + "EnableNetworkIsolation": { + "target": "com.amazonaws.sagemaker#Boolean", + "traits": { + "smithy.api#documentation": "

          Isolates the training container. No inbound or outbound network calls can be made,\n except for calls between peers within a training cluster for distributed training. If\n network isolation is used for training jobs that are configured to use a VPC, Amazon SageMaker\n downloads and uploads customer data and model artifacts through the specified VPC, but\n the training container does not have network access.

          " + } + }, + "EnableInterContainerTrafficEncryption": { + "target": "com.amazonaws.sagemaker#Boolean", + "traits": { + "smithy.api#documentation": "

          To encrypt all communications between ML compute instances in distributed training,\n choose True. Encryption provides greater security for distributed training,\n but training might take longer. How long it takes depends on the amount of communication\n between compute instances, especially if you use a deep learning algorithm in\n distributed training.

          " + } + }, + "EnableManagedSpotTraining": { + "target": "com.amazonaws.sagemaker#Boolean", + "traits": { + "smithy.api#documentation": "

          A Boolean indicating whether managed spot training is enabled (True) or\n not (False).

          " + } + }, + "CheckpointConfig": { + "target": "com.amazonaws.sagemaker#CheckpointConfig" + } + }, + "traits": { + "smithy.api#documentation": "

          Defines\n the training jobs launched by a hyperparameter tuning job.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitionName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,63}" + } + }, + "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitions": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinition" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + } + } + }, + "com.amazonaws.sagemaker#HyperParameterTrainingJobSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary" + } + }, + "com.amazonaws.sagemaker#HyperParameterTrainingJobSummary": { + "type": "structure", + "members": { + "TrainingJobDefinitionName": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobDefinitionName", + "traits": { + "smithy.api#documentation": "

          The training job definition name.

          " + } + }, + "TrainingJobName": { + "target": "com.amazonaws.sagemaker#TrainingJobName", + "traits": { + "smithy.api#documentation": "

          The name of the training job.

          ", + "smithy.api#required": {} + } + }, + "TrainingJobArn": { + "target": "com.amazonaws.sagemaker#TrainingJobArn", + "traits": { + "smithy.api#documentation": "

          The\n Amazon\n Resource Name (ARN) of the training job.

          ", + "smithy.api#required": {} + } + }, + "TuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "traits": { + "smithy.api#documentation": "

          The HyperParameter tuning job that launched the training job.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the training job was created.

          ", + "smithy.api#required": {} + } + }, + "TrainingStartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the training job started.

          " + } + }, + "TrainingEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Specifies the time when the training job ends on training instances. You are billed\n for the time interval between the value of TrainingStartTime and this time.\n For successful jobs and stopped jobs, this is the time after model artifacts are\n uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

          " + } + }, + "TrainingJobStatus": { + "target": "com.amazonaws.sagemaker#TrainingJobStatus", + "traits": { + "smithy.api#documentation": "

          The\n status\n of the training job.

          ", + "smithy.api#required": {} + } + }, + "TunedHyperParameters": { + "target": "com.amazonaws.sagemaker#HyperParameters", + "traits": { + "smithy.api#documentation": "

          A\n list of the hyperparameters for which you specified ranges to\n search.

          ", + "smithy.api#required": {} + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          The\n reason that the training job failed.\n

          " + } + }, + "FinalHyperParameterTuningJobObjectiveMetric": { + "target": "com.amazonaws.sagemaker#FinalHyperParameterTuningJobObjectiveMetric", + "traits": { + "smithy.api#documentation": "

          The FinalHyperParameterTuningJobObjectiveMetric object that\n specifies the\n value\n of the\n objective\n metric of the tuning job that launched this training job.

          " + } + }, + "ObjectiveStatus": { + "target": "com.amazonaws.sagemaker#ObjectiveStatus", + "traits": { + "smithy.api#documentation": "

          The status of the objective metric for the training job:

          \n
            \n
          • \n

            Succeeded: The\n final\n objective metric for the training job was evaluated by the\n hyperparameter tuning job and\n used\n in the hyperparameter tuning process.

            \n
          • \n
          \n
            \n
          • \n

            Pending: The training job is in progress and evaluation of its final objective\n metric is pending.

            \n
          • \n
          \n
            \n
          • \n

            Failed:\n The final objective metric for the training job was not evaluated, and was not\n used in the hyperparameter tuning process. This typically occurs when the\n training job failed or did not emit an objective\n metric.

            \n
          • \n
          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies\n summary information about a training job.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:hyper-parameter-tuning-job/.*" + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobConfig": { + "type": "structure", + "members": { + "Strategy": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStrategyType", + "traits": { + "smithy.api#documentation": "

          Specifies how hyperparameter tuning chooses the combinations of hyperparameter values\n to use for the training job it launches. To use the Bayesian search strategy, set this\n to Bayesian. To randomly search, set it to Random. For\n information about search strategies, see How\n Hyperparameter Tuning Works.

          ", + "smithy.api#required": {} + } + }, + "HyperParameterTuningJobObjective": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjective", + "traits": { + "smithy.api#documentation": "

          The HyperParameterTuningJobObjective object that specifies the\n objective\n metric for this tuning job.

          " + } + }, + "ResourceLimits": { + "target": "com.amazonaws.sagemaker#ResourceLimits", + "traits": { + "smithy.api#documentation": "

          The ResourceLimits object that specifies the\n maximum\n number of training jobs and parallel training jobs for this tuning\n job.

          ", + "smithy.api#required": {} + } + }, + "ParameterRanges": { + "target": "com.amazonaws.sagemaker#ParameterRanges", + "traits": { + "smithy.api#documentation": "

          The ParameterRanges object that specifies the ranges of\n hyperparameters\n that this tuning job searches.

          " + } + }, + "TrainingJobEarlyStoppingType": { + "target": "com.amazonaws.sagemaker#TrainingJobEarlyStoppingType", + "traits": { + "smithy.api#documentation": "

          Specifies whether to use early stopping for training jobs launched by the\n hyperparameter tuning job. This can be one of the following values (the default value is\n OFF):

          \n
          \n
          OFF
          \n
          \n

          Training jobs launched by the hyperparameter tuning job do not use early\n stopping.

          \n
          \n
          AUTO
          \n
          \n

          Amazon SageMaker stops training jobs launched by the hyperparameter tuning job when\n they are unlikely to perform better than previously completed training jobs.\n For more information, see Stop Training Jobs Early.

          \n
          \n
          " + } + }, + "TuningJobCompletionCriteria": { + "target": "com.amazonaws.sagemaker#TuningJobCompletionCriteria", + "traits": { + "smithy.api#documentation": "

          The tuning job's completion criteria.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Configures a hyperparameter tuning job.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 32 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,31}" + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobObjective": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjectiveType", + "traits": { + "smithy.api#documentation": "

          Whether to\n minimize\n or maximize the objective metric.

          ", + "smithy.api#required": {} + } + }, + "MetricName": { + "target": "com.amazonaws.sagemaker#MetricName", + "traits": { + "smithy.api#documentation": "

          The\n name of the metric to use for the objective metric.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines the objective metric for a hyperparameter tuning job.\n Hyperparameter\n tuning uses the value of this metric to evaluate the training jobs it launches, and\n returns the training job that results in either the highest or lowest value for this\n metric, depending on the value you specify for the Type\n parameter.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobObjectiveType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Maximize", + "name": "MAXIMIZE" + }, + { + "value": "Minimize", + "name": "MINIMIZE" + } + ] + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobObjectives": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobObjective" + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobSortByOptions": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "Name" + }, + { + "value": "Status", + "name": "Status" + }, + { + "value": "CreationTime", + "name": "CreationTime" + } + ] + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Stopped", + "name": "STOPPED" + }, + { + "value": "Stopping", + "name": "STOPPING" + } + ] + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobStrategyType": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The strategy hyperparameter tuning uses to\n find\n the best combination of hyperparameters for your model. Currently,\n the only\n supported\n value is Bayesian.

          ", + "smithy.api#enum": [ + { + "value": "Bayesian", + "name": "BAYESIAN" + }, + { + "value": "Random", + "name": "RANDOM" + } + ] + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobSummaries": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobSummary" + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobSummary": { + "type": "structure", + "members": { + "HyperParameterTuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "traits": { + "smithy.api#documentation": "

          The name of the tuning job.

          ", + "smithy.api#required": {} + } + }, + "HyperParameterTuningJobArn": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobArn", + "traits": { + "smithy.api#documentation": "

          The\n Amazon\n Resource Name (ARN) of the tuning job.

          ", + "smithy.api#required": {} + } + }, + "HyperParameterTuningJobStatus": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStatus", + "traits": { + "smithy.api#documentation": "

          The status of the\n tuning\n job.

          ", + "smithy.api#required": {} + } + }, + "Strategy": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStrategyType", + "traits": { + "smithy.api#documentation": "

          Specifies the search strategy hyperparameter tuning uses to choose which\n hyperparameters to\n use\n for each iteration. Currently, the only valid value is\n Bayesian.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the tuning job was created.

          ", + "smithy.api#required": {} + } + }, + "HyperParameterTuningEndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the tuning job ended.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the tuning job was\n modified.

          " + } + }, + "TrainingJobStatusCounters": { + "target": "com.amazonaws.sagemaker#TrainingJobStatusCounters", + "traits": { + "smithy.api#documentation": "

          The TrainingJobStatusCounters object that specifies the numbers of\n training jobs, categorized by status, that this tuning job launched.

          ", + "smithy.api#required": {} + } + }, + "ObjectiveStatusCounters": { + "target": "com.amazonaws.sagemaker#ObjectiveStatusCounters", + "traits": { + "smithy.api#documentation": "

          The ObjectiveStatusCounters object that specifies the numbers of\n training jobs, categorized by objective metric status, that this tuning job\n launched.

          ", + "smithy.api#required": {} + } + }, + "ResourceLimits": { + "target": "com.amazonaws.sagemaker#ResourceLimits", + "traits": { + "smithy.api#documentation": "

          The ResourceLimits object that specifies the maximum number of\n training jobs and parallel training jobs allowed for this tuning job.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information about a hyperparameter tuning job.

          " + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartConfig": { + "type": "structure", + "members": { + "ParentHyperParameterTuningJobs": { + "target": "com.amazonaws.sagemaker#ParentHyperParameterTuningJobs", + "traits": { + "smithy.api#documentation": "

          An array of hyperparameter tuning jobs that are used as the starting point for the new\n hyperparameter tuning job. For more information about warm starting a hyperparameter\n tuning job, see Using a Previous\n Hyperparameter Tuning Job as a Starting Point.

          \n

          Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent\n jobs for warm start tuning jobs.

          ", + "smithy.api#required": {} + } + }, + "WarmStartType": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartType", + "traits": { + "smithy.api#documentation": "

          Specifies one of the following:

          \n
          \n
          IDENTICAL_DATA_AND_ALGORITHM
          \n
          \n

          The new hyperparameter tuning job uses the same input data and training\n image as the parent tuning jobs. You can change the hyperparameter ranges to\n search and the maximum number of training jobs that the hyperparameter\n tuning job launches. You cannot use a new version of the training algorithm,\n unless the changes in the new version do not affect the algorithm itself.\n For example, changes that improve logging or adding support for a different\n data format are allowed. You can also change hyperparameters from tunable to\n static, and from static to tunable, but the total number of static plus\n tunable hyperparameters must remain the same as it is in all parent jobs.\n The objective metric for the new tuning job must be the same as for all\n parent jobs.

          \n
          \n
          TRANSFER_LEARNING
          \n
          \n

          The new hyperparameter tuning job can include input data, hyperparameter\n ranges, maximum number of concurrent training jobs, and maximum number of\n training jobs that are different than those of its parent hyperparameter\n tuning jobs. The training image can also be a different version from the\n version used in the parent hyperparameter tuning job. You can also change\n hyperparameters from tunable to static, and from static to tunable, but the\n total number of static plus tunable hyperparameters must remain the same as\n it is in all parent jobs. The objective metric for the new tuning job must\n be the same as for all parent jobs.

          \n
          \n
          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies the configuration for a hyperparameter tuning job that uses one or more\n previous hyperparameter tuning jobs as a starting point. The results of previous tuning\n jobs are used to inform which combinations of hyperparameters to search over in the new\n tuning job.

          \n

          All training jobs launched by the new hyperparameter tuning job are evaluated by using\n the objective metric, and the training job that performs the best is compared to the\n best training jobs from the parent tuning jobs. From these, the training job that\n performs the best as measured by the objective metric is returned as the overall best\n training job.

          \n \n

          All training jobs launched by parent hyperparameter tuning jobs and the new\n hyperparameter tuning jobs count against the limit of training jobs for the tuning\n job.

          \n
          " + } + }, + "com.amazonaws.sagemaker#HyperParameterTuningJobWarmStartType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "IdenticalDataAndAlgorithm", + "name": "IDENTICAL_DATA_AND_ALGORITHM" + }, + { + "value": "TransferLearning", + "name": "TRANSFER_LEARNING" + } + ] + } + }, + "com.amazonaws.sagemaker#HyperParameterValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2500 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#HyperParameters": { + "type": "map", + "key": { + "target": "com.amazonaws.sagemaker#HyperParameterKey" + }, + "value": { + "target": "com.amazonaws.sagemaker#HyperParameterValue" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#IdempotencyToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 32, + "max": 128 + } + } + }, + "com.amazonaws.sagemaker#Image": { + "type": "structure", + "members": { + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the image was created.

          ", + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.sagemaker#ImageDescription", + "traits": { + "smithy.api#documentation": "

          The description of the image.

          " + } + }, + "DisplayName": { + "target": "com.amazonaws.sagemaker#ImageDisplayName", + "traits": { + "smithy.api#documentation": "

          The name of the image as displayed.

          " + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          When a create, update, or delete operation fails, the reason for the failure.

          " + } + }, + "ImageArn": { + "target": "com.amazonaws.sagemaker#ImageArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image.

          ", + "smithy.api#required": {} + } + }, + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", + "traits": { + "smithy.api#documentation": "

          The name of the image.

          ", + "smithy.api#required": {} + } + }, + "ImageStatus": { + "target": "com.amazonaws.sagemaker#ImageStatus", + "traits": { + "smithy.api#documentation": "

          The status of the image.

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the image was last modified.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A SageMaker image. A SageMaker image represents a set of container images that are derived from\n a common base container image. Each of these container images is represented by a SageMaker\n ImageVersion.

          " + } + }, + "com.amazonaws.sagemaker#ImageArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "^arn:aws(-[\\w]+)*:sagemaker:.+:[0-9]{12}:image/[a-z0-9]([-.]?[a-z0-9])*$" + } + }, + "com.amazonaws.sagemaker#ImageBaseImage": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ImageConfig": { + "type": "structure", + "members": { + "RepositoryAccessMode": { + "target": "com.amazonaws.sagemaker#RepositoryAccessMode", + "traits": { + "smithy.api#documentation": "

          Set this to one of the following values:

          \n
            \n
          • \n

            \n Platform - The model image is hosted in Amazon ECR.

            \n
          • \n
          • \n

            \n Vpc - The model image is hosted in a private Docker registry in\n your VPC.

            \n
          • \n
          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies whether the model container is in Amazon ECR or a private Docker registry\n accessible from your\n Amazon Virtual Private Cloud (VPC).

          " + } + }, + "com.amazonaws.sagemaker#ImageContainerImage": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + } + } + }, + "com.amazonaws.sagemaker#ImageDeleteProperty": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 11 + }, + "smithy.api#pattern": "(^DisplayName$)|(^Description$)" + } + }, + "com.amazonaws.sagemaker#ImageDeletePropertyList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ImageDeleteProperty" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2 + } + } + }, + "com.amazonaws.sagemaker#ImageDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 512 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ImageDigest": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 72 + }, + "smithy.api#pattern": "^[Ss][Hh][Aa]256:[0-9a-fA-F]{64}$" + } + }, + "com.amazonaws.sagemaker#ImageDisplayName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 128 + }, + "smithy.api#pattern": "^\\S(.*\\S)?$" + } + }, + "com.amazonaws.sagemaker#ImageName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9]([-.]?[a-zA-Z0-9]){0,62}$" + } + }, + "com.amazonaws.sagemaker#ImageNameContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9\\-.]+$" + } + }, + "com.amazonaws.sagemaker#ImageSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CREATION_TIME", + "name": "CREATION_TIME" + }, + { + "value": "LAST_MODIFIED_TIME", + "name": "LAST_MODIFIED_TIME" + }, + { + "value": "IMAGE_NAME", + "name": "IMAGE_NAME" + } + ] + } + }, + "com.amazonaws.sagemaker#ImageSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ASCENDING", + "name": 
"ASCENDING" + }, + { + "value": "DESCENDING", + "name": "DESCENDING" + } + ] + } + }, + "com.amazonaws.sagemaker#ImageStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CREATING", + "name": "CREATING" + }, + { + "value": "CREATED", + "name": "CREATED" + }, + { + "value": "CREATE_FAILED", + "name": "CREATE_FAILED" + }, + { + "value": "UPDATING", + "name": "UPDATING" + }, + { + "value": "UPDATE_FAILED", + "name": "UPDATE_FAILED" + }, + { + "value": "DELETING", + "name": "DELETING" + }, + { + "value": "DELETE_FAILED", + "name": "DELETE_FAILED" + } + ] + } + }, + "com.amazonaws.sagemaker#ImageUri": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 255 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ImageVersion": { + "type": "structure", + "members": { + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the version was created.

          ", + "smithy.api#required": {} + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          When a create or delete operation fails, the reason for the failure.

          " + } + }, + "ImageArn": { + "target": "com.amazonaws.sagemaker#ImageArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image the version is based on.

          ", + "smithy.api#required": {} + } + }, + "ImageVersionArn": { + "target": "com.amazonaws.sagemaker#ImageVersionArn", + "traits": { + "smithy.api#documentation": "

          The ARN of the version.

          ", + "smithy.api#required": {} + } + }, + "ImageVersionStatus": { + "target": "com.amazonaws.sagemaker#ImageVersionStatus", + "traits": { + "smithy.api#documentation": "

          The status of the version.

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          When the version was last modified.

          ", + "smithy.api#required": {} + } + }, + "Version": { + "target": "com.amazonaws.sagemaker#ImageVersionNumber", + "traits": { + "smithy.api#documentation": "

          The version number.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          A version of a SageMaker Image. A version represents an existing container\n image.

          " + } + }, + "com.amazonaws.sagemaker#ImageVersionArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "^arn:aws(-[\\w]+)*:sagemaker:.+:[0-9]{12}:image-version/[a-z0-9]([-.]?[a-z0-9])*/[0-9]+$" + } + }, + "com.amazonaws.sagemaker#ImageVersionNumber": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.sagemaker#ImageVersionSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CREATION_TIME", + "name": "CREATION_TIME" + }, + { + "value": "LAST_MODIFIED_TIME", + "name": "LAST_MODIFIED_TIME" + }, + { + "value": "VERSION", + "name": "VERSION" + } + ] + } + }, + "com.amazonaws.sagemaker#ImageVersionSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ASCENDING", + "name": "ASCENDING" + }, + { + "value": "DESCENDING", + "name": "DESCENDING" + } + ] + } + }, + "com.amazonaws.sagemaker#ImageVersionStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CREATING", + "name": "CREATING" + }, + { + "value": "CREATED", + "name": "CREATED" + }, + { + "value": "CREATE_FAILED", + "name": "CREATE_FAILED" + }, + { + "value": "DELETING", + "name": "DELETING" + }, + { + "value": "DELETE_FAILED", + "name": "DELETE_FAILED" + } + ] + } + }, + "com.amazonaws.sagemaker#ImageVersions": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ImageVersion" + } + }, + "com.amazonaws.sagemaker#Images": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Image" + } + }, + "com.amazonaws.sagemaker#InferenceSpecification": { + "type": "structure", + "members": { + "Containers": { + "target": "com.amazonaws.sagemaker#ModelPackageContainerDefinitionList", + "traits": { + "smithy.api#documentation": "

          The Amazon ECR registry path of the Docker image that contains the inference code.

          ", + "smithy.api#required": {} + } + }, + "SupportedTransformInstanceTypes": { + "target": "com.amazonaws.sagemaker#TransformInstanceTypes", + "traits": { + "smithy.api#documentation": "

          A list of the instance types on which a transformation job can be run or on which an\n endpoint can be deployed.

          \n

          This parameter is required for unversioned models, and optional for versioned models.

          " + } + }, + "SupportedRealtimeInferenceInstanceTypes": { + "target": "com.amazonaws.sagemaker#RealtimeInferenceInstanceTypes", + "traits": { + "smithy.api#documentation": "

          A list of the instance types that are used to generate inferences in real-time.

          \n

          This parameter is required for unversioned models, and optional for versioned models.

          " + } + }, + "SupportedContentTypes": { + "target": "com.amazonaws.sagemaker#ContentTypes", + "traits": { + "smithy.api#documentation": "

          The supported MIME types for the input data.

          ", + "smithy.api#required": {} + } + }, + "SupportedResponseMIMETypes": { + "target": "com.amazonaws.sagemaker#ResponseMIMETypes", + "traits": { + "smithy.api#documentation": "

          The supported MIME types for the output data.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines how to perform inference generation after a training job is run.

          " + } + }, + "com.amazonaws.sagemaker#InputConfig": { + "type": "structure", + "members": { + "S3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The S3 path where the model artifacts, which result from model training, are stored.\n This path must point to a single gzip compressed tar archive (.tar.gz suffix).

          ", + "smithy.api#required": {} + } + }, + "DataInputConfig": { + "target": "com.amazonaws.sagemaker#DataInputConfig", + "traits": { + "smithy.api#documentation": "

          Specifies the name and shape of the expected data inputs for your trained model with a\n JSON dictionary form. The data inputs are InputConfig$Framework\n specific.

          \n
            \n
          • \n

            \n TensorFlow: You must specify the name and shape (NHWC format) of\n the expected data inputs using a dictionary format for your trained model. The\n dictionary formats required for the console and CLI are different.

            \n
              \n
            • \n

              Examples for one input:

              \n
                \n
              • \n

                If using the console,\n {\"input\":[1,1024,1024,3]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"input\\\":[1,1024,1024,3]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Examples for two inputs:

              \n
                \n
              • \n

                If using the console, {\"data1\": [1,28,28,1],\n \"data2\":[1,28,28,1]}\n

                \n
              • \n
              • \n

                If using the CLI, {\\\"data1\\\": [1,28,28,1],\n \\\"data2\\\":[1,28,28,1]}\n

                \n
              • \n
              \n
            • \n
            \n
          • \n
          • \n

            \n KERAS: You must specify the name and shape (NCHW format) of\n expected data inputs using a dictionary format for your trained model. Note that\n while Keras model artifacts should be uploaded in NHWC (channel-last) format,\n DataInputConfig should be specified in NCHW (channel-first)\n format. The dictionary formats required for the console and CLI are\n different.

            \n
              \n
            • \n

              Examples for one input:

              \n
                \n
              • \n

                If using the console,\n {\"input_1\":[1,3,224,224]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"input_1\\\":[1,3,224,224]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Examples for two inputs:

              \n
                \n
              • \n

                If using the console, {\"input_1\": [1,3,224,224],\n \"input_2\":[1,3,224,224]} \n

                \n
              • \n
              • \n

                If using the CLI, {\\\"input_1\\\": [1,3,224,224],\n \\\"input_2\\\":[1,3,224,224]}\n

                \n
              • \n
              \n
            • \n
            \n
          • \n
          • \n

            \n MXNET/ONNX/DARKNET: You must specify the name and shape (NCHW format) of\n the expected data inputs in order using a dictionary format for your trained\n model. The dictionary formats required for the console and CLI are\n different.

            \n
              \n
            • \n

              Examples for one input:

              \n
                \n
              • \n

                If using the console,\n {\"data\":[1,3,1024,1024]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"data\\\":[1,3,1024,1024]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Examples for two inputs:

              \n
                \n
              • \n

                If using the console, {\"var1\": [1,1,28,28],\n \"var2\":[1,1,28,28]} \n

                \n
              • \n
              • \n

                If using the CLI, {\\\"var1\\\": [1,1,28,28],\n \\\"var2\\\":[1,1,28,28]}\n

                \n
              • \n
              \n
            • \n
            \n
          • \n
          • \n

            \n PyTorch: You can either specify the name and shape (NCHW format)\n of expected data inputs in order using a dictionary format for your trained\n model or you can specify the shape only using a list format. The dictionary\n formats required for the console and CLI are different. The list formats for the\n console and CLI are the same.

            \n
              \n
            • \n

              Examples for one input in dictionary format:

              \n
                \n
              • \n

                If using the console,\n {\"input0\":[1,3,224,224]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"input0\\\":[1,3,224,224]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Example for one input in list format:\n [[1,3,224,224]]\n

              \n
            • \n
            • \n

              Examples for two inputs in dictionary format:

              \n
                \n
              • \n

                If using the console, {\"input0\":[1,3,224,224],\n \"input1\":[1,3,224,224]}\n

                \n
              • \n
              • \n

                If using the CLI, {\\\"input0\\\":[1,3,224,224],\n \\\"input1\\\":[1,3,224,224]} \n

                \n
              • \n
              \n
            • \n
            • \n

              Example for two inputs in list format: [[1,3,224,224],\n [1,3,224,224]]\n

              \n
            • \n
            \n
          • \n
          • \n

            \n XGBOOST: input data name and shape are not needed.

            \n
          • \n
          \n

          \n DataInputConfig supports the following parameters for CoreML\n OutputConfig$TargetDevice (ML Model format):

          \n
            \n
          • \n

            \n shape: Input shape, for example {\"input_1\": {\"shape\": [1,224,224,3]}}.\n In addition to static input shapes, CoreML converter supports Flexible input shapes:

            \n
              \n
            • \n

              Range Dimension. You can use the Range Dimension feature if you know the input shape\n will be within some specific interval in that dimension,\n for example: {\"input_1\": {\"shape\": [\"1..10\", 224, 224, 3]}}\n

              \n
            • \n
            • \n

              Enumerated shapes. Sometimes, the models are trained to work only on a select\n set of inputs. You can enumerate all supported input shapes,\n for example: {\"input_1\": {\"shape\": [[1, 224, 224, 3], [1, 160, 160, 3]]}}\n

              \n
            • \n
            \n
          • \n
          • \n

            \n default_shape: Default input shape. You can set a default shape during\n conversion for both Range Dimension and Enumerated Shapes. For example\n {\"input_1\": {\"shape\": [\"1..10\", 224, 224, 3], \"default_shape\": [1, 224, 224, 3]}}\n

            \n
          • \n
          • \n

            \n type: Input type. Allowed values: Image and Tensor.\n By default, the converter generates an ML Model with inputs of type Tensor (MultiArray).\n User can set input type to be Image. Image input type requires additional input parameters\n such as bias and scale.

            \n
          • \n
          • \n

            \n bias: If the input type is an Image, you need to provide the bias vector.

            \n
          • \n
          • \n

            \n scale: If the input type is an Image, you need to provide a scale factor.

            \n
          • \n
          \n

          CoreML ClassifierConfig parameters can be specified using\n OutputConfig$CompilerOptions. CoreML converter supports Tensorflow and PyTorch models.\n CoreML conversion examples:

          \n
            \n
          • \n

            Tensor type input:

            \n
              \n
            • \n

              \n \"DataInputConfig\": {\"input_1\": {\"shape\": [[1,224,224,3], [1,160,160,3]], \"default_shape\":\n [1,224,224,3]}}\n

              \n
            • \n
            \n
          • \n
          • \n

            Tensor type input without input name (PyTorch):

            \n
              \n
            • \n

              \n \"DataInputConfig\": [{\"shape\": [[1,3,224,224], [1,3,160,160]], \"default_shape\":\n [1,3,224,224]}]\n

              \n
            • \n
            \n
          • \n
          • \n

            Image type input:

            \n
              \n
            • \n

              \n \"DataInputConfig\": {\"input_1\": {\"shape\": [[1,224,224,3], [1,160,160,3]], \"default_shape\":\n [1,224,224,3], \"type\": \"Image\", \"bias\": [-1,-1,-1], \"scale\": 0.007843137255}}\n

              \n
            • \n
            • \n

              \n \"CompilerOptions\": {\"class_labels\": \"imagenet_labels_1000.txt\"}\n

              \n
            • \n
            \n
          • \n
          • \n

            Image type input without input name (PyTorch):

            \n
              \n
            • \n

              \n \"DataInputConfig\": [{\"shape\": [[1,3,224,224], [1,3,160,160]], \"default_shape\":\n [1,3,224,224], \"type\": \"Image\", \"bias\": [-1,-1,-1], \"scale\": 0.007843137255}]\n

              \n
            • \n
            • \n

              \n \"CompilerOptions\": {\"class_labels\": \"imagenet_labels_1000.txt\"}\n

              \n
            • \n
            \n
          • \n
          ", + "smithy.api#required": {} + } + }, + "Framework": { + "target": "com.amazonaws.sagemaker#Framework", + "traits": { + "smithy.api#documentation": "

          Identifies the framework in which the model was trained. For example:\n TENSORFLOW.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Contains information about the location of input model artifacts, the name and\n shape\n of the expected data inputs, and the framework in which the model was trained.

          " + } + }, + "com.amazonaws.sagemaker#InputDataConfig": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Channel" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#InputMode": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Pipe", + "name": "PIPE" + }, + { + "value": "File", + "name": "FILE" + } + ] + } + }, + "com.amazonaws.sagemaker#InputModes": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#TrainingInputMode" + }, + "traits": { + "smithy.api#length": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#InstanceType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ml.t2.medium", + "name": "ML_T2_MEDIUM" + }, + { + "value": "ml.t2.large", + "name": "ML_T2_LARGE" + }, + { + "value": "ml.t2.xlarge", + "name": "ML_T2_XLARGE" + }, + { + "value": "ml.t2.2xlarge", + "name": "ML_T2_2XLARGE" + }, + { + "value": "ml.t3.medium", + "name": "ML_T3_MEDIUM" + }, + { + "value": "ml.t3.large", + "name": "ML_T3_LARGE" + }, + { + "value": "ml.t3.xlarge", + "name": "ML_T3_XLARGE" + }, + { + "value": "ml.t3.2xlarge", + "name": "ML_T3_2XLARGE" + }, + { + "value": "ml.m4.xlarge", + "name": "ML_M4_XLARGE" + }, + { + "value": "ml.m4.2xlarge", + "name": "ML_M4_2XLARGE" + }, + { + "value": "ml.m4.4xlarge", + "name": "ML_M4_4XLARGE" + }, + { + "value": "ml.m4.10xlarge", + "name": "ML_M4_10XLARGE" + }, + { + "value": "ml.m4.16xlarge", + "name": "ML_M4_16XLARGE" + }, + { + "value": "ml.m5.xlarge", + "name": "ML_M5_XLARGE" + }, + { + "value": "ml.m5.2xlarge", + "name": "ML_M5_2XLARGE" + }, + { + "value": "ml.m5.4xlarge", + "name": "ML_M5_4XLARGE" + }, + { + "value": "ml.m5.12xlarge", + "name": "ML_M5_12XLARGE" + }, + { + "value": "ml.m5.24xlarge", + "name": "ML_M5_24XLARGE" + }, + { + "value": "ml.c4.xlarge", + "name": "ML_C4_XLARGE" + }, + { + "value": "ml.c4.2xlarge", + "name": "ML_C4_2XLARGE" + }, + { + "value": 
"ml.c4.4xlarge", + "name": "ML_C4_4XLARGE" + }, + { + "value": "ml.c4.8xlarge", + "name": "ML_C4_8XLARGE" + }, + { + "value": "ml.c5.xlarge", + "name": "ML_C5_XLARGE" + }, + { + "value": "ml.c5.2xlarge", + "name": "ML_C5_2XLARGE" + }, + { + "value": "ml.c5.4xlarge", + "name": "ML_C5_4XLARGE" + }, + { + "value": "ml.c5.9xlarge", + "name": "ML_C5_9XLARGE" + }, + { + "value": "ml.c5.18xlarge", + "name": "ML_C5_18XLARGE" + }, + { + "value": "ml.c5d.xlarge", + "name": "ML_C5D_XLARGE" + }, + { + "value": "ml.c5d.2xlarge", + "name": "ML_C5D_2XLARGE" + }, + { + "value": "ml.c5d.4xlarge", + "name": "ML_C5D_4XLARGE" + }, + { + "value": "ml.c5d.9xlarge", + "name": "ML_C5D_9XLARGE" + }, + { + "value": "ml.c5d.18xlarge", + "name": "ML_C5D_18XLARGE" + }, + { + "value": "ml.p2.xlarge", + "name": "ML_P2_XLARGE" + }, + { + "value": "ml.p2.8xlarge", + "name": "ML_P2_8XLARGE" + }, + { + "value": "ml.p2.16xlarge", + "name": "ML_P2_16XLARGE" + }, + { + "value": "ml.p3.2xlarge", + "name": "ML_P3_2XLARGE" + }, + { + "value": "ml.p3.8xlarge", + "name": "ML_P3_8XLARGE" + }, + { + "value": "ml.p3.16xlarge", + "name": "ML_P3_16XLARGE" + } + ] + } + }, + "com.amazonaws.sagemaker#IntegerParameterRange": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.sagemaker#ParameterKey", + "traits": { + "smithy.api#documentation": "

          The name of the hyperparameter to search.

          ", + "smithy.api#required": {} + } + }, + "MinValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", + "traits": { + "smithy.api#documentation": "

          The minimum\n value\n of the hyperparameter to search.

          ", + "smithy.api#required": {} + } + }, + "MaxValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", + "traits": { + "smithy.api#documentation": "

          The maximum\n value\n of the hyperparameter to search.

          ", + "smithy.api#required": {} + } + }, + "ScalingType": { + "target": "com.amazonaws.sagemaker#HyperParameterScalingType", + "traits": { + "smithy.api#documentation": "

          The scale that hyperparameter tuning uses to search the hyperparameter range. For\n information about choosing a hyperparameter scale, see Hyperparameter Scaling. One of the following values:

          \n
          \n
          Auto
          \n
          \n

          Amazon SageMaker hyperparameter tuning chooses the best scale for the\n hyperparameter.

          \n
          \n
          Linear
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a linear scale.

          \n
          \n
          Logarithmic
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a logarithmic scale.

          \n

          Logarithmic scaling works only for ranges that have only values greater\n than 0.

          \n
          \n
          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          For a hyperparameter of the integer type, specifies the range\n that\n a hyperparameter tuning job searches.

          " + } + }, + "com.amazonaws.sagemaker#IntegerParameterRangeSpecification": { + "type": "structure", + "members": { + "MinValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", + "traits": { + "smithy.api#documentation": "

          The minimum integer value allowed.

          ", + "smithy.api#required": {} + } + }, + "MaxValue": { + "target": "com.amazonaws.sagemaker#ParameterValue", + "traits": { + "smithy.api#documentation": "

          The maximum integer value allowed.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Defines the possible values for an integer hyperparameter.

          " + } + }, + "com.amazonaws.sagemaker#IntegerParameterRanges": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#IntegerParameterRange" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#InvocationsMaxRetries": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0, + "max": 3 + } + } + }, + "com.amazonaws.sagemaker#InvocationsTimeoutInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 3600 + } + } + }, + "com.amazonaws.sagemaker#JobReferenceCode": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1 + }, + "smithy.api#pattern": ".+" + } + }, + "com.amazonaws.sagemaker#JobReferenceCodeContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".+" + } + }, + "com.amazonaws.sagemaker#JoinSource": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Input", + "name": "INPUT" + }, + { + "value": "None", + "name": "NONE" + } + ] + } + }, + "com.amazonaws.sagemaker#JsonContentType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 256 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*\\/[a-zA-Z0-9](-*[a-zA-Z0-9.])*" + } + }, + "com.amazonaws.sagemaker#JsonContentTypes": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#JsonContentType" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 10 + } + } + }, + "com.amazonaws.sagemaker#JsonPath": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + } + } + }, + "com.amazonaws.sagemaker#JupyterServerAppSettings": { + "type": "structure", + "members": { + "DefaultResourceSpec": { + "target": "com.amazonaws.sagemaker#ResourceSpec", + "traits": { + "smithy.api#documentation": "

          The default instance type and the Amazon Resource Name (ARN) of the default SageMaker image used by the JupyterServer app.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The JupyterServer app settings.

          " + } + }, + "com.amazonaws.sagemaker#KernelDisplayName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + } + } + }, + "com.amazonaws.sagemaker#KernelGatewayAppSettings": { + "type": "structure", + "members": { + "DefaultResourceSpec": { + "target": "com.amazonaws.sagemaker#ResourceSpec", + "traits": { + "smithy.api#documentation": "

          The default instance type and the Amazon Resource Name (ARN) of the default SageMaker image used by the KernelGateway app.

          " + } + }, + "CustomImages": { + "target": "com.amazonaws.sagemaker#CustomImages", + "traits": { + "smithy.api#documentation": "

          A list of custom SageMaker images that are configured to run as a KernelGateway app.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The KernelGateway app settings.

          " + } + }, + "com.amazonaws.sagemaker#KernelGatewayImageConfig": { + "type": "structure", + "members": { + "KernelSpecs": { + "target": "com.amazonaws.sagemaker#KernelSpecs", + "traits": { + "smithy.api#documentation": "

          The specification of the Jupyter kernels in the image.

          ", + "smithy.api#required": {} + } + }, + "FileSystemConfig": { + "target": "com.amazonaws.sagemaker#FileSystemConfig", + "traits": { + "smithy.api#documentation": "

          The Amazon Elastic File System (EFS) storage configuration for a SageMaker image.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The configuration for the file system and kernels in a SageMaker image running as a\n KernelGateway app.

          " + } + }, + "com.amazonaws.sagemaker#KernelName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + } + } + }, + "com.amazonaws.sagemaker#KernelSpec": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.sagemaker#KernelName", + "traits": { + "smithy.api#documentation": "

          The name of the kernel.

          ", + "smithy.api#required": {} + } + }, + "DisplayName": { + "target": "com.amazonaws.sagemaker#KernelDisplayName", + "traits": { + "smithy.api#documentation": "

          The display name of the kernel.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The specification of a Jupyter kernel.

          " + } + }, + "com.amazonaws.sagemaker#KernelSpecs": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#KernelSpec" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1 + } + } + }, + "com.amazonaws.sagemaker#KmsKeyId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#LabelAttributeName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 127 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,126}" + } + }, + "com.amazonaws.sagemaker#LabelCounter": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.sagemaker#LabelCounters": { + "type": "structure", + "members": { + "TotalLabeled": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of objects labeled.

          " + } + }, + "HumanLabeled": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of objects labeled by a human worker.

          " + } + }, + "MachineLabeled": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of objects labeled by automated data labeling.

          " + } + }, + "FailedNonRetryableError": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of objects that could not be labeled due to an error.

          " + } + }, + "Unlabeled": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of objects not yet labeled.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides a breakdown of the number of objects labeled.

          " + } + }, + "com.amazonaws.sagemaker#LabelCountersForWorkteam": { + "type": "structure", + "members": { + "HumanLabeled": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of data objects labeled by a human worker.

          " + } + }, + "PendingHuman": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of data objects that need to be labeled by a human worker.

          " + } + }, + "Total": { + "target": "com.amazonaws.sagemaker#LabelCounter", + "traits": { + "smithy.api#documentation": "

          The total number of tasks in the labeling job.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides counts for human-labeled tasks in the labeling job.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobAlgorithmSpecificationArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": "arn:.*" + } + }, + "com.amazonaws.sagemaker#LabelingJobAlgorithmsConfig": { + "type": "structure", + "members": { + "LabelingJobAlgorithmSpecificationArn": { + "target": "com.amazonaws.sagemaker#LabelingJobAlgorithmSpecificationArn", + "traits": { + "smithy.api#documentation": "

          Specifies the Amazon Resource Name (ARN) of the algorithm used for auto-labeling. You\n must select one of the following ARNs:

          \n
            \n
          • \n

            \n Image classification\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/image-classification\n

            \n
          • \n
          • \n

            \n Text classification\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/text-classification\n

            \n
          • \n
          • \n

            \n Object detection\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/object-detection\n

            \n
          • \n
          • \n

            \n Semantic Segmentation\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/semantic-segmentation\n

            \n
          • \n
          ", + "smithy.api#required": {} + } + }, + "InitialActiveLearningModelArn": { + "target": "com.amazonaws.sagemaker#ModelArn", + "traits": { + "smithy.api#documentation": "

          At the end of an auto-label job Ground Truth sends the Amazon Resource Name (ARN) of the final\n model used for auto-labeling. You can use this model as the starting point for\n subsequent similar jobs by providing the ARN of the model here.

          " + } + }, + "LabelingJobResourceConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobResourceConfig", + "traits": { + "smithy.api#documentation": "

          Provides configuration information for a labeling job.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides configuration information for auto-labeling of your data objects. A\n LabelingJobAlgorithmsConfig object must be supplied in order to use\n auto-labeling.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:labeling-job/.*" + } + }, + "com.amazonaws.sagemaker#LabelingJobDataAttributes": { + "type": "structure", + "members": { + "ContentClassifiers": { + "target": "com.amazonaws.sagemaker#ContentClassifiers", + "traits": { + "smithy.api#documentation": "

          Declares that your content is free of personally identifiable information or adult\n content. Amazon SageMaker may restrict the Amazon Mechanical Turk workers that can view your task\n based on this information.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Attributes of the data specified by the customer. Use these to describe the data to be\n labeled.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobDataSource": { + "type": "structure", + "members": { + "S3DataSource": { + "target": "com.amazonaws.sagemaker#LabelingJobS3DataSource", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 location of the input data objects.

          " + } + }, + "SnsDataSource": { + "target": "com.amazonaws.sagemaker#LabelingJobSnsDataSource", + "traits": { + "smithy.api#documentation": "

          An Amazon SNS data source used for streaming labeling jobs.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides information about the location of input data.

          \n

          You must specify at least one of the following: S3DataSource or SnsDataSource.

          \n

          Use SnsDataSource to specify an SNS input topic\n for a streaming labeling job. If you do not specify \n and SNS input topic ARN, Ground Truth will create a one-time labeling job.

          \n

          Use S3DataSource to specify an input \n manifest file for both streaming and one-time labeling jobs.\n Adding an S3DataSource is optional if you use SnsDataSource to create a streaming labeling job.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobForWorkteamSummary": { + "type": "structure", + "members": { + "LabelingJobName": { + "target": "com.amazonaws.sagemaker#LabelingJobName", + "traits": { + "smithy.api#documentation": "

          The name of the labeling job that the work team is assigned to.

          " + } + }, + "JobReferenceCode": { + "target": "com.amazonaws.sagemaker#JobReferenceCode", + "traits": { + "smithy.api#documentation": "

          A unique identifier for a labeling job. You can use this to refer to a specific\n labeling job.

          ", + "smithy.api#required": {} + } + }, + "WorkRequesterAccountId": { + "target": "com.amazonaws.sagemaker#AccountId", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the labeling job was created.

          ", + "smithy.api#required": {} + } + }, + "LabelCounters": { + "target": "com.amazonaws.sagemaker#LabelCountersForWorkteam", + "traits": { + "smithy.api#documentation": "

          Provides information about the progress of a labeling job.

          " + } + }, + "NumberOfHumanWorkersPerDataObject": { + "target": "com.amazonaws.sagemaker#NumberOfHumanWorkersPerDataObject", + "traits": { + "smithy.api#documentation": "

          The configured number of workers per data object.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information for a work team.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobForWorkteamSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#LabelingJobForWorkteamSummary" + } + }, + "com.amazonaws.sagemaker#LabelingJobInputConfig": { + "type": "structure", + "members": { + "DataSource": { + "target": "com.amazonaws.sagemaker#LabelingJobDataSource", + "traits": { + "smithy.api#documentation": "

          The location of the input data.

          ", + "smithy.api#required": {} + } + }, + "DataAttributes": { + "target": "com.amazonaws.sagemaker#LabelingJobDataAttributes", + "traits": { + "smithy.api#documentation": "

          Attributes of the data specified by the customer.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Input configuration information for a labeling job.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" + } + }, + "com.amazonaws.sagemaker#LabelingJobOutput": { + "type": "structure", + "members": { + "OutputDatasetS3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 bucket location of the manifest file for labeled data.

          ", + "smithy.api#required": {} + } + }, + "FinalActiveLearningModelArn": { + "target": "com.amazonaws.sagemaker#ModelArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the most recent Amazon SageMaker model trained as part of\n automated data labeling.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Specifies the location of the output produced by the labeling job.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobOutputConfig": { + "type": "structure", + "members": { + "S3OutputPath": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 location to write output data.

          ", + "smithy.api#required": {} + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS Key Management Service ID of the key used to encrypt the output data, if any.

          \n

          If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must\n include permissions to call kms:Encrypt. If you don't provide a KMS key ID,\n Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side\n encryption with KMS-managed keys for LabelingJobOutputConfig. If you use a\n bucket policy with an s3:PutObject permission that only allows objects with\n server-side encryption, set the condition key of\n s3:x-amz-server-side-encryption to \"aws:kms\". For more\n information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer\n Guide.\n

          \n

          The KMS key policy must grant permission to the IAM role that you specify in your\n CreateLabelingJob request. For more information, see Using\n Key Policies in AWS KMS in the AWS Key Management Service Developer\n Guide.

          " + } + }, + "SnsTopicArn": { + "target": "com.amazonaws.sagemaker#SnsTopicArn", + "traits": { + "smithy.api#documentation": "

          An Amazon Simple Notification Service (Amazon SNS) output topic ARN.

          \n

          When workers complete labeling tasks, Ground Truth will send \n labeling task output data to the SNS output topic you specify here.

          \n

          You must provide a value for this parameter\n if you provide an Amazon SNS input topic in SnsDataSource\n in InputConfig.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Output configuration information for a labeling job.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobResourceConfig": { + "type": "structure", + "members": { + "VolumeKmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume\n attached to the ML compute instance(s) that run the training job. The\n VolumeKmsKeyId can be any of the following formats:

          \n
            \n
          • \n

            // KMS Key ID

            \n

            \n \"1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          • \n

            // Amazon Resource Name (ARN) of a KMS Key

            \n

            \n \"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides configuration information for labeling jobs.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobS3DataSource": { + "type": "structure", + "members": { + "ManifestS3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 location of the manifest file that describes the input data objects.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Amazon S3 location of the input data objects.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobSnsDataSource": { + "type": "structure", + "members": { + "SnsTopicArn": { + "target": "com.amazonaws.sagemaker#SnsTopicArn", + "traits": { + "smithy.api#documentation": "

          The Amazon SNS input topic Amazon Resource Name (ARN). Specify the ARN of the input topic\n you will use to send new data objects to a streaming labeling job.

          \n

          If you specify an input topic for SnsTopicArn in InputConfig,\n you must specify a value for SnsTopicArn in OutputConfig.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          An Amazon SNS data source used for streaming labeling jobs.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Initializing", + "name": "INITIALIZING" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Stopping", + "name": "STOPPING" + }, + { + "value": "Stopped", + "name": "STOPPED" + } + ] + } + }, + "com.amazonaws.sagemaker#LabelingJobStoppingConditions": { + "type": "structure", + "members": { + "MaxHumanLabeledObjectCount": { + "target": "com.amazonaws.sagemaker#MaxHumanLabeledObjectCount", + "traits": { + "smithy.api#documentation": "

          The maximum number of objects that can be labeled by human workers.

          " + } + }, + "MaxPercentageOfInputDatasetLabeled": { + "target": "com.amazonaws.sagemaker#MaxPercentageOfInputDatasetLabeled", + "traits": { + "smithy.api#documentation": "

          The maximum number of input data objects that should be labeled.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A set of conditions for stopping a labeling job. If any of the conditions are met, the\n job is automatically stopped. You can use these conditions to control the cost of data\n labeling.

          \n \n

          Labeling jobs fail after 30 days with an appropriate client error message.

          \n
          " + } + }, + "com.amazonaws.sagemaker#LabelingJobSummary": { + "type": "structure", + "members": { + "LabelingJobName": { + "target": "com.amazonaws.sagemaker#LabelingJobName", + "traits": { + "smithy.api#documentation": "

          The name of the labeling job.

          ", + "smithy.api#required": {} + } + }, + "LabelingJobArn": { + "target": "com.amazonaws.sagemaker#LabelingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) assigned to the labeling job when it was\n created.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the job was created (timestamp).

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The date and time that the job was last modified (timestamp).

          ", + "smithy.api#required": {} + } + }, + "LabelingJobStatus": { + "target": "com.amazonaws.sagemaker#LabelingJobStatus", + "traits": { + "smithy.api#documentation": "

          The current status of the labeling job.

          ", + "smithy.api#required": {} + } + }, + "LabelCounters": { + "target": "com.amazonaws.sagemaker#LabelCounters", + "traits": { + "smithy.api#documentation": "

          Counts showing the progress of the labeling job.

          ", + "smithy.api#required": {} + } + }, + "WorkteamArn": { + "target": "com.amazonaws.sagemaker#WorkteamArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team assigned to the job.

          ", + "smithy.api#required": {} + } + }, + "PreHumanTaskLambdaArn": { + "target": "com.amazonaws.sagemaker#LambdaFunctionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a Lambda function. The function is run before each\n data object is sent to a worker.

          ", + "smithy.api#required": {} + } + }, + "AnnotationConsolidationLambdaArn": { + "target": "com.amazonaws.sagemaker#LambdaFunctionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Lambda function used to consolidate the\n annotations from individual workers into a label for a data object. For more\n information, see Annotation\n Consolidation.

          " + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          If the LabelingJobStatus field is Failed, this field\n contains a description of the error.

          " + } + }, + "LabelingJobOutput": { + "target": "com.amazonaws.sagemaker#LabelingJobOutput", + "traits": { + "smithy.api#documentation": "

          The location of the output produced by the labeling job.

          " + } + }, + "InputConfig": { + "target": "com.amazonaws.sagemaker#LabelingJobInputConfig", + "traits": { + "smithy.api#documentation": "

          Input configuration for the labeling job.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information about a labeling job.

          " + } + }, + "com.amazonaws.sagemaker#LabelingJobSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#LabelingJobSummary" + } + }, + "com.amazonaws.sagemaker#LambdaFunctionArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:lambda:[a-z0-9\\-]*:[0-9]{12}:function:.*" + } + }, + "com.amazonaws.sagemaker#LastModifiedTime": { + "type": "timestamp" + }, + "com.amazonaws.sagemaker#LineageEntityParameters": { + "type": "map", + "key": { + "target": "com.amazonaws.sagemaker#StringParameterValue" + }, + "value": { + "target": "com.amazonaws.sagemaker#StringParameterValue" + }, + "traits": { + "smithy.api#length": { + "min": 0, + "max": 30 + } + } + }, + "com.amazonaws.sagemaker#ListActions": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListActionsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListActionsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the actions in your account and their properties.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListActionsRequest": { + "type": "structure", + "members": { + "SourceUri": { + "target": "com.amazonaws.sagemaker#SourceUri", + "traits": { + "smithy.api#documentation": "

          A filter that returns only actions with the specified source URI.

          " + } + }, + "ActionType": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          A filter that returns only actions of the specified type.

          " + } + }, + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only actions created on or after the specified time.

          " + } + }, + "CreatedBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only actions created on or before the specified time.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#SortActionsBy", + "traits": { + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous call to ListActions didn't return the full set of actions,\n the call returns a token for getting the next set of actions.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of actions to return in the response. The default value is 10.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListActionsResponse": { + "type": "structure", + "members": { + "ActionSummaries": { + "target": "com.amazonaws.sagemaker#ActionSummaries", + "traits": { + "smithy.api#documentation": "

          A list of actions and their properties.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token for getting the next set of actions, if there are any.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAlgorithms": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListAlgorithmsInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListAlgorithmsOutput" + }, + "traits": { + "smithy.api#documentation": "

          Lists the machine learning algorithms that have been created.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListAlgorithmsInput": { + "type": "structure", + "members": { + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only algorithms created after the specified time\n (timestamp).

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only algorithms created before the specified time\n (timestamp).

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of algorithms to return in the response.

          " + } + }, + "NameContains": { + "target": "com.amazonaws.sagemaker#NameContains", + "traits": { + "smithy.api#documentation": "

          A string in the algorithm name. This filter returns only algorithms whose name\n contains the specified string.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the response to a previous ListAlgorithms request was truncated, the\n response includes a NextToken. To retrieve the next set of algorithms, use\n the token in the next request.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#AlgorithmSortBy", + "traits": { + "smithy.api#documentation": "

          The parameter by which to sort the results. The default is\n CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAlgorithmsOutput": { + "type": "structure", + "members": { + "AlgorithmSummaryList": { + "target": "com.amazonaws.sagemaker#AlgorithmSummaryList", + "traits": { + "smithy.api#documentation": "

          An array of AlgorithmSummary objects, each of which lists an\n algorithm.

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n algorithms, use it in the subsequent request.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAppImageConfigs": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListAppImageConfigsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListAppImageConfigsResponse" + }, + "traits": { + "smithy.api#documentation": "

          Lists the AppImageConfigs in your account and their properties. The list can be\n filtered by creation time or modified time, and whether the AppImageConfig name contains\n a specified string.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListAppImageConfigsRequest": { + "type": "structure", + "members": { + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of AppImageConfigs to return in the response. The default value is\n 10.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous call to ListAppImageConfigs didn't return the full set of\n AppImageConfigs, the call returns a token for getting the next set of AppImageConfigs.

          " + } + }, + "NameContains": { + "target": "com.amazonaws.sagemaker#AppImageConfigName", + "traits": { + "smithy.api#documentation": "

          A filter that returns only AppImageConfigs whose name contains the specified string.

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only AppImageConfigs created on or before the specified time.

          " + } + }, + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only AppImageConfigs created on or after the specified time.

          " + } + }, + "ModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only AppImageConfigs modified on or before the specified time.

          " + } + }, + "ModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only AppImageConfigs modified on or after the specified time.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#AppImageConfigSortKey", + "traits": { + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAppImageConfigsResponse": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token for getting the next set of AppImageConfigs, if there are any.

          " + } + }, + "AppImageConfigs": { + "target": "com.amazonaws.sagemaker#AppImageConfigList", + "traits": { + "smithy.api#documentation": "

          A list of AppImageConfigs and their properties.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListApps": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListAppsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListAppsResponse" + }, + "traits": { + "smithy.api#documentation": "

          Lists apps.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListAppsRequest": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          Returns a list up to a specified limit.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#AppSortKey", + "traits": { + "smithy.api#documentation": "

          The parameter by which to sort the results. The default is CreationTime.

          " + } + }, + "DomainIdEquals": { + "target": "com.amazonaws.sagemaker#DomainId", + "traits": { + "smithy.api#documentation": "

          A parameter to search for the domain ID.

          " + } + }, + "UserProfileNameEquals": { + "target": "com.amazonaws.sagemaker#UserProfileName", + "traits": { + "smithy.api#documentation": "

          A parameter to search by user profile name.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAppsResponse": { + "type": "structure", + "members": { + "Apps": { + "target": "com.amazonaws.sagemaker#AppList", + "traits": { + "smithy.api#documentation": "

          The list of apps.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListArtifacts": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListArtifactsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListArtifactsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the artifacts in your account and their properties.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListArtifactsRequest": { + "type": "structure", + "members": { + "SourceUri": { + "target": "com.amazonaws.sagemaker#SourceUri", + "traits": { + "smithy.api#documentation": "

          A filter that returns only artifacts with the specified source URI.

          " + } + }, + "ArtifactType": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          A filter that returns only artifacts of the specified type.

          " + } + }, + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only artifacts created on or after the specified time.

          " + } + }, + "CreatedBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only artifacts created on or before the specified time.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#SortArtifactsBy", + "traits": { + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous call to ListArtifacts didn't return the full set of artifacts,\n the call returns a token for getting the next set of artifacts.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of artifacts to return in the response. The default value is 10.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListArtifactsResponse": { + "type": "structure", + "members": { + "ArtifactSummaries": { + "target": "com.amazonaws.sagemaker#ArtifactSummaries", + "traits": { + "smithy.api#documentation": "

          A list of artifacts and their properties.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token for getting the next set of artifacts, if there are any.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAssociations": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListAssociationsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListAssociationsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the associations in your account and their properties.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListAssociationsRequest": { + "type": "structure", + "members": { + "SourceArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations with the specified source ARN.

          " + } + }, + "DestinationArn": { + "target": "com.amazonaws.sagemaker#AssociationEntityArn", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations with the specified destination Amazon Resource Name (ARN).

          " + } + }, + "SourceType": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations with the specified source type.

          " + } + }, + "DestinationType": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations with the specified destination type.

          " + } + }, + "AssociationType": { + "target": "com.amazonaws.sagemaker#AssociationEdgeType", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations of the specified type.

          " + } + }, + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations created on or after the specified time.

          " + } + }, + "CreatedBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only associations created on or before the specified time.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#SortAssociationsBy", + "traits": { + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous call to ListAssociations didn't return the full set of associations,\n the call returns a token for getting the next set of associations.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of associations to return in the response. The default value is 10.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAssociationsResponse": { + "type": "structure", + "members": { + "AssociationSummaries": { + "target": "com.amazonaws.sagemaker#AssociationSummaries", + "traits": { + "smithy.api#documentation": "

          A list of associations and their properties.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token for getting the next set of associations, if there are any.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAutoMLJobs": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListAutoMLJobsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListAutoMLJobsResponse" + }, + "traits": { + "smithy.api#documentation": "

          Request a list of jobs.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListAutoMLJobsRequest": { + "type": "structure", + "members": { + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + } + }, + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + } + }, + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + } + }, + "NameContains": { + "target": "com.amazonaws.sagemaker#AutoMLNameContains", + "traits": { + "smithy.api#documentation": "

          Request a list of jobs, using a search filter for name.

          " + } + }, + "StatusEquals": { + "target": "com.amazonaws.sagemaker#AutoMLJobStatus", + "traits": { + "smithy.api#documentation": "

          Request a list of jobs, using a filter for status.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#AutoMLSortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for the results. The default is Descending.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#AutoMLSortBy", + "traits": { + "smithy.api#documentation": "

          The parameter by which to sort the results. The default is AutoMLJobName.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#AutoMLMaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          Request a list of jobs up to a specified limit.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListAutoMLJobsResponse": { + "type": "structure", + "members": { + "AutoMLJobSummaries": { + "target": "com.amazonaws.sagemaker#AutoMLJobSummaries", + "traits": { + "smithy.api#documentation": "

          Returns a summary list of jobs.

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListCandidatesForAutoMLJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          List the Candidates created for the job.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobRequest": { + "type": "structure", + "members": { + "AutoMLJobName": { + "target": "com.amazonaws.sagemaker#AutoMLJobName", + "traits": { + "smithy.api#documentation": "

          List the Candidates created for the job by providing the job's name.

          ", + "smithy.api#required": {} + } + }, + "StatusEquals": { + "target": "com.amazonaws.sagemaker#CandidateStatus", + "traits": { + "smithy.api#documentation": "

          List the Candidates for the job and filter by status.

          " + } + }, + "CandidateNameEquals": { + "target": "com.amazonaws.sagemaker#CandidateName", + "traits": { + "smithy.api#documentation": "

          List the Candidates for the job and filter by candidate name.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#AutoMLSortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " } - ] + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#CandidateSortBy", + "traits": { + "smithy.api#documentation": "

          The parameter by which to sort the results. The default is Descending.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#AutoMLMaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          List the job's Candidates up to a specified limit.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + } + } } }, - "com.amazonaws.sagemaker#ImageUri": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 255 + "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobResponse": { + "type": "structure", + "members": { + "Candidates": { + "target": "com.amazonaws.sagemaker#AutoMLCandidates", + "traits": { + "smithy.api#documentation": "

          Summaries about the Candidates.

          ", + "smithy.api#required": {} + } }, - "smithy.api#pattern": ".*" + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + } + } } }, - "com.amazonaws.sagemaker#ImageVersion": { + "com.amazonaws.sagemaker#ListCodeRepositories": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListCodeRepositoriesInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListCodeRepositoriesOutput" + }, + "traits": { + "smithy.api#documentation": "

          Gets a list of the Git repositories in your account.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListCodeRepositoriesInput": { "type": "structure", "members": { - "CreationTime": { + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only Git repositories that were created after the specified\n time.

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only Git repositories that were created before the specified\n time.

          " + } + }, + "LastModifiedTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When the version was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only Git repositories that were last modified after the\n specified time.

          " } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          When a create or delete operation fails, the reason for the failure.

          " + "smithy.api#documentation": "

          A filter that returns only Git repositories that were last modified before the\n specified time.

          " } }, - "ImageArn": { - "target": "com.amazonaws.sagemaker#ImageArn", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the image the version is based on.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The maximum number of Git repositories to return in the response.

          " } }, - "ImageVersionArn": { - "target": "com.amazonaws.sagemaker#ImageVersionArn", + "NameContains": { + "target": "com.amazonaws.sagemaker#CodeRepositoryNameContains", "traits": { - "smithy.api#documentation": "

          The ARN of the version.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A string in the Git repositories name. This filter returns only repositories whose\n name contains the specified string.

          " } }, - "ImageVersionStatus": { - "target": "com.amazonaws.sagemaker#ImageVersionStatus", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The status of the version.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If the result of a ListCodeRepositoriesOutput request was truncated, the\n response includes a NextToken. To get the next set of Git repositories, use\n the token in the next request.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#CodeRepositorySortBy", "traits": { - "smithy.api#documentation": "

          When the version was last modified.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The field to sort results by. The default is Name.

          " } }, - "Version": { - "target": "com.amazonaws.sagemaker#ImageVersionNumber", + "SortOrder": { + "target": "com.amazonaws.sagemaker#CodeRepositorySortOrder", "traits": { - "smithy.api#documentation": "

          The version number.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          A version of a SageMaker Image. A version represents an existing container\n image.

          " } }, - "com.amazonaws.sagemaker#ImageVersionArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 + "com.amazonaws.sagemaker#ListCodeRepositoriesOutput": { + "type": "structure", + "members": { + "CodeRepositorySummaryList": { + "target": "com.amazonaws.sagemaker#CodeRepositorySummaryList", + "traits": { + "smithy.api#documentation": "

          Gets a list of summaries of the Git repositories. Each summary specifies the following\n values for the repository:

          \n
            \n
          • \n

            Name

            \n
          • \n
          • \n

            Amazon Resource Name (ARN)

            \n
          • \n
          • \n

            Creation time

            \n
          • \n
          • \n

            Last modified time

            \n
          • \n
          • \n

            Configuration information, including the URL location of the repository and\n the ARN of the AWS Secrets Manager secret that contains the credentials used\n to access the repository.

            \n
          • \n
          ", + "smithy.api#required": {} + } }, - "smithy.api#pattern": "^arn:aws(-[\\w]+)*:sagemaker:.+:[0-9]{12}:image-version/[a-z0-9]([-.]?[a-z0-9])*/[0-9]+$" + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the result of a ListCodeRepositoriesOutput request was truncated, the\n response includes a NextToken. To get the next set of Git repositories, use\n the token in the next request.

          " + } + } } }, - "com.amazonaws.sagemaker#ImageVersionNumber": { - "type": "integer", + "com.amazonaws.sagemaker#ListCompilationJobs": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListCompilationJobsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListCompilationJobsResponse" + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 0 + "smithy.api#documentation": "

          Lists model compilation jobs that satisfy various filters.

          \n

          To create a model compilation job, use CreateCompilationJob. To get\n information about a particular model compilation job you have created, use DescribeCompilationJob.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" } } }, - "com.amazonaws.sagemaker#ImageVersionSortBy": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "CREATION_TIME", - "name": "CREATION_TIME" - }, - { - "value": "LAST_MODIFIED_TIME", - "name": "LAST_MODIFIED_TIME" - }, - { - "value": "VERSION", - "name": "VERSION" + "com.amazonaws.sagemaker#ListCompilationJobsRequest": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the result of the previous ListCompilationJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of model\n compilation jobs, use the token in the next request.

          " } - ] - } - }, - "com.amazonaws.sagemaker#ImageVersionSortOrder": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "ASCENDING", - "name": "ASCENDING" - }, - { - "value": "DESCENDING", - "name": "DESCENDING" + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of model compilation jobs to return in the response.

          " } - ] - } - }, - "com.amazonaws.sagemaker#ImageVersionStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "CREATING", - "name": "CREATING" - }, - { - "value": "CREATED", - "name": "CREATED" - }, - { - "value": "CREATE_FAILED", - "name": "CREATE_FAILED" - }, - { - "value": "DELETING", - "name": "DELETING" - }, - { - "value": "DELETE_FAILED", - "name": "DELETE_FAILED" + }, + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were created after a specified\n time.

          " } - ] - } - }, - "com.amazonaws.sagemaker#ImageVersions": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ImageVersion" - } - }, - "com.amazonaws.sagemaker#Images": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#Image" - } - }, - "com.amazonaws.sagemaker#InferenceSpecification": { - "type": "structure", - "members": { - "Containers": { - "target": "com.amazonaws.sagemaker#ModelPackageContainerDefinitionList", + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The Amazon ECR registry path of the Docker image that contains the inference code.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were created before a specified\n time.

          " } }, - "SupportedTransformInstanceTypes": { - "target": "com.amazonaws.sagemaker#TransformInstanceTypes", + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          A list of the instance types on which a transformation job can be run or on which an\n endpoint can be deployed.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were modified after a specified\n time.

          " } }, - "SupportedRealtimeInferenceInstanceTypes": { - "target": "com.amazonaws.sagemaker#RealtimeInferenceInstanceTypes", + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          A list of the instance types that are used to generate inferences in real-time.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were modified before a specified\n time.

          " } }, - "SupportedContentTypes": { - "target": "com.amazonaws.sagemaker#ContentTypes", + "NameContains": { + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          The supported MIME types for the input data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns the model compilation jobs whose name contains a specified\n string.

          " } }, - "SupportedResponseMIMETypes": { - "target": "com.amazonaws.sagemaker#ResponseMIMETypes", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#CompilationJobStatus", "traits": { - "smithy.api#documentation": "

          The supported MIME types for the output data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that retrieves model compilation jobs with a specific DescribeCompilationJobResponse$CompilationJobStatus status.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#ListCompilationJobsSortBy", + "traits": { + "smithy.api#documentation": "

          The field by which to sort results. The default is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Defines how to perform inference generation after a training job is run.

          " } }, - "com.amazonaws.sagemaker#InputConfig": { + "com.amazonaws.sagemaker#ListCompilationJobsResponse": { "type": "structure", "members": { - "S3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", - "traits": { - "smithy.api#documentation": "

          The S3 path where the model artifacts, which result from model training, are stored.\n This path must point to a single gzip compressed tar archive (.tar.gz suffix).

          ", - "smithy.api#required": {} - } - }, - "DataInputConfig": { - "target": "com.amazonaws.sagemaker#DataInputConfig", + "CompilationJobSummaries": { + "target": "com.amazonaws.sagemaker#CompilationJobSummaries", "traits": { - "smithy.api#documentation": "

          Specifies the name and shape of the expected data inputs for your trained model with a\n JSON dictionary form. The data inputs are InputConfig$Framework\n specific.

          \n
            \n
          • \n

            \n TensorFlow: You must specify the name and shape (NHWC format) of\n the expected data inputs using a dictionary format for your trained model. The\n dictionary formats required for the console and CLI are different.

            \n
              \n
            • \n

              Examples for one input:

              \n
                \n
              • \n

                If using the console,\n {\"input\":[1,1024,1024,3]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"input\\\":[1,1024,1024,3]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Examples for two inputs:

              \n
                \n
              • \n

                If using the console, {\"data1\": [1,28,28,1],\n \"data2\":[1,28,28,1]}\n

                \n
              • \n
              • \n

                If using the CLI, {\\\"data1\\\": [1,28,28,1],\n \\\"data2\\\":[1,28,28,1]}\n

                \n
              • \n
              \n
            • \n
            \n
          • \n
          • \n

            \n KERAS: You must specify the name and shape (NCHW format) of\n expected data inputs using a dictionary format for your trained model. Note that\n while Keras model artifacts should be uploaded in NHWC (channel-last) format,\n DataInputConfig should be specified in NCHW (channel-first)\n format. The dictionary formats required for the console and CLI are\n different.

            \n
              \n
            • \n

              Examples for one input:

              \n
                \n
              • \n

                If using the console,\n {\"input_1\":[1,3,224,224]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"input_1\\\":[1,3,224,224]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Examples for two inputs:

              \n
                \n
              • \n

                If using the console, {\"input_1\": [1,3,224,224],\n \"input_2\":[1,3,224,224]} \n

                \n
              • \n
              • \n

                If using the CLI, {\\\"input_1\\\": [1,3,224,224],\n \\\"input_2\\\":[1,3,224,224]}\n

                \n
              • \n
              \n
            • \n
            \n
          • \n
          • \n

            \n MXNET/ONNX/DARKNET: You must specify the name and shape (NCHW format) of\n the expected data inputs in order using a dictionary format for your trained\n model. The dictionary formats required for the console and CLI are\n different.

            \n
              \n
            • \n

              Examples for one input:

              \n
                \n
              • \n

                If using the console,\n {\"data\":[1,3,1024,1024]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"data\\\":[1,3,1024,1024]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Examples for two inputs:

              \n
                \n
              • \n

                If using the console, {\"var1\": [1,1,28,28],\n \"var2\":[1,1,28,28]} \n

                \n
              • \n
              • \n

                If using the CLI, {\\\"var1\\\": [1,1,28,28],\n \\\"var2\\\":[1,1,28,28]}\n

                \n
              • \n
              \n
            • \n
            \n
          • \n
          • \n

            \n PyTorch: You can either specify the name and shape (NCHW format)\n of expected data inputs in order using a dictionary format for your trained\n model or you can specify the shape only using a list format. The dictionary\n formats required for the console and CLI are different. The list formats for the\n console and CLI are the same.

            \n
              \n
            • \n

              Examples for one input in dictionary format:

              \n
                \n
              • \n

                If using the console,\n {\"input0\":[1,3,224,224]}\n

                \n
              • \n
              • \n

                If using the CLI,\n {\\\"input0\\\":[1,3,224,224]}\n

                \n
              • \n
              \n
            • \n
            • \n

              Example for one input in list format:\n [[1,3,224,224]]\n

              \n
            • \n
            • \n

              Examples for two inputs in dictionary format:

              \n
                \n
              • \n

                If using the console, {\"input0\":[1,3,224,224],\n \"input1\":[1,3,224,224]}\n

                \n
              • \n
              • \n

                If using the CLI, {\\\"input0\\\":[1,3,224,224],\n \\\"input1\\\":[1,3,224,224]} \n

                \n
              • \n
              \n
            • \n
            • \n

              Example for two inputs in list format: [[1,3,224,224],\n [1,3,224,224]]\n

              \n
            • \n
            \n
          • \n
          • \n

            \n XGBOOST: input data name and shape are not needed.

            \n
          • \n
          \n

          \n DataInputConfig supports the following parameters for CoreML\n OutputConfig$TargetDevice (ML Model format):

          \n
            \n
          • \n

            \n shape: Input shape, for example {\"input_1\": {\"shape\": [1,224,224,3]}}.\n In addition to static input shapes, CoreML converter supports Flexible input shapes:

            \n
              \n
            • \n

              Range Dimension. You can use the Range Dimension feature if you know the input shape\n will be within some specific interval in that dimension,\n for example: {\"input_1\": {\"shape\": [\"1..10\", 224, 224, 3]}}\n

              \n
            • \n
            • \n

              Enumerated shapes. Sometimes, the models are trained to work only on a select\n set of inputs. You can enumerate all supported input shapes,\n for example: {\"input_1\": {\"shape\": [[1, 224, 224, 3], [1, 160, 160, 3]]}}\n

              \n
            • \n
            \n
          • \n
          • \n

            \n default_shape: Default input shape. You can set a default shape during\n conversion for both Range Dimension and Enumerated Shapes. For example\n {\"input_1\": {\"shape\": [\"1..10\", 224, 224, 3], \"default_shape\": [1, 224, 224, 3]}}\n

            \n
          • \n
          • \n

            \n type: Input type. Allowed values: Image and Tensor.\n By default, the converter generates an ML Model with inputs of type Tensor (MultiArray).\n User can set input type to be Image. Image input type requires additional input parameters\n such as bias and scale.

            \n
          • \n
          • \n

            \n bias: If the input type is an Image, you need to provide the bias vector.

            \n
          • \n
          • \n

            \n scale: If the input type is an Image, you need to provide a scale factor.

            \n
          • \n
          \n

          CoreML ClassifierConfig parameters can be specified using\n OutputConfig$CompilerOptions. CoreML converter supports Tensorflow and PyTorch models.\n CoreML conversion examples:

          \n
            \n
          • \n

            Tensor type input:

            \n
              \n
            • \n

              \n \"DataInputConfig\": {\"input_1\": {\"shape\": [[1,224,224,3], [1,160,160,3]], \"default_shape\":\n [1,224,224,3]}}\n

              \n
            • \n
            \n
          • \n
          • \n

            Tensor type input without input name (PyTorch):

            \n
              \n
            • \n

              \n \"DataInputConfig\": [{\"shape\": [[1,3,224,224], [1,3,160,160]], \"default_shape\":\n [1,3,224,224]}]\n

              \n
            • \n
            \n
          • \n
          • \n

            Image type input:

            \n
              \n
            • \n

              \n \"DataInputConfig\": {\"input_1\": {\"shape\": [[1,224,224,3], [1,160,160,3]], \"default_shape\":\n [1,224,224,3], \"type\": \"Image\", \"bias\": [-1,-1,-1], \"scale\": 0.007843137255}}\n

              \n
            • \n
            • \n

              \n \"CompilerOptions\": {\"class_labels\": \"imagenet_labels_1000.txt\"}\n

              \n
            • \n
            \n
          • \n
          • \n

            Image type input without input name (PyTorch):

            \n
              \n
            • \n

              \n \"DataInputConfig\": [{\"shape\": [[1,3,224,224], [1,3,160,160]], \"default_shape\":\n [1,3,224,224], \"type\": \"Image\", \"bias\": [-1,-1,-1], \"scale\": 0.007843137255}]\n

              \n
            • \n
            • \n

              \n \"CompilerOptions\": {\"class_labels\": \"imagenet_labels_1000.txt\"}\n

              \n
            • \n
            \n
          • \n
          ", + "smithy.api#documentation": "

          An array of CompilationJobSummary objects, each describing a model\n compilation job.

          ", "smithy.api#required": {} } }, - "Framework": { - "target": "com.amazonaws.sagemaker#Framework", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          Identifies the framework in which the model was trained. For example:\n TENSORFLOW.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this NextToken. To retrieve\n the next set of model compilation jobs, use this token in the next request.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Contains information about the location of input model artifacts, the name and\n shape\n of the expected data inputs, and the framework in which the model was trained.

          " - } - }, - "com.amazonaws.sagemaker#InputDataConfig": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#Channel" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#InputModes": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#TrainingInputMode" - }, - "traits": { - "smithy.api#length": { - "min": 1 - } } }, - "com.amazonaws.sagemaker#InstanceType": { + "com.amazonaws.sagemaker#ListCompilationJobsSortBy": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "ml.t2.medium", - "name": "ML_T2_MEDIUM" - }, - { - "value": "ml.t2.large", - "name": "ML_T2_LARGE" - }, - { - "value": "ml.t2.xlarge", - "name": "ML_T2_XLARGE" - }, - { - "value": "ml.t2.2xlarge", - "name": "ML_T2_2XLARGE" - }, - { - "value": "ml.t3.medium", - "name": "ML_T3_MEDIUM" - }, - { - "value": "ml.t3.large", - "name": "ML_T3_LARGE" - }, - { - "value": "ml.t3.xlarge", - "name": "ML_T3_XLARGE" - }, - { - "value": "ml.t3.2xlarge", - "name": "ML_T3_2XLARGE" - }, - { - "value": "ml.m4.xlarge", - "name": "ML_M4_XLARGE" - }, - { - "value": "ml.m4.2xlarge", - "name": "ML_M4_2XLARGE" - }, - { - "value": "ml.m4.4xlarge", - "name": "ML_M4_4XLARGE" - }, - { - "value": "ml.m4.10xlarge", - "name": "ML_M4_10XLARGE" - }, - { - "value": "ml.m4.16xlarge", - "name": "ML_M4_16XLARGE" - }, - { - "value": "ml.m5.xlarge", - "name": "ML_M5_XLARGE" - }, - { - "value": "ml.m5.2xlarge", - "name": "ML_M5_2XLARGE" - }, - { - "value": "ml.m5.4xlarge", - "name": "ML_M5_4XLARGE" - }, - { - "value": "ml.m5.12xlarge", - "name": "ML_M5_12XLARGE" - }, - { - "value": "ml.m5.24xlarge", - "name": "ML_M5_24XLARGE" - }, - { - "value": "ml.c4.xlarge", - "name": "ML_C4_XLARGE" - }, - { - "value": "ml.c4.2xlarge", - "name": "ML_C4_2XLARGE" - }, - { - "value": "ml.c4.4xlarge", - "name": "ML_C4_4XLARGE" - }, - { - "value": "ml.c4.8xlarge", - "name": "ML_C4_8XLARGE" - }, - { - "value": "ml.c5.xlarge", - "name": "ML_C5_XLARGE" 
- }, - { - "value": "ml.c5.2xlarge", - "name": "ML_C5_2XLARGE" - }, - { - "value": "ml.c5.4xlarge", - "name": "ML_C5_4XLARGE" - }, - { - "value": "ml.c5.9xlarge", - "name": "ML_C5_9XLARGE" - }, - { - "value": "ml.c5.18xlarge", - "name": "ML_C5_18XLARGE" - }, - { - "value": "ml.c5d.xlarge", - "name": "ML_C5D_XLARGE" - }, - { - "value": "ml.c5d.2xlarge", - "name": "ML_C5D_2XLARGE" - }, - { - "value": "ml.c5d.4xlarge", - "name": "ML_C5D_4XLARGE" - }, - { - "value": "ml.c5d.9xlarge", - "name": "ML_C5D_9XLARGE" - }, - { - "value": "ml.c5d.18xlarge", - "name": "ML_C5D_18XLARGE" - }, - { - "value": "ml.p2.xlarge", - "name": "ML_P2_XLARGE" - }, - { - "value": "ml.p2.8xlarge", - "name": "ML_P2_8XLARGE" - }, - { - "value": "ml.p2.16xlarge", - "name": "ML_P2_16XLARGE" - }, - { - "value": "ml.p3.2xlarge", - "name": "ML_P3_2XLARGE" + "value": "Name", + "name": "NAME" }, { - "value": "ml.p3.8xlarge", - "name": "ML_P3_8XLARGE" + "value": "CreationTime", + "name": "CREATION_TIME" }, { - "value": "ml.p3.16xlarge", - "name": "ML_P3_16XLARGE" + "value": "Status", + "name": "STATUS" } ] } }, - "com.amazonaws.sagemaker#IntegerParameterRange": { + "com.amazonaws.sagemaker#ListContexts": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListContextsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListContextsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Lists the contexts in your account and their properties.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListContextsRequest": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.sagemaker#ParameterKey", + "SourceUri": { + "target": "com.amazonaws.sagemaker#SourceUri", "traits": { - "smithy.api#documentation": "

          The name of the hyperparameter to search.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only contexts with the specified source URI.

          " } }, - "MinValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "ContextType": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          The minimum\n value\n of the hyperparameter to search.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only contexts of the specified type.

          " } }, - "MaxValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The maximum\n value\n of the hyperparameter to search.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only contexts created on or after the specified time.

          " } }, - "ScalingType": { - "target": "com.amazonaws.sagemaker#HyperParameterScalingType", + "CreatedBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The scale that hyperparameter tuning uses to search the hyperparameter range. For\n information about choosing a hyperparameter scale, see Hyperparameter Scaling. One of the following values:

          \n
          \n
          Auto
          \n
          \n

          Amazon SageMaker hyperparameter tuning chooses the best scale for the\n hyperparameter.

          \n
          \n
          Linear
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a linear scale.

          \n
          \n
          Logarithmic
          \n
          \n

          Hyperparameter tuning searches the values in the hyperparameter range by\n using a logarithmic scale.

          \n

          Logarithmic scaling works only for ranges that have only values greater\n than 0.

          \n
          \n
          " + "smithy.api#documentation": "

          A filter that returns only contexts created on or before the specified time.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#SortContextsBy", + "traits": { + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous call to ListContexts didn't return the full set of contexts,\n the call returns a token for getting the next set of contexts.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of contexts to return in the response. The default value is 10.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          For a hyperparameter of the integer type, specifies the range\n that\n a hyperparameter tuning job searches.

          " } }, - "com.amazonaws.sagemaker#IntegerParameterRangeSpecification": { + "com.amazonaws.sagemaker#ListContextsResponse": { "type": "structure", "members": { - "MinValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "ContextSummaries": { + "target": "com.amazonaws.sagemaker#ContextSummaries", "traits": { - "smithy.api#documentation": "

          The minimum integer value allowed.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of contexts and their properties.

          " } }, - "MaxValue": { - "target": "com.amazonaws.sagemaker#ParameterValue", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The maximum integer value allowed.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A token for getting the next set of contexts, if there are any.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Defines the possible values for an integer hyperparameter.

          " } }, - "com.amazonaws.sagemaker#IntegerParameterRanges": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#IntegerParameterRange" + "com.amazonaws.sagemaker#ListDomains": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListDomainsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListDomainsResponse" }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 20 - } - } - }, - "com.amazonaws.sagemaker#InvocationsMaxRetries": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 0, - "max": 3 - } - } - }, - "com.amazonaws.sagemaker#InvocationsTimeoutInSeconds": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1, - "max": 3600 + "smithy.api#documentation": "

          Lists the domains.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" } } }, - "com.amazonaws.sagemaker#JobReferenceCode": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1 - }, - "smithy.api#pattern": ".+" - } - }, - "com.amazonaws.sagemaker#JobReferenceCodeContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 255 - }, - "smithy.api#pattern": ".+" - } - }, - "com.amazonaws.sagemaker#JoinSource": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Input", - "name": "INPUT" - }, - { - "value": "None", - "name": "NONE" + "com.amazonaws.sagemaker#ListDomainsRequest": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " } - ] - } - }, - "com.amazonaws.sagemaker#JsonContentType": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 256 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*\\/[a-zA-Z0-9](-*[a-zA-Z0-9.])*" - } - }, - "com.amazonaws.sagemaker#JsonContentTypes": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#JsonContentType" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 10 - } - } - }, - "com.amazonaws.sagemaker#JsonPath": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          Returns a list up to a specified limit.

          " + } } } }, - "com.amazonaws.sagemaker#JupyterServerAppSettings": { + "com.amazonaws.sagemaker#ListDomainsResponse": { "type": "structure", "members": { - "DefaultResourceSpec": { - "target": "com.amazonaws.sagemaker#ResourceSpec", + "Domains": { + "target": "com.amazonaws.sagemaker#DomainList", "traits": { - "smithy.api#documentation": "

          The default instance type and the Amazon Resource Name (ARN) of the default SageMaker image used by the JupyterServer app.

          " + "smithy.api#documentation": "

          The list of domains.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          The JupyterServer app settings.

          " } }, - "com.amazonaws.sagemaker#KernelDisplayName": { - "type": "string", + "com.amazonaws.sagemaker#ListEndpointConfigs": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListEndpointConfigsInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListEndpointConfigsOutput" + }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 + "smithy.api#documentation": "

          Lists endpoint configurations.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" } } }, - "com.amazonaws.sagemaker#KernelGatewayAppSettings": { + "com.amazonaws.sagemaker#ListEndpointConfigsInput": { "type": "structure", "members": { - "DefaultResourceSpec": { - "target": "com.amazonaws.sagemaker#ResourceSpec", + "SortBy": { + "target": "com.amazonaws.sagemaker#EndpointConfigSortKey", "traits": { - "smithy.api#documentation": "

          The default instance type and the Amazon Resource Name (ARN) of the default SageMaker image used by the KernelGateway app.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " } }, - "CustomImages": { - "target": "com.amazonaws.sagemaker#CustomImages", + "SortOrder": { + "target": "com.amazonaws.sagemaker#OrderKey", + "traits": { + "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the result of the previous ListEndpointConfig request was\n truncated, the response includes a NextToken. To retrieve the next set of\n endpoint configurations, use the token in the next request.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of training jobs to return in the response.

          " + } + }, + "NameContains": { + "target": "com.amazonaws.sagemaker#EndpointConfigNameContains", + "traits": { + "smithy.api#documentation": "

          A string in the endpoint configuration name. This filter returns only endpoint\n configurations whose name contains the specified string.

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only endpoint configurations created before the specified\n time (timestamp).

          " + } + }, + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A list of custom SageMaker images that are configured to run as a KernelGateway app.

          " + "smithy.api#documentation": "

          A filter that returns only endpoint configurations with a creation time greater\n than or equal to the specified time (timestamp).

          " } } - }, - "traits": { - "smithy.api#documentation": "

          The KernelGateway app settings.

          " } }, - "com.amazonaws.sagemaker#KernelGatewayImageConfig": { + "com.amazonaws.sagemaker#ListEndpointConfigsOutput": { "type": "structure", "members": { - "KernelSpecs": { - "target": "com.amazonaws.sagemaker#KernelSpecs", + "EndpointConfigs": { + "target": "com.amazonaws.sagemaker#EndpointConfigSummaryList", "traits": { - "smithy.api#documentation": "

          The specification of the Jupyter kernels in the image.

          ", + "smithy.api#documentation": "

          An array of endpoint configurations.

          ", "smithy.api#required": {} } }, - "FileSystemConfig": { - "target": "com.amazonaws.sagemaker#FileSystemConfig", + "NextToken": { + "target": "com.amazonaws.sagemaker#PaginationToken", "traits": { - "smithy.api#documentation": "

          The Amazon Elastic File System (EFS) storage configuration for a SageMaker image.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n endpoint configurations, use it in the subsequent request

          " } } - }, - "traits": { - "smithy.api#documentation": "

          The configuration for the file system and kernels in a SageMaker image running as a\n KernelGateway app.

          " } }, - "com.amazonaws.sagemaker#KernelName": { - "type": "string", + "com.amazonaws.sagemaker#ListEndpoints": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListEndpointsInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListEndpointsOutput" + }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 + "smithy.api#documentation": "

          Lists endpoints.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" } } }, - "com.amazonaws.sagemaker#KernelSpec": { + "com.amazonaws.sagemaker#ListEndpointsInput": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.sagemaker#KernelName", + "SortBy": { + "target": "com.amazonaws.sagemaker#EndpointSortKey", "traits": { - "smithy.api#documentation": "

          The name of the kernel.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Sorts the list of results. The default is CreationTime.

          " } }, - "DisplayName": { - "target": "com.amazonaws.sagemaker#KernelDisplayName", + "SortOrder": { + "target": "com.amazonaws.sagemaker#OrderKey", "traits": { - "smithy.api#documentation": "

          The display name of the kernel.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          The specification of a Jupyter kernel.

          " - } - }, - "com.amazonaws.sagemaker#KernelSpecs": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#KernelSpec" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 1 - } - } - }, - "com.amazonaws.sagemaker#KmsKeyId": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 2048 }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#LabelAttributeName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 127 + "NextToken": { + "target": "com.amazonaws.sagemaker#PaginationToken", + "traits": { + "smithy.api#documentation": "

          If the result of a ListEndpoints request was truncated, the response\n includes a NextToken. To retrieve the next set of endpoints, use the token\n in the next request.

          " + } }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#LabelCounter": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 0 - } - } - }, - "com.amazonaws.sagemaker#LabelCounters": { - "type": "structure", - "members": { - "TotalLabeled": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The total number of objects labeled.

          " + "smithy.api#documentation": "

          The maximum number of endpoints to return in the response.

          " } }, - "HumanLabeled": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "NameContains": { + "target": "com.amazonaws.sagemaker#EndpointNameContains", "traits": { - "smithy.api#documentation": "

          The total number of objects labeled by a human worker.

          " + "smithy.api#documentation": "

          A string in endpoint names. This filter returns only endpoints whose name contains\n the specified string.

          " } }, - "MachineLabeled": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The total number of objects labeled by automated data labeling.

          " + "smithy.api#documentation": "

          A filter that returns only endpoints that were created before the specified time\n (timestamp).

          " } }, - "FailedNonRetryableError": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The total number of objects that could not be labeled due to an error.

          " + "smithy.api#documentation": "

          A filter that returns only endpoints with a creation time greater than or equal to\n the specified time (timestamp).

          " } }, - "Unlabeled": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The total number of objects not yet labeled.

          " + "smithy.api#documentation": "

          A filter that returns only endpoints that were modified before the specified\n timestamp.

          " + } + }, + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only endpoints that were modified after the specified\n timestamp.

          " + } + }, + "StatusEquals": { + "target": "com.amazonaws.sagemaker#EndpointStatus", + "traits": { + "smithy.api#documentation": "

          A filter that returns only endpoints with the specified status.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Provides a breakdown of the number of objects labeled.

          " } }, - "com.amazonaws.sagemaker#LabelCountersForWorkteam": { + "com.amazonaws.sagemaker#ListEndpointsOutput": { "type": "structure", "members": { - "HumanLabeled": { - "target": "com.amazonaws.sagemaker#LabelCounter", - "traits": { - "smithy.api#documentation": "

          The total number of data objects labeled by a human worker.

          " - } - }, - "PendingHuman": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "Endpoints": { + "target": "com.amazonaws.sagemaker#EndpointSummaryList", "traits": { - "smithy.api#documentation": "

          The total number of data objects that need to be labeled by a human worker.

          " + "smithy.api#documentation": "

          An array or endpoint objects.

          ", + "smithy.api#required": {} } }, - "Total": { - "target": "com.amazonaws.sagemaker#LabelCounter", + "NextToken": { + "target": "com.amazonaws.sagemaker#PaginationToken", "traits": { - "smithy.api#documentation": "

          The total number of tasks in the labeling job.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n training jobs, use it in the subsequent request.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Provides counts for human-labeled tasks in the labeling job.

          " } }, - "com.amazonaws.sagemaker#LabelingJobAlgorithmSpecificationArn": { - "type": "string", + "com.amazonaws.sagemaker#ListExperiments": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListExperimentsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListExperimentsResponse" + }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 2048 - }, - "smithy.api#pattern": "arn:.*" + "smithy.api#documentation": "

          Lists all the experiments in your account. The list can be filtered to show only\n experiments that were created in a specific time range. The list can be sorted by experiment\n name or creation time.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } } }, - "com.amazonaws.sagemaker#LabelingJobAlgorithmsConfig": { + "com.amazonaws.sagemaker#ListExperimentsRequest": { "type": "structure", "members": { - "LabelingJobAlgorithmSpecificationArn": { - "target": "com.amazonaws.sagemaker#LabelingJobAlgorithmSpecificationArn", + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Specifies the Amazon Resource Name (ARN) of the algorithm used for auto-labeling. You\n must select one of the following ARNs:

          \n
            \n
          • \n

            \n Image classification\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/image-classification\n

            \n
          • \n
          • \n

            \n Text classification\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/text-classification\n

            \n
          • \n
          • \n

            \n Object detection\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/object-detection\n

            \n
          • \n
          • \n

            \n Semantic Segmentation\n

            \n

            \n arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/semantic-segmentation\n

            \n
          • \n
          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only experiments created after the specified time.

          " } }, - "InitialActiveLearningModelArn": { - "target": "com.amazonaws.sagemaker#ModelArn", + "CreatedBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          At the end of an auto-label job Ground Truth sends the Amazon Resource Name (ARN) of the final\n model used for auto-labeling. You can use this model as the starting point for\n subsequent similar jobs by providing the ARN of the model here.

          " + "smithy.api#documentation": "

          A filter that returns only experiments created before the specified time.

          " } }, - "LabelingJobResourceConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobResourceConfig", + "SortBy": { + "target": "com.amazonaws.sagemaker#SortExperimentsBy", "traits": { - "smithy.api#documentation": "

          Provides configuration information for a labeling job.

          " + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Provides configuration information for auto-labeling of your data objects. A\n LabelingJobAlgorithmsConfig object must be supplied in order to use\n auto-labeling.

          " - } - }, - "com.amazonaws.sagemaker#LabelingJobArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 2048 }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:labeling-job/.*" - } - }, - "com.amazonaws.sagemaker#LabelingJobDataAttributes": { - "type": "structure", - "members": { - "ContentClassifiers": { - "target": "com.amazonaws.sagemaker#ContentClassifiers", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          Declares that your content is free of personally identifiable information or adult\n content. Amazon SageMaker may restrict the Amazon Mechanical Turk workers that can view your task\n based on this information.

          " + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the previous call to ListExperiments didn't return the full set of\n experiments, the call returns a token for getting the next set of experiments.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of experiments to return in the response. The default value is\n 10.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Attributes of the data specified by the customer. Use these to describe the data to be\n labeled.

          " } }, - "com.amazonaws.sagemaker#LabelingJobDataSource": { + "com.amazonaws.sagemaker#ListExperimentsResponse": { "type": "structure", "members": { - "S3DataSource": { - "target": "com.amazonaws.sagemaker#LabelingJobS3DataSource", + "ExperimentSummaries": { + "target": "com.amazonaws.sagemaker#ExperimentSummaries", "traits": { - "smithy.api#documentation": "

          The Amazon S3 location of the input data objects.

          " + "smithy.api#documentation": "

          A list of the summaries of your experiments.

          " } }, - "SnsDataSource": { - "target": "com.amazonaws.sagemaker#LabelingJobSnsDataSource", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          An Amazon SNS data source used for streaming labeling jobs.

          " + "smithy.api#documentation": "

          A token for getting the next set of experiments, if there are any.

          " } } + } + }, + "com.amazonaws.sagemaker#ListFeatureGroups": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListFeatureGroupsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListFeatureGroupsResponse" }, "traits": { - "smithy.api#documentation": "

          Provides information about the location of input data.

          \n

          You must specify at least one of the following: S3DataSource or SnsDataSource.

          \n

          Use SnsDataSource to specify an SNS input topic\n for a streaming labeling job. If you do not specify \n and SNS input topic ARN, Ground Truth will create a one-time labeling job.

          \n

          Use S3DataSource to specify an input \n manifest file for both streaming and one-time labeling jobs.\n Adding an S3DataSource is optional if you use SnsDataSource to create a streaming labeling job.

          " + "smithy.api#documentation": "

          List FeatureGroups based on given filter and order.

          " } }, - "com.amazonaws.sagemaker#LabelingJobForWorkteamSummary": { + "com.amazonaws.sagemaker#ListFeatureGroupsRequest": { "type": "structure", "members": { - "LabelingJobName": { - "target": "com.amazonaws.sagemaker#LabelingJobName", + "NameContains": { + "target": "com.amazonaws.sagemaker#FeatureGroupNameContains", "traits": { - "smithy.api#documentation": "

          The name of the labeling job that the work team is assigned to.

          " + "smithy.api#documentation": "

          A string that partially matches one or more FeatureGroups names. Filters\n FeatureGroups by name.

          " } }, - "JobReferenceCode": { - "target": "com.amazonaws.sagemaker#JobReferenceCode", + "FeatureGroupStatusEquals": { + "target": "com.amazonaws.sagemaker#FeatureGroupStatus", "traits": { - "smithy.api#documentation": "

          A unique identifier for a labeling job. You can use this to refer to a specific\n labeling job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A FeatureGroup status. Filters by FeatureGroup status.

          " } }, - "WorkRequesterAccountId": { - "target": "com.amazonaws.sagemaker#AccountId", + "OfflineStoreStatusEquals": { + "target": "com.amazonaws.sagemaker#OfflineStoreStatusValue", "traits": { - "smithy.api#documentation": "

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          An OfflineStore status. Filters by OfflineStore status.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The date and time that the labeling job was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Use this parameter to search for FeatureGroupss created after a specific\n date and time.

          " } }, - "LabelCounters": { - "target": "com.amazonaws.sagemaker#LabelCountersForWorkteam", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          Provides information about the progress of a labeling job.

          " + "smithy.api#documentation": "

          Use this parameter to search for FeatureGroupss created before a specific\n date and time.

          " } }, - "NumberOfHumanWorkersPerDataObject": { - "target": "com.amazonaws.sagemaker#NumberOfHumanWorkersPerDataObject", + "SortOrder": { + "target": "com.amazonaws.sagemaker#FeatureGroupSortOrder", "traits": { - "smithy.api#documentation": "

          The configured number of workers per data object.

          " + "smithy.api#documentation": "

          The order in which feature groups are listed.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Provides summary information for a work team.

          " - } - }, - "com.amazonaws.sagemaker#LabelingJobForWorkteamSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#LabelingJobForWorkteamSummary" - } - }, - "com.amazonaws.sagemaker#LabelingJobInputConfig": { - "type": "structure", - "members": { - "DataSource": { - "target": "com.amazonaws.sagemaker#LabelingJobDataSource", + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#FeatureGroupSortBy", "traits": { - "smithy.api#documentation": "

          The location of the input data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The value on which the feature group list is sorted.

          " } }, - "DataAttributes": { - "target": "com.amazonaws.sagemaker#LabelingJobDataAttributes", + "MaxResults": { + "target": "com.amazonaws.sagemaker#FeatureGroupMaxResults", "traits": { - "smithy.api#documentation": "

          Attributes of the data specified by the customer.

          " + "smithy.api#documentation": "

          The maximum number of results returned by ListFeatureGroups.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Input configuration information for a labeling job.

          " - } - }, - "com.amazonaws.sagemaker#LabelingJobName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token to resume pagination of ListFeatureGroups results.

          " + } + } } }, - "com.amazonaws.sagemaker#LabelingJobOutput": { + "com.amazonaws.sagemaker#ListFeatureGroupsResponse": { "type": "structure", "members": { - "OutputDatasetS3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", + "FeatureGroupSummaries": { + "target": "com.amazonaws.sagemaker#FeatureGroupSummaries", "traits": { - "smithy.api#documentation": "

          The Amazon S3 bucket location of the manifest file for labeled data.

          ", + "smithy.api#documentation": "

          A summary of feature groups.

          ", "smithy.api#required": {} } }, - "FinalActiveLearningModelArn": { - "target": "com.amazonaws.sagemaker#ModelArn", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) for the most recent Amazon SageMaker model trained as part of\n automated data labeling.

          " + "smithy.api#documentation": "

          A token to resume pagination of ListFeatureGroups results.

          ", + "smithy.api#required": {} } } + } + }, + "com.amazonaws.sagemaker#ListFlowDefinitions": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListFlowDefinitionsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListFlowDefinitionsResponse" }, "traits": { - "smithy.api#documentation": "

          Specifies the location of the output produced by the labeling job.

          " + "smithy.api#documentation": "

          Returns information about the flow definitions in your account.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } } }, - "com.amazonaws.sagemaker#LabelingJobOutputConfig": { + "com.amazonaws.sagemaker#ListFlowDefinitionsRequest": { "type": "structure", "members": { - "S3OutputPath": { - "target": "com.amazonaws.sagemaker#S3Uri", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon S3 location to write output data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only flow definitions with a creation time greater than or equal to the specified timestamp.

          " } }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The AWS Key Management Service ID of the key used to encrypt the output data, if any.

          \n

          If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must\n include permissions to call kms:Encrypt. If you don't provide a KMS key ID,\n Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side\n encryption with KMS-managed keys for LabelingJobOutputConfig. If you use a\n bucket policy with an s3:PutObject permission that only allows objects with\n server-side encryption, set the condition key of\n s3:x-amz-server-side-encryption to \"aws:kms\". For more\n information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer\n Guide.\n

          \n

          The KMS key policy must grant permission to the IAM role that you specify in your\n CreateLabelingJob request. For more information, see Using\n Key Policies in AWS KMS in the AWS Key Management Service Developer\n Guide.

          " + "smithy.api#documentation": "

          A filter that returns only flow definitions that were created before the specified timestamp.

          " } }, - "SnsTopicArn": { - "target": "com.amazonaws.sagemaker#SnsTopicArn", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          An Amazon Simple Notification Service (Amazon SNS) output topic ARN.

          \n

          When workers complete labeling tasks, Ground Truth will send \n labeling task output data to the SNS output topic you specify here.

          \n

          You must provide a value for this parameter\n if you provide an Amazon SNS input topic in SnsDataSource\n in InputConfig.

          " + "smithy.api#documentation": "

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Output configuration information for a labeling job.

          " - } - }, - "com.amazonaws.sagemaker#LabelingJobResourceConfig": { - "type": "structure", - "members": { - "VolumeKmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume\n attached to the ML compute instance(s) that run the training job. The\n VolumeKmsKeyId can be any of the following formats:

          \n
            \n
          • \n

            // KMS Key ID

            \n

            \n \"1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          • \n

            // Amazon Resource Name (ARN) of a KMS Key

            \n

            \n \"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          " + "smithy.api#documentation": "

          A token to resume pagination.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Provides configuration information for labeling jobs.

          " } }, - "com.amazonaws.sagemaker#LabelingJobS3DataSource": { + "com.amazonaws.sagemaker#ListFlowDefinitionsResponse": { "type": "structure", "members": { - "ManifestS3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", + "FlowDefinitionSummaries": { + "target": "com.amazonaws.sagemaker#FlowDefinitionSummaries", "traits": { - "smithy.api#documentation": "

          The Amazon S3 location of the manifest file that describes the input data objects.

          ", + "smithy.api#documentation": "

          An array of objects describing the flow definitions.

          ", "smithy.api#required": {} } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token to resume pagination.

          " + } } + } + }, + "com.amazonaws.sagemaker#ListHumanTaskUis": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListHumanTaskUisRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListHumanTaskUisResponse" }, "traits": { - "smithy.api#documentation": "

          The Amazon S3 location of the input data objects.

          " + "smithy.api#documentation": "

          Returns information about the human task user interfaces in your account.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } } }, - "com.amazonaws.sagemaker#LabelingJobSnsDataSource": { + "com.amazonaws.sagemaker#ListHumanTaskUisRequest": { "type": "structure", "members": { - "SnsTopicArn": { - "target": "com.amazonaws.sagemaker#SnsTopicArn", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon SNS input topic Amazon Resource Name (ARN). Specify the ARN of the input topic\n you will use to send new data objects to a streaming labeling job.

          \n

          If you specify an input topic for SnsTopicArn in InputConfig,\n you must specify a value for SnsTopicArn in OutputConfig.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only human task user interfaces with a creation time greater than or equal to the specified timestamp.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          An Amazon SNS data source used for streaming labeling jobs.

          " - } - }, - "com.amazonaws.sagemaker#LabelingJobStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Initializing", - "name": "INITIALIZING" - }, - { - "value": "InProgress", - "name": "IN_PROGRESS" - }, - { - "value": "Completed", - "name": "COMPLETED" - }, - { - "value": "Failed", - "name": "FAILED" - }, - { - "value": "Stopping", - "name": "STOPPING" - }, - { - "value": "Stopped", - "name": "STOPPED" + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only human task user interfaces that were created before the specified timestamp.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          A token to resume pagination.

          " } - ] + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          " + } + } } }, - "com.amazonaws.sagemaker#LabelingJobStoppingConditions": { + "com.amazonaws.sagemaker#ListHumanTaskUisResponse": { "type": "structure", "members": { - "MaxHumanLabeledObjectCount": { - "target": "com.amazonaws.sagemaker#MaxHumanLabeledObjectCount", + "HumanTaskUiSummaries": { + "target": "com.amazonaws.sagemaker#HumanTaskUiSummaries", "traits": { - "smithy.api#documentation": "

          The maximum number of objects that can be labeled by human workers.

          " + "smithy.api#documentation": "

          An array of objects describing the human task user interfaces.

          ", + "smithy.api#required": {} } }, - "MaxPercentageOfInputDatasetLabeled": { - "target": "com.amazonaws.sagemaker#MaxPercentageOfInputDatasetLabeled", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The maximum number of input data objects that should be labeled.

          " + "smithy.api#documentation": "

          A token to resume pagination.

          " } } + } + }, + "com.amazonaws.sagemaker#ListHyperParameterTuningJobs": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListHyperParameterTuningJobsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListHyperParameterTuningJobsResponse" }, "traits": { - "smithy.api#documentation": "

          A set of conditions for stopping a labeling job. If any of the conditions are met, the\n job is automatically stopped. You can use these conditions to control the cost of data\n labeling.

          \n \n

          Labeling jobs fail after 30 days with an appropriate client error message.

          \n
          " + "smithy.api#documentation": "

          Gets a list of HyperParameterTuningJobSummary objects that\n describe\n the hyperparameter tuning jobs launched in your account.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } } }, - "com.amazonaws.sagemaker#LabelingJobSummary": { + "com.amazonaws.sagemaker#ListHyperParameterTuningJobsRequest": { "type": "structure", "members": { - "LabelingJobName": { - "target": "com.amazonaws.sagemaker#LabelingJobName", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The name of the labeling job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If the result of the previous ListHyperParameterTuningJobs request was\n truncated, the response includes a NextToken. To retrieve the next set of\n tuning jobs, use the token in the next request.

          " } }, - "LabelingJobArn": { - "target": "com.amazonaws.sagemaker#LabelingJobArn", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) assigned to the labeling job when it was\n created.

          ", - "smithy.api#required": {} + "smithy.api#box": {}, + "smithy.api#documentation": "

          The\n maximum number of tuning jobs to return. The default value is\n 10.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobSortByOptions", "traits": { - "smithy.api#documentation": "

          The date and time that the job was created (timestamp).

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The\n field\n to sort results by. The default is Name.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The date and time that the job was last modified (timestamp).

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The sort\n order\n for results. The default is Ascending.

          " } }, - "LabelingJobStatus": { - "target": "com.amazonaws.sagemaker#LabelingJobStatus", + "NameContains": { + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          The current status of the labeling job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A string in the tuning job name. This filter returns only tuning jobs whose name\n contains the specified string.

          " } }, - "LabelCounters": { - "target": "com.amazonaws.sagemaker#LabelCounters", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Counts showing the progress of the labeling job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only tuning jobs that were created after the\n specified\n time.

          " } }, - "WorkteamArn": { - "target": "com.amazonaws.sagemaker#WorkteamArn", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team assigned to the job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only tuning jobs that were created before the\n specified\n time.

          " } }, - "PreHumanTaskLambdaArn": { - "target": "com.amazonaws.sagemaker#LambdaFunctionArn", + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of a Lambda function. The function is run before each\n data object is sent to a worker.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only tuning jobs that were modified after the specified\n time.

          " } }, - "AnnotationConsolidationLambdaArn": { - "target": "com.amazonaws.sagemaker#LambdaFunctionArn", + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the Lambda function used to consolidate the\n annotations from individual workers into a label for a data object. For more\n information, see Annotation\n Consolidation.

          " + "smithy.api#documentation": "

          A filter that returns only tuning jobs that were modified before the specified\n time.

          " } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStatus", "traits": { - "smithy.api#documentation": "

          If the LabelingJobStatus field is Failed, this field\n contains a description of the error.

          " + "smithy.api#documentation": "

          A filter that returns only tuning jobs with the\n specified\n status.

          " } - }, - "LabelingJobOutput": { - "target": "com.amazonaws.sagemaker#LabelingJobOutput", + } + } + }, + "com.amazonaws.sagemaker#ListHyperParameterTuningJobsResponse": { + "type": "structure", + "members": { + "HyperParameterTuningJobSummaries": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobSummaries", "traits": { - "smithy.api#documentation": "

          The location of the output produced by the labeling job.

          " + "smithy.api#documentation": "

          A list of HyperParameterTuningJobSummary objects that\n describe\n the tuning jobs that the ListHyperParameterTuningJobs\n request returned.

          ", + "smithy.api#required": {} } }, - "InputConfig": { - "target": "com.amazonaws.sagemaker#LabelingJobInputConfig", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          Input configuration for the labeling job.

          " + "smithy.api#documentation": "

          If the result of this ListHyperParameterTuningJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of tuning jobs,\n use the token in the next request.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Provides summary information about a labeling job.

          " - } - }, - "com.amazonaws.sagemaker#LabelingJobSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#LabelingJobSummary" - } - }, - "com.amazonaws.sagemaker#LambdaFunctionArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 2048 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:lambda:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:function:[a-zA-Z0-9-_\\.]+(:(\\$LATEST|[a-zA-Z0-9-_]+))?" } }, - "com.amazonaws.sagemaker#LastModifiedTime": { - "type": "timestamp" - }, - "com.amazonaws.sagemaker#ListAlgorithms": { + "com.amazonaws.sagemaker#ListImageVersions": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListAlgorithmsInput" + "target": "com.amazonaws.sagemaker#ListImageVersionsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListAlgorithmsOutput" + "target": "com.amazonaws.sagemaker#ListImageVersionsResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Lists the machine learning algorithms that have been created.

          ", + "smithy.api#documentation": "

          Lists the versions of a specified image and their properties. The list can be filtered\n by creation time or modified time.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13102,169 +18041,186 @@ } } }, - "com.amazonaws.sagemaker#ListAlgorithmsInput": { + "com.amazonaws.sagemaker#ListImageVersionsRequest": { "type": "structure", "members": { "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only algorithms created after the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only versions created on or after the specified time.

          " } }, "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only algorithms created before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only versions created on or before the specified time.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "ImageName": { + "target": "com.amazonaws.sagemaker#ImageName", "traits": { - "smithy.api#documentation": "

          The maximum number of algorithms to return in the response.

          " + "smithy.api#documentation": "

          The name of the image to list the versions of.

          ", + "smithy.api#required": {} } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A string in the algorithm name. This filter returns only algorithms whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          A filter that returns only versions modified on or after the specified time.

          " + } + }, + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only versions modified on or before the specified time.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#documentation": "

          The maximum number of versions to return in the response. The default value is 10.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response to a previous ListAlgorithms request was truncated, the\n response includes a NextToken. To retrieve the next set of algorithms, use\n the token in the next request.

          " + "smithy.api#documentation": "

          If the previous call to ListImageVersions didn't return the full set of\n versions, the call returns a token for getting the next set of versions.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#AlgorithmSortBy", + "target": "com.amazonaws.sagemaker#ImageVersionSortBy", "traits": { - "smithy.api#documentation": "

          The parameter by which to sort the results. The default is\n CreationTime.

          " + "smithy.api#documentation": "

          The property used to sort results. The default value is CREATION_TIME.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "target": "com.amazonaws.sagemaker#ImageVersionSortOrder", "traits": { - "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + "smithy.api#documentation": "

          The sort order. The default value is DESCENDING.

          " } } } }, - "com.amazonaws.sagemaker#ListAlgorithmsOutput": { + "com.amazonaws.sagemaker#ListImageVersionsResponse": { "type": "structure", "members": { - "AlgorithmSummaryList": { - "target": "com.amazonaws.sagemaker#AlgorithmSummaryList", + "ImageVersions": { + "target": "com.amazonaws.sagemaker#ImageVersions", "traits": { - "smithy.api#documentation": "

          An array of AlgorithmSummary objects, each of which lists an\n algorithm.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of versions and their properties.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n algorithms, use it in the subsequent request.

          " + "smithy.api#documentation": "

          A token for getting the next set of versions, if there are any.

          " } } } }, - "com.amazonaws.sagemaker#ListAppImageConfigs": { + "com.amazonaws.sagemaker#ListImages": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListAppImageConfigsRequest" + "target": "com.amazonaws.sagemaker#ListImagesRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListAppImageConfigsResponse" + "target": "com.amazonaws.sagemaker#ListImagesResponse" }, "traits": { - "smithy.api#documentation": "

          Lists the AppImageConfigs in your account and their properties. The list can be\n filtered by creation time or modified time, and whether the AppImageConfig name contains\n a specified string.

          " + "smithy.api#documentation": "

          Lists the images in your account and their properties. The list can be filtered by\n creation time or modified time, and whether the image name contains a specified string.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } } }, - "com.amazonaws.sagemaker#ListAppImageConfigsRequest": { + "com.amazonaws.sagemaker#ListImagesRequest": { "type": "structure", "members": { - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The maximum number of AppImageConfigs to return in the response. The default value is\n 10.

          " + "smithy.api#documentation": "

          A filter that returns only images created on or after the specified time.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          If the previous call to ListAppImageConfigs didn't return the full set of\n AppImageConfigs, the call returns a token for getting the next set of AppImageConfigs.

          " + "smithy.api#documentation": "

          A filter that returns only images created on or before the specified time.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#AppImageConfigName", + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only AppImageConfigs whose name contains the specified string.

          " + "smithy.api#documentation": "

          A filter that returns only images modified on or after the specified time.

          " } }, - "CreationTimeBefore": { + "LastModifiedTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only AppImageConfigs created on or before the specified time.

          " + "smithy.api#documentation": "

          A filter that returns only images modified on or before the specified time.

          " } }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only AppImageConfigs created on or after the specified time.

          " + "smithy.api#documentation": "

          The maximum number of images to return in the response. The default value is 10.

          " } }, - "ModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NameContains": { + "target": "com.amazonaws.sagemaker#ImageNameContains", "traits": { - "smithy.api#documentation": "

          A filter that returns only AppImageConfigs modified on or before the specified time.

          " + "smithy.api#documentation": "

          A filter that returns only images whose name contains the specified string.

          " } }, - "ModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that returns only AppImageConfigs modified on or after the specified time.

          " + "smithy.api#documentation": "

          If the previous call to ListImages didn't return the full set of images,\n the call returns a token for getting the next set of images.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#AppImageConfigSortKey", + "target": "com.amazonaws.sagemaker#ImageSortBy", "traits": { - "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + "smithy.api#documentation": "

          The property used to sort results. The default value is CREATION_TIME.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "target": "com.amazonaws.sagemaker#ImageSortOrder", "traits": { - "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + "smithy.api#documentation": "

          The sort order. The default value is DESCENDING.

          " } } } }, - "com.amazonaws.sagemaker#ListAppImageConfigsResponse": { + "com.amazonaws.sagemaker#ListImagesResponse": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Images": { + "target": "com.amazonaws.sagemaker#Images", "traits": { - "smithy.api#documentation": "

          A token for getting the next set of AppImageConfigs, if there are any.

          " + "smithy.api#documentation": "

          A list of images and their properties.

          " } }, - "AppImageConfigs": { - "target": "com.amazonaws.sagemaker#AppImageConfigList", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A list of AppImageConfigs and their properties.

          " + "smithy.api#documentation": "

          A token for getting the next set of images, if there are any.

          " } } } }, - "com.amazonaws.sagemaker#ListApps": { + "com.amazonaws.sagemaker#ListLabelingJobs": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListAppsRequest" + "target": "com.amazonaws.sagemaker#ListLabelingJobsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListAppsResponse" + "target": "com.amazonaws.sagemaker#ListLabelingJobsResponse" }, "traits": { - "smithy.api#documentation": "

          Lists apps.

          ", + "smithy.api#documentation": "

          Gets a list of labeling jobs.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13272,180 +18228,209 @@ } } }, - "com.amazonaws.sagemaker#ListAppsRequest": { + "com.amazonaws.sagemaker#ListLabelingJobsForWorkteam": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Gets a list of labeling jobs assigned to a specified work team.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "WorkteamArn": { + "target": "com.amazonaws.sagemaker#WorkteamArn", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team for which you want to see labeling\n jobs.

          ", + "smithy.api#required": {} } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          Returns a list up to a specified limit.

          " + "smithy.api#documentation": "

          The maximum number of labeling jobs to return in each page of the response.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + "smithy.api#documentation": "

          If the result of the previous ListLabelingJobsForWorkteam request was\n truncated, the response includes a NextToken. To retrieve the next set of\n labeling jobs, use the token in the next request.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#AppSortKey", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The parameter by which to sort the results. The default is CreationTime.

          " + "smithy.api#documentation": "

          A filter that returns only labeling jobs created after the specified time\n (timestamp).

          " } }, - "DomainIdEquals": { - "target": "com.amazonaws.sagemaker#DomainId", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A parameter to search for the domain ID.

          " + "smithy.api#documentation": "

          A filter that returns only labeling jobs created before the specified time\n (timestamp).

          " } }, - "UserProfileNameEquals": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "JobReferenceCodeContains": { + "target": "com.amazonaws.sagemaker#JobReferenceCodeContains", "traits": { - "smithy.api#documentation": "

          A parameter to search by user profile name.

          " + "smithy.api#documentation": "

          A filter that limits jobs to only the ones whose job reference code contains the\n specified string.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamSortByOptions", + "traits": { + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } } } }, - "com.amazonaws.sagemaker#ListAppsResponse": { + "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamResponse": { "type": "structure", "members": { - "Apps": { - "target": "com.amazonaws.sagemaker#AppList", + "LabelingJobSummaryList": { + "target": "com.amazonaws.sagemaker#LabelingJobForWorkteamSummaryList", "traits": { - "smithy.api#documentation": "

          The list of apps.

          " + "smithy.api#documentation": "

          An array of LabelingJobSummary objects, each describing a labeling\n job.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n labeling jobs, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListAutoMLJobs": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListAutoMLJobsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListAutoMLJobsResponse" - }, + "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamSortByOptions": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Request a list of jobs.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#enum": [ + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] } }, - "com.amazonaws.sagemaker#ListAutoMLJobsRequest": { + "com.amazonaws.sagemaker#ListLabelingJobsRequest": { "type": "structure", "members": { "CreationTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + "smithy.api#documentation": "

          A filter that returns only labeling jobs created after the specified time\n (timestamp).

          " } }, "CreationTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + "smithy.api#documentation": "

          A filter that returns only labeling jobs created before the specified time\n (timestamp).

          " } }, "LastModifiedTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + "smithy.api#documentation": "

          A filter that returns only labeling jobs modified after the specified time\n (timestamp).

          " } }, "LastModifiedTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Request a list of jobs, using a filter for time.

          " + "smithy.api#documentation": "

          A filter that returns only labeling jobs modified before the specified time\n (timestamp).

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#AutoMLNameContains", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          Request a list of jobs, using a search filter for name.

          " + "smithy.api#documentation": "

          The maximum number of labeling jobs to return in each page of the response.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#AutoMLJobStatus", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          Request a list of jobs, using a filter for status.

          " + "smithy.api#documentation": "

          If the result of the previous ListLabelingJobs request was truncated, the\n response includes a NextToken. To retrieve the next set of labeling jobs,\n use the token in the next request.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#AutoMLSortOrder", + "NameContains": { + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          The sort order for the results. The default is Descending.

          " + "smithy.api#documentation": "

          A string in the labeling job name. This filter returns only labeling jobs whose name\n contains the specified string.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#AutoMLSortBy", + "target": "com.amazonaws.sagemaker#SortBy", "traits": { - "smithy.api#documentation": "

          The parameter by which to sort the results. The default is AutoMLJobName.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#AutoMLMaxResults", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          Request a list of jobs up to a specified limit.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#LabelingJobStatus", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + "smithy.api#documentation": "

          A filter that retrieves only labeling jobs with a specific status.

          " } } } }, - "com.amazonaws.sagemaker#ListAutoMLJobsResponse": { + "com.amazonaws.sagemaker#ListLabelingJobsResponse": { "type": "structure", "members": { - "AutoMLJobSummaries": { - "target": "com.amazonaws.sagemaker#AutoMLJobSummaries", + "LabelingJobSummaryList": { + "target": "com.amazonaws.sagemaker#LabelingJobSummaryList", "traits": { - "smithy.api#documentation": "

          Returns a summary list of jobs.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          An array of LabelingJobSummary objects, each describing a labeling\n job.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n labeling jobs, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListCandidatesForAutoMLJob": { + "com.amazonaws.sagemaker#ListLineageEntityParameterKey": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#StringParameterValue" + } + }, + "com.amazonaws.sagemaker#ListModelPackageGroups": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobRequest" + "target": "com.amazonaws.sagemaker#ListModelPackageGroupsInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobResponse" + "target": "com.amazonaws.sagemaker#ListModelPackageGroupsOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          List the Candidates created for the job.

          ", + "smithy.api#documentation": "

          Gets a list of the model groups in your AWS account.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13453,83 +18438,81 @@ } } }, - "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobRequest": { + "com.amazonaws.sagemaker#ListModelPackageGroupsInput": { "type": "structure", "members": { - "AutoMLJobName": { - "target": "com.amazonaws.sagemaker#AutoMLJobName", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          List the Candidates created for the job by providing the job's name.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only model groups created after the specified time.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#CandidateStatus", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          List the Candidates for the job and filter by status.

          " + "smithy.api#documentation": "

          A filter that returns only model groups created before the specified time.

          " } }, - "CandidateNameEquals": { - "target": "com.amazonaws.sagemaker#CandidateName", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          List the Candidates for the job and filter by candidate name.

          " + "smithy.api#documentation": "

          The maximum number of results to return in the response.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#AutoMLSortOrder", + "NameContains": { + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + "smithy.api#documentation": "

          A string in the model group name. This filter returns only model groups whose name\n contains the specified string.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#CandidateSortBy", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          The parameter by which to sort the results. The default is Descending.

          " + "smithy.api#documentation": "

          If the result of the previous ListModelPackageGroups request was\n truncated, the response includes a NextToken. To retrieve the next set of\n model groups, use the token in the next request.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#AutoMLMaxResults", + "SortBy": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupSortBy", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          List the job's Candidates up to a specified limit.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } } } }, - "com.amazonaws.sagemaker#ListCandidatesForAutoMLJobResponse": { + "com.amazonaws.sagemaker#ListModelPackageGroupsOutput": { "type": "structure", "members": { - "Candidates": { - "target": "com.amazonaws.sagemaker#AutoMLCandidates", + "ModelPackageGroupSummaryList": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupSummaryList", "traits": { - "smithy.api#documentation": "

          Summaries about the Candidates.

          ", + "smithy.api#documentation": "

          A list of summaries of the model groups in your AWS account.

          ", "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you receive this token. Use it in your next\n request to receive the next set of results.

          " + "smithy.api#documentation": "

          If the response is truncated, SageMaker returns this token. To retrieve the next set\n of model groups, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListCodeRepositories": { + "com.amazonaws.sagemaker#ListModelPackages": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListCodeRepositoriesInput" + "target": "com.amazonaws.sagemaker#ListModelPackagesInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListCodeRepositoriesOutput" + "target": "com.amazonaws.sagemaker#ListModelPackagesOutput" }, "traits": { - "smithy.api#documentation": "

          Gets a list of the Git repositories in your account.

          ", + "smithy.api#documentation": "

          Lists the model packages that have been created.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13537,93 +18520,99 @@ } } }, - "com.amazonaws.sagemaker#ListCodeRepositoriesInput": { + "com.amazonaws.sagemaker#ListModelPackagesInput": { "type": "structure", "members": { "CreationTimeAfter": { "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only Git repositories that were created after the specified\n time.

          " + "smithy.api#documentation": "

          A filter that returns only model packages created after the specified time\n (timestamp).

          " } }, "CreationTimeBefore": { "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only Git repositories that were created before the specified\n time.

          " + "smithy.api#documentation": "

          A filter that returns only model packages created before the specified time\n (timestamp).

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only Git repositories that were last modified after the\n specified time.

          " + "smithy.api#documentation": "

          The maximum number of model packages to return in the response.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NameContains": { + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          A filter that returns only Git repositories that were last modified before the\n specified time.

          " + "smithy.api#documentation": "

          A string in the model package name. This filter returns only model packages whose name\n contains the specified string.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "ModelApprovalStatus": { + "target": "com.amazonaws.sagemaker#ModelApprovalStatus", "traits": { - "smithy.api#documentation": "

          The maximum number of Git repositories to return in the response.

          " + "smithy.api#documentation": "

          A filter that returns only the model packages with the specified approval\n status.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#CodeRepositoryNameContains", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#ArnOrName", "traits": { - "smithy.api#documentation": "

          A string in the Git repositories name. This filter returns only repositories whose\n name contains the specified string.

          " + "smithy.api#documentation": "

          A filter that returns only model versions that belong to the specified model group.

          " + } + }, + "ModelPackageType": { + "target": "com.amazonaws.sagemaker#ModelPackageType", + "traits": { + "smithy.api#documentation": "

          A filter that returns only the model packages of the specified type. This can be one\n of the following values.

          \n
            \n
          • \n

            \n VERSIONED - List only versioned models.

            \n
          • \n
          • \n

            \n UNVERSIONED - List only unversioned models.

            \n
          • \n
          • \n

            \n BOTH - List both versioned and unversioned models.

            \n
          • \n
          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of a ListCodeRepositoriesOutput request was truncated, the\n response includes a NextToken. To get the next set of Git repositories, use\n the token in the next request.

          " + "smithy.api#documentation": "

          If the response to a previous ListModelPackages request was truncated,\n the response includes a NextToken. To retrieve the next set of model\n packages, use the token in the next request.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#CodeRepositorySortBy", + "target": "com.amazonaws.sagemaker#ModelPackageSortBy", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is Name.

          " + "smithy.api#documentation": "

          The parameter by which to sort the results. The default is\n CreationTime.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#CodeRepositorySortOrder", + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " } } } }, - "com.amazonaws.sagemaker#ListCodeRepositoriesOutput": { + "com.amazonaws.sagemaker#ListModelPackagesOutput": { "type": "structure", "members": { - "CodeRepositorySummaryList": { - "target": "com.amazonaws.sagemaker#CodeRepositorySummaryList", + "ModelPackageSummaryList": { + "target": "com.amazonaws.sagemaker#ModelPackageSummaryList", "traits": { - "smithy.api#documentation": "

          Gets a list of summaries of the Git repositories. Each summary specifies the following\n values for the repository:

          \n
            \n
          • \n

            Name

            \n
          • \n
          • \n

            Amazon Resource Name (ARN)

            \n
          • \n
          • \n

            Creation time

            \n
          • \n
          • \n

            Last modified time

            \n
          • \n
          • \n

            Configuration information, including the URL location of the repository and\n the ARN of the AWS Secrets Manager secret that contains the credentials used\n to access the repository.

            \n
          • \n
          ", + "smithy.api#documentation": "

          An array of ModelPackageSummary objects, each of which lists a model\n package.

          ", "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of a ListCodeRepositoriesOutput request was truncated, the\n response includes a NextToken. To get the next set of Git repositories, use\n the token in the next request.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n model packages, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListCompilationJobs": { + "com.amazonaws.sagemaker#ListModels": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListCompilationJobsRequest" + "target": "com.amazonaws.sagemaker#ListModelsInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListCompilationJobsResponse" + "target": "com.amazonaws.sagemaker#ListModelsOutput" }, "traits": { - "smithy.api#documentation": "

          Lists model compilation jobs that satisfy various filters.

          \n

          To create a model compilation job, use CreateCompilationJob. To get\n information about a particular model compilation job you have created, use DescribeCompilationJob.

          ", + "smithy.api#documentation": "

          Lists models created with the CreateModel API.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13631,119 +18620,81 @@ } } }, - "com.amazonaws.sagemaker#ListCompilationJobsRequest": { + "com.amazonaws.sagemaker#ListModelsInput": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the result of the previous ListCompilationJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of model\n compilation jobs, use the token in the next request.

          " - } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of model compilation jobs to return in the response.

          " - } - }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#CreationTime", + "SortBy": { + "target": "com.amazonaws.sagemaker#ModelSortKey", "traits": { - "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were created after a specified\n time.

          " + "smithy.api#documentation": "

          Sorts the list of results. The default is CreationTime.

          " } }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#CreationTime", + "SortOrder": { + "target": "com.amazonaws.sagemaker#OrderKey", "traits": { - "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were created before a specified\n time.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "NextToken": { + "target": "com.amazonaws.sagemaker#PaginationToken", "traits": { - "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were modified after a specified\n time.

          " + "smithy.api#documentation": "

          If the response to a previous ListModels request was truncated, the\n response includes a NextToken. To retrieve the next set of models, use the\n token in the next request.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns the model compilation jobs that were modified before a specified\n time.

          " + "smithy.api#documentation": "

          The maximum number of models to return in the response.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", - "traits": { - "smithy.api#documentation": "

          A filter that returns the model compilation jobs whose name contains a specified\n string.

          " - } - }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#CompilationJobStatus", + "target": "com.amazonaws.sagemaker#ModelNameContains", "traits": { - "smithy.api#documentation": "

          A filter that retrieves model compilation jobs with a specific DescribeCompilationJobResponse$CompilationJobStatus status.

          " + "smithy.api#documentation": "

          A string in the training job name. This filter returns only models in the training\n job whose name contains the specified string.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#ListCompilationJobsSortBy", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The field by which to sort results. The default is CreationTime.

          " + "smithy.api#documentation": "

          A filter that returns only models created before the specified time\n (timestamp).

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + "smithy.api#documentation": "

          A filter that returns only models with a creation time greater than or equal to the\n specified time (timestamp).

          " } } } }, - "com.amazonaws.sagemaker#ListCompilationJobsResponse": { + "com.amazonaws.sagemaker#ListModelsOutput": { "type": "structure", "members": { - "CompilationJobSummaries": { - "target": "com.amazonaws.sagemaker#CompilationJobSummaries", + "Models": { + "target": "com.amazonaws.sagemaker#ModelSummaryList", "traits": { - "smithy.api#documentation": "

          An array of CompilationJobSummary objects, each describing a model\n compilation job.

          ", + "smithy.api#documentation": "

          An array of ModelSummary objects, each of which lists a\n model.

          ", "smithy.api#required": {} } }, "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "target": "com.amazonaws.sagemaker#PaginationToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this NextToken. To retrieve\n the next set of model compilation jobs, use this token in the next request.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n models, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListCompilationJobsSortBy": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "NAME" - }, - { - "value": "CreationTime", - "name": "CREATION_TIME" - }, - { - "value": "Status", - "name": "STATUS" - } - ] - } - }, - "com.amazonaws.sagemaker#ListDomains": { + "com.amazonaws.sagemaker#ListMonitoringExecutions": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListDomainsRequest" + "target": "com.amazonaws.sagemaker#ListMonitoringExecutionsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListDomainsResponse" + "target": "com.amazonaws.sagemaker#ListMonitoringExecutionsResponse" }, "traits": { - "smithy.api#documentation": "

          Lists the domains.

          ", + "smithy.api#documentation": "

          Returns list of all monitoring job executions.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13751,50 +18702,117 @@ } } }, - "com.amazonaws.sagemaker#ListDomainsRequest": { + "com.amazonaws.sagemaker#ListMonitoringExecutionsRequest": { "type": "structure", "members": { + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "traits": { + "smithy.api#documentation": "

          Name of a specific schedule to fetch jobs for.

          " + } + }, + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          Name of a specific endpoint to fetch jobs for.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#MonitoringExecutionSortKey", + "traits": { + "smithy.api#documentation": "

          Whether to sort results by Status, CreationTime,\n ScheduledTime field. The default is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          Whether to sort the results in Ascending or Descending order.\n The default is Descending.

          " + } + }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + "smithy.api#documentation": "

          The token returned if the response is truncated. To retrieve the next set of job\n executions, use it in the next request.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          Returns a list up to a specified limit.

          " + "smithy.api#documentation": "

          The maximum number of jobs to return in the response. The default value is 10.

          " + } + }, + "ScheduledTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Filter for jobs scheduled before a specified time.

          " + } + }, + "ScheduledTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          Filter for jobs scheduled after a specified time.

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only jobs created before a specified time.

          " + } + }, + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only jobs created after a specified time.

          " + } + }, + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only jobs modified before a specified time.

          " + } + }, + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only jobs modified after a specified time.

          " + } + }, + "StatusEquals": { + "target": "com.amazonaws.sagemaker#ExecutionStatus", + "traits": { + "smithy.api#documentation": "

          A filter that retrieves only jobs with a specific status.

          " } } } }, - "com.amazonaws.sagemaker#ListDomainsResponse": { + "com.amazonaws.sagemaker#ListMonitoringExecutionsResponse": { "type": "structure", "members": { - "Domains": { - "target": "com.amazonaws.sagemaker#DomainList", + "MonitoringExecutionSummaries": { + "target": "com.amazonaws.sagemaker#MonitoringExecutionSummaryList", "traits": { - "smithy.api#documentation": "

          The list of domains.

          " + "smithy.api#documentation": "

          A JSON array in which each element is a summary for a monitoring execution.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs,\n use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListEndpointConfigs": { + "com.amazonaws.sagemaker#ListMonitoringSchedules": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListEndpointConfigsInput" + "target": "com.amazonaws.sagemaker#ListMonitoringSchedulesRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListEndpointConfigsOutput" + "target": "com.amazonaws.sagemaker#ListMonitoringSchedulesResponse" }, "traits": { - "smithy.api#documentation": "

          Lists endpoint configurations.

          ", + "smithy.api#documentation": "

          Returns list of all monitoring schedules.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13802,81 +18820,105 @@ } } }, - "com.amazonaws.sagemaker#ListEndpointConfigsInput": { + "com.amazonaws.sagemaker#ListMonitoringSchedulesRequest": { "type": "structure", "members": { + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          Name of a specific endpoint to fetch schedules for.

          " + } + }, "SortBy": { - "target": "com.amazonaws.sagemaker#EndpointConfigSortKey", + "target": "com.amazonaws.sagemaker#MonitoringScheduleSortKey", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#documentation": "

          Whether to sort results by Status, CreationTime,\n ScheduledTime field. The default is CreationTime.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#OrderKey", + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " + "smithy.api#documentation": "

          Whether to sort the results in Ascending or Descending order.\n The default is Descending.

          " } }, "NextToken": { - "target": "com.amazonaws.sagemaker#PaginationToken", + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of the previous ListEndpointConfig request was\n truncated, the response includes a NextToken. To retrieve the next set of\n endpoint configurations, use the token in the next request.

          " + "smithy.api#documentation": "

          The token returned if the response is truncated. To retrieve the next set of job\n executions, use it in the next request.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The maximum number of training jobs to return in the response.

          " + "smithy.api#documentation": "

          The maximum number of jobs to return in the response. The default value is 10.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#EndpointConfigNameContains", + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          A string in the endpoint configuration name. This filter returns only endpoint\n configurations whose name contains the specified string.

          " + "smithy.api#documentation": "

          Filter for monitoring schedules whose name contains a specified string.

          " } }, "CreationTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoint configurations created before the specified\n time (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only monitoring schedules created before a specified time.

          " } }, "CreationTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoint configurations with a creation time greater\n than or equal to the specified time (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only monitoring schedules created after a specified time.

          " + } + }, + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only monitoring schedules modified before a specified time.

          " + } + }, + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only monitoring schedules modified after a specified time.

          " + } + }, + "StatusEquals": { + "target": "com.amazonaws.sagemaker#ScheduleStatus", + "traits": { + "smithy.api#documentation": "

          A filter that returns only monitoring schedules with a specified status.

          " } } } }, - "com.amazonaws.sagemaker#ListEndpointConfigsOutput": { + "com.amazonaws.sagemaker#ListMonitoringSchedulesResponse": { "type": "structure", "members": { - "EndpointConfigs": { - "target": "com.amazonaws.sagemaker#EndpointConfigSummaryList", + "MonitoringScheduleSummaries": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleSummaryList", "traits": { - "smithy.api#documentation": "

          An array of endpoint configurations.

          ", + "smithy.api#documentation": "

          A JSON array in which each element is a summary for a monitoring schedule.

          ", "smithy.api#required": {} } }, "NextToken": { - "target": "com.amazonaws.sagemaker#PaginationToken", + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n endpoint configurations, use it in the subsequent request

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs,\n use it in the subsequent request

          " } } } }, - "com.amazonaws.sagemaker#ListEndpoints": { + "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigs": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListEndpointsInput" + "target": "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListEndpointsOutput" + "target": "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsOutput" }, "traits": { - "smithy.api#documentation": "

          Lists endpoints.

          ", + "smithy.api#documentation": "

          Lists notebook instance lifecycle configurations created with the CreateNotebookInstanceLifecycleConfig API.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13884,99 +18926,92 @@ } } }, - "com.amazonaws.sagemaker#ListEndpointsInput": { + "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsInput": { "type": "structure", "members": { - "SortBy": { - "target": "com.amazonaws.sagemaker#EndpointSortKey", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          Sorts the list of results. The default is CreationTime.

          " + "smithy.api#documentation": "

          If the result of a ListNotebookInstanceLifecycleConfigs request was\n truncated, the response includes a NextToken. To get the next set of\n lifecycle configurations, use the token in the next request.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#OrderKey", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " + "smithy.api#documentation": "

          The maximum number of lifecycle configurations to return in the response.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#PaginationToken", + "SortBy": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortKey", "traits": { - "smithy.api#documentation": "

          If the result of a ListEndpoints request was truncated, the response\n includes a NextToken. To retrieve the next set of endpoints, use the token\n in the next request.

          " + "smithy.api#documentation": "

          Sorts the list of results. The default is CreationTime.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "SortOrder": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortOrder", "traits": { - "smithy.api#documentation": "

          The maximum number of endpoints to return in the response.

          " + "smithy.api#documentation": "

          The sort order for results.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#EndpointNameContains", + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigNameContains", "traits": { - "smithy.api#documentation": "

          A string in endpoint names. This filter returns only endpoints whose name contains\n the specified string.

          " + "smithy.api#documentation": "

          A string in the lifecycle configuration name. This filter returns only lifecycle\n configurations whose name contains the specified string.

          " } }, "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoints that were created before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were created before the\n specified time (timestamp).

          " } }, "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoints with a creation time greater than or equal to\n the specified time (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were created after the\n specified time (timestamp).

          " } }, "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoints that were modified before the specified\n timestamp.

          " + "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were modified before the\n specified time (timestamp).

          " } }, "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoints that were modified after the specified\n timestamp.

          " - } - }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#EndpointStatus", + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only endpoints with the specified status.

          " + "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were modified after the\n specified time (timestamp).

          " } } } }, - "com.amazonaws.sagemaker#ListEndpointsOutput": { + "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsOutput": { "type": "structure", "members": { - "Endpoints": { - "target": "com.amazonaws.sagemaker#EndpointSummaryList", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          An array or endpoint objects.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To get the next set of\n lifecycle configurations, use it in the next request.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#PaginationToken", + "NotebookInstanceLifecycleConfigs": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummaryList", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n training jobs, use it in the subsequent request.

          " + "smithy.api#documentation": "

          An array of NotebookInstanceLifecycleConfiguration objects, each listing\n a lifecycle configuration.

          " } } } }, - "com.amazonaws.sagemaker#ListExperiments": { + "com.amazonaws.sagemaker#ListNotebookInstances": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListExperimentsRequest" + "target": "com.amazonaws.sagemaker#ListNotebookInstancesInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListExperimentsResponse" + "target": "com.amazonaws.sagemaker#ListNotebookInstancesOutput" }, "traits": { - "smithy.api#documentation": "

          Lists all the experiments in your account. The list can be filtered to show only\n experiments that were created in a specific time range. The list can be sorted by experiment\n name or creation time.

          ", + "smithy.api#documentation": "

          Returns a list of the Amazon SageMaker notebook instances in the requester's account in an AWS\n Region.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -13984,74 +19019,189 @@ } } }, - "com.amazonaws.sagemaker#ListExperimentsRequest": { + "com.amazonaws.sagemaker#ListNotebookInstancesInput": { "type": "structure", "members": { - "CreatedAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that returns only experiments created after the specified time.

          " + "smithy.api#documentation": "

          If the previous call to the ListNotebookInstances is truncated, the\n response includes a NextToken. You can use this token in your subsequent\n ListNotebookInstances request to fetch the next set of notebook\n instances.

          \n \n

          You might specify a filter or a sort order in your request. When the response is\n truncated, you must use the same values for the filter and sort order in the next\n request.

          \n
          " } }, - "CreatedBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only experiments created before the specified time.

          " + "smithy.api#documentation": "

          The maximum number of notebook instances to return.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#SortExperimentsBy", + "target": "com.amazonaws.sagemaker#NotebookInstanceSortKey", "traits": { - "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is Name.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "target": "com.amazonaws.sagemaker#NotebookInstanceSortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for results.

          " + } + }, + "NameContains": { + "target": "com.amazonaws.sagemaker#NotebookInstanceNameContains", + "traits": { + "smithy.api#documentation": "

          A string in the notebook instances' name. This filter returns only notebook\n instances whose name contains the specified string.

          " + } + }, + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only notebook instances that were created before the\n specified time (timestamp).

          " + } + }, + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#CreationTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only notebook instances that were created after the specified\n time (timestamp).

          " + } + }, + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only notebook instances that were modified before the\n specified time (timestamp).

          " + } + }, + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", + "traits": { + "smithy.api#documentation": "

          A filter that returns only notebook instances that were modified after the\n specified time (timestamp).

          " + } + }, + "StatusEquals": { + "target": "com.amazonaws.sagemaker#NotebookInstanceStatus", + "traits": { + "smithy.api#documentation": "

          A filter that returns only notebook instances with the specified status.

          " + } + }, + "NotebookInstanceLifecycleConfigNameContains": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "traits": { + "smithy.api#documentation": "

          A string in the name of a notebook instance lifecycle configuration associated with\n this notebook instance. This filter returns only notebook instances associated with a\n lifecycle configuration with a name that contains the specified string.

          " + } + }, + "DefaultCodeRepositoryContains": { + "target": "com.amazonaws.sagemaker#CodeRepositoryContains", + "traits": { + "smithy.api#documentation": "

          A string in the name or URL of a Git repository associated with this notebook\n instance. This filter returns only notebook instances associated with a git repository\n with a name that contains the specified string.

          " + } + }, + "AdditionalCodeRepositoryEquals": { + "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "traits": { + "smithy.api#documentation": "

          A filter that returns only notebook instances associated with the specified Git\n repository.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListNotebookInstancesOutput": { + "type": "structure", + "members": { + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the response to the previous ListNotebookInstances request was\n truncated, Amazon SageMaker returns this token. To retrieve the next set of notebook instances, use\n the token in the next request.

          " + } + }, + "NotebookInstances": { + "target": "com.amazonaws.sagemaker#NotebookInstanceSummaryList", + "traits": { + "smithy.api#documentation": "

          An array of NotebookInstanceSummary objects, one for each notebook\n instance.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListPipelineExecutionSteps": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListPipelineExecutionStepsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListPipelineExecutionStepsResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Gets a list of PipeLineExecutionStep objects.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListPipelineExecutionStepsRequest": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous call to ListExperiments didn't return the full set of\n experiments, the call returns a token for getting the next set of experiments.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelineExecutionSteps request was truncated,\n the response includes a NextToken. To retrieve the next set of pipeline execution steps, use the token in the next request.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The maximum number of experiments to return in the response. The default value is\n 10.

          " + "smithy.api#documentation": "

          The maximum number of pipeline execution steps to return in the response.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for results.

          " } } } }, - "com.amazonaws.sagemaker#ListExperimentsResponse": { + "com.amazonaws.sagemaker#ListPipelineExecutionStepsResponse": { "type": "structure", "members": { - "ExperimentSummaries": { - "target": "com.amazonaws.sagemaker#ExperimentSummaries", + "PipelineExecutionSteps": { + "target": "com.amazonaws.sagemaker#PipelineExecutionStepList", "traits": { - "smithy.api#documentation": "

          A list of the summaries of your experiments.

          " + "smithy.api#documentation": "

          A list of PipeLineExecutionStep objects. Each\n PipeLineExecutionStep consists of StepName, StartTime, EndTime, StepStatus,\n and Metadata. Metadata is an object with properties for each job that contains relevant\n information about the job created by the step.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token for getting the next set of experiments, if there are any.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelineExecutionSteps request was truncated,\n the response includes a NextToken. To retrieve the next set of pipeline execution steps, use the token in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListFlowDefinitions": { + "com.amazonaws.sagemaker#ListPipelineExecutions": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListFlowDefinitionsRequest" + "target": "com.amazonaws.sagemaker#ListPipelineExecutionsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListFlowDefinitionsResponse" + "target": "com.amazonaws.sagemaker#ListPipelineExecutionsResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns information about the flow definitions in your account.

          ", + "smithy.api#documentation": "

          Gets a list of the pipeline executions.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14059,70 +19209,86 @@ } } }, - "com.amazonaws.sagemaker#ListFlowDefinitionsRequest": { + "com.amazonaws.sagemaker#ListPipelineExecutionsRequest": { "type": "structure", "members": { - "CreationTimeAfter": { + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The name of the pipeline.

          ", + "smithy.api#required": {} + } + }, + "CreatedAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only flow definitions with a creation time greater than or equal to the specified timestamp.

          " + "smithy.api#documentation": "

          A filter that returns the pipeline executions that were created after a specified\n time.

          " } }, - "CreationTimeBefore": { + "CreatedBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only flow definitions that were created before the specified timestamp.

          " + "smithy.api#documentation": "

          A filter that returns the pipeline executions that were created before a specified\n time.

          " + } + }, + "SortBy": { + "target": "com.amazonaws.sagemaker#SortPipelineExecutionsBy", + "traits": { + "smithy.api#documentation": "

          The field by which to sort results. The default is CreatedTime.

          " } }, "SortOrder": { "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          " + "smithy.api#documentation": "

          The sort order for results.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token to resume pagination.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelineExecutions request was truncated,\n the response includes a NextToken. To retrieve the next set of pipeline executions, use the token in the next request.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          " + "smithy.api#documentation": "

          The maximum number of pipeline executions to return in the response.

          " } } } }, - "com.amazonaws.sagemaker#ListFlowDefinitionsResponse": { + "com.amazonaws.sagemaker#ListPipelineExecutionsResponse": { "type": "structure", "members": { - "FlowDefinitionSummaries": { - "target": "com.amazonaws.sagemaker#FlowDefinitionSummaries", + "PipelineExecutionSummaries": { + "target": "com.amazonaws.sagemaker#PipelineExecutionSummaryList", "traits": { - "smithy.api#documentation": "

          An array of objects describing the flow definitions.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Contains a sorted list of pipeline execution summary objects matching the specified\n filters. Each run summary includes the Amazon Resource Name (ARN) of the pipeline execution, the run date,\n and the status. This list can be empty.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token to resume pagination.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelineExecutions request was truncated,\n the response includes a NextToken. To retrieve the next set of pipeline executions, use the token in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListHumanTaskUis": { + "com.amazonaws.sagemaker#ListPipelineParametersForExecution": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListHumanTaskUisRequest" + "target": "com.amazonaws.sagemaker#ListPipelineParametersForExecutionRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListHumanTaskUisResponse" + "target": "com.amazonaws.sagemaker#ListPipelineParametersForExecutionResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns information about the human task user interfaces in your account.

          ", + "smithy.api#documentation": "

          Gets a list of parameters for a pipeline execution.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14130,70 +19296,57 @@ } } }, - "com.amazonaws.sagemaker#ListHumanTaskUisRequest": { + "com.amazonaws.sagemaker#ListPipelineParametersForExecutionRequest": { "type": "structure", "members": { - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only human task user interfaces with a creation time greater than or equal to the specified timestamp.

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only human task user interfaces that were created before the specified timestamp.

          " - } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          An optional value that specifies whether you want the results sorted in Ascending or Descending order.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token to resume pagination.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelineParametersForExecution request was truncated,\n the response includes a NextToken. To retrieve the next set of parameters, use the token in the next request.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The total number of items to return. If the total number of available items is more than the value specified in MaxResults, then a NextToken will be provided in the output that you can use to resume pagination.

          " + "smithy.api#documentation": "

          The maximum number of parameters to return in the response.

          " } } } }, - "com.amazonaws.sagemaker#ListHumanTaskUisResponse": { + "com.amazonaws.sagemaker#ListPipelineParametersForExecutionResponse": { "type": "structure", "members": { - "HumanTaskUiSummaries": { - "target": "com.amazonaws.sagemaker#HumanTaskUiSummaries", + "PipelineParameters": { + "target": "com.amazonaws.sagemaker#ParameterList", "traits": { - "smithy.api#documentation": "

          An array of objects describing the human task user interfaces.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Contains a list of pipeline parameters. This list can be empty.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token to resume pagination.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelineParametersForExecution request was truncated,\n the response includes a NextToken. To retrieve the next set of parameters, use the token in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListHyperParameterTuningJobs": { + "com.amazonaws.sagemaker#ListPipelines": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListHyperParameterTuningJobsRequest" + "target": "com.amazonaws.sagemaker#ListPipelinesRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListHyperParameterTuningJobsResponse" + "target": "com.amazonaws.sagemaker#ListPipelinesResponse" }, "traits": { - "smithy.api#documentation": "

          Gets a list of HyperParameterTuningJobSummary objects that\n describe\n the hyperparameter tuning jobs launched in your account.

          ", + "smithy.api#documentation": "

          Gets a list of pipelines.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14201,105 +19354,80 @@ } } }, - "com.amazonaws.sagemaker#ListHyperParameterTuningJobsRequest": { + "com.amazonaws.sagemaker#ListPipelinesRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the result of the previous ListHyperParameterTuningJobs request was\n truncated, the response includes a NextToken. To retrieve the next set of\n tuning jobs, use the token in the next request.

          " - } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The\n maximum number of tuning jobs to return. The default value is\n 10.

          " - } - }, - "SortBy": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobSortByOptions", - "traits": { - "smithy.api#documentation": "

          The\n field\n to sort results by. The default is Name.

          " - } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "PipelineNamePrefix": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The sort\n order\n for results. The default is Ascending.

          " + "smithy.api#documentation": "

          The prefix of the pipeline name.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A string in the tuning job name. This filter returns only tuning jobs whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          A filter that returns the pipelines that were created after a specified\n time.

          " } }, - "CreationTimeAfter": { + "CreatedBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only tuning jobs that were created after the\n specified\n time.

          " + "smithy.api#documentation": "

          A filter that returns the pipelines that were created before a specified\n time.

          " } }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#SortPipelinesBy", "traits": { - "smithy.api#documentation": "

          A filter that returns only tuning jobs that were created before the\n specified\n time.

          " + "smithy.api#documentation": "

          The field by which to sort results. The default is CreatedTime.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          A filter that returns only tuning jobs that were modified after the specified\n time.

          " + "smithy.api#documentation": "

          The sort order for results.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that returns only tuning jobs that were modified before the specified\n time.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelines request was truncated,\n the response includes a NextToken. To retrieve the next set of pipelines, use the token in the next request.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobStatus", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only tuning jobs with the\n specified\n status.

          " + "smithy.api#documentation": "

          The maximum number of pipelines to return in the response.

          " } } } }, - "com.amazonaws.sagemaker#ListHyperParameterTuningJobsResponse": { + "com.amazonaws.sagemaker#ListPipelinesResponse": { "type": "structure", "members": { - "HyperParameterTuningJobSummaries": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobSummaries", + "PipelineSummaries": { + "target": "com.amazonaws.sagemaker#PipelineSummaryList", "traits": { - "smithy.api#documentation": "

          A list of HyperParameterTuningJobSummary objects that\n describe\n the tuning jobs that the ListHyperParameterTuningJobs\n request returned.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Contains a sorted list of PipelineSummary objects matching the specified\n filters. Each PipelineSummary consists of PipelineArn, PipelineName,\n ExperimentName, PipelineDescription, CreationTime, LastModifiedTime, LastRunTime, and\n RoleArn. This list can be empty.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of this ListHyperParameterTuningJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of tuning jobs,\n use the token in the next request.

          " + "smithy.api#documentation": "

          If the result of the previous ListPipelines request was truncated,\n the response includes a NextToken. To retrieve the next set of pipelines, use the token in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListImageVersions": { + "com.amazonaws.sagemaker#ListProcessingJobs": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListImageVersionsRequest" + "target": "com.amazonaws.sagemaker#ListProcessingJobsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListImageVersionsResponse" + "target": "com.amazonaws.sagemaker#ListProcessingJobsResponse" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Lists the versions of a specified image and their properties. The list can be filtered\n by creation time or modified time.

          ", + "smithy.api#documentation": "

          Lists processing jobs that satisfy various filters.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14307,186 +19435,182 @@ } } }, - "com.amazonaws.sagemaker#ListImageVersionsRequest": { + "com.amazonaws.sagemaker#ListProcessingJobsRequest": { "type": "structure", "members": { "CreationTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only versions created on or after the specified time.

          " + "smithy.api#documentation": "

          A filter that returns only processing jobs created after the specified time.

          " } }, "CreationTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only versions created on or before the specified time.

          " - } - }, - "ImageName": { - "target": "com.amazonaws.sagemaker#ImageName", - "traits": { - "smithy.api#documentation": "

          The name of the image to list the versions of.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A filter that returns only processing jobs created before the specified time.

          " } }, "LastModifiedTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only versions modified on or after the specified time.

          " + "smithy.api#documentation": "

          A filter that returns only processing jobs modified after the specified time.

          " } }, "LastModifiedTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only versions modified on or before the specified time.

          " + "smithy.api#documentation": "

          A filter that returns only processing jobs modified before the specified time.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "NameContains": { + "target": "com.amazonaws.sagemaker#String", "traits": { - "smithy.api#documentation": "

          The maximum number of versions to return in the response. The default value is 10.

          " + "smithy.api#documentation": "

          A string in the processing job name. This filter returns only processing jobs whose\n name contains the specified string.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#ProcessingJobStatus", "traits": { - "smithy.api#documentation": "

          If the previous call to ListImageVersions didn't return the full set of\n versions, the call returns a token for getting the next set of versions.

          " + "smithy.api#documentation": "

          A filter that retrieves only processing jobs with a specific status.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#ImageVersionSortBy", + "target": "com.amazonaws.sagemaker#SortBy", "traits": { - "smithy.api#documentation": "

          The property used to sort results. The default value is CREATION_TIME.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#ImageVersionSortOrder", + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The sort order. The default value is DESCENDING.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the result of the previous ListProcessingJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of processing\n jobs, use the token in the next request.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of processing jobs to return in the response.

          " } } } }, - "com.amazonaws.sagemaker#ListImageVersionsResponse": { + "com.amazonaws.sagemaker#ListProcessingJobsResponse": { "type": "structure", "members": { - "ImageVersions": { - "target": "com.amazonaws.sagemaker#ImageVersions", + "ProcessingJobSummaries": { + "target": "com.amazonaws.sagemaker#ProcessingJobSummaries", "traits": { - "smithy.api#documentation": "

          A list of versions and their properties.

          " + "smithy.api#documentation": "

          An array of ProcessingJobSummary objects, each listing a processing\n job.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token for getting the next set of versions, if there are any.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n processing jobs, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListImages": { + "com.amazonaws.sagemaker#ListProjects": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListImagesRequest" + "target": "com.amazonaws.sagemaker#ListProjectsInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListImagesResponse" + "target": "com.amazonaws.sagemaker#ListProjectsOutput" }, "traits": { - "smithy.api#documentation": "

          Lists the images in your account and their properties. The list can be filtered by\n creation time or modified time, and whether the image name contains a specified string.

          ", + "smithy.api#documentation": "

          Gets a list of the projects in an AWS account.

          ", "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } - } - }, - "com.amazonaws.sagemaker#ListImagesRequest": { - "type": "structure", - "members": { - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only images created on or after the specified time.

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only images created on or before the specified time.

          " - } - }, - "LastModifiedTimeAfter": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListProjectsInput": { + "type": "structure", + "members": { + "CreationTimeAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only images modified on or after the specified time.

          " + "smithy.api#documentation": "

          A filter that returns the projects that were created after a specified\n time.

          " } }, - "LastModifiedTimeBefore": { + "CreationTimeBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only images modified on or before the specified time.

          " + "smithy.api#documentation": "

          A filter that returns the projects that were created before a specified\n time.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The maximum number of images to return in the response. The default value is 10.

          " + "smithy.api#documentation": "

          The maximum number of projects to return in the response.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#ImageNameContains", + "target": "com.amazonaws.sagemaker#ProjectEntityName", "traits": { - "smithy.api#documentation": "

          A filter that returns only images whose name contains the specified string.

          " + "smithy.api#documentation": "

          A filter that returns the projects whose name contains a specified\n string.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the previous call to ListImages didn't return the full set of images,\n the call returns a token for getting the next set of images.

          " + "smithy.api#documentation": "

          If the result of the previous ListProjects request was truncated,\n the response includes a NextToken. To retrieve the next set of projects, use the token in the next request.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#ImageSortBy", + "target": "com.amazonaws.sagemaker#ProjectSortBy", "traits": { - "smithy.api#documentation": "

          The property used to sort results. The default value is CREATION_TIME.

          " + "smithy.api#documentation": "

          The field by which to sort results. The default is CreationTime.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#ImageSortOrder", + "target": "com.amazonaws.sagemaker#ProjectSortOrder", "traits": { - "smithy.api#documentation": "

          The sort order. The default value is DESCENDING.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } } } }, - "com.amazonaws.sagemaker#ListImagesResponse": { + "com.amazonaws.sagemaker#ListProjectsOutput": { "type": "structure", "members": { - "Images": { - "target": "com.amazonaws.sagemaker#Images", + "ProjectSummaryList": { + "target": "com.amazonaws.sagemaker#ProjectSummaryList", "traits": { - "smithy.api#documentation": "

          A list of images and their properties.

          " + "smithy.api#documentation": "

          A list of summaries of projects.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A token for getting the next set of images, if there are any.

          " + "smithy.api#documentation": "

          If the result of the previous ListProjects request was truncated,\n the response includes a NextToken. To retrieve the next set of\n projects, use the token in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListLabelingJobs": { + "com.amazonaws.sagemaker#ListSubscribedWorkteams": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListLabelingJobsRequest" + "target": "com.amazonaws.sagemaker#ListSubscribedWorkteamsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListLabelingJobsResponse" + "target": "com.amazonaws.sagemaker#ListSubscribedWorkteamsResponse" }, "traits": { - "smithy.api#documentation": "

          Gets a list of labeling jobs.

          ", + "smithy.api#documentation": "

          Gets a list of the work teams that you are subscribed to in the AWS Marketplace. The\n list may be empty if no work team satisfies the filter specified in the\n NameContains parameter.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14494,21 +19618,58 @@ } } }, - "com.amazonaws.sagemaker#ListLabelingJobsForWorkteam": { + "com.amazonaws.sagemaker#ListSubscribedWorkteamsRequest": { + "type": "structure", + "members": { + "NameContains": { + "target": "com.amazonaws.sagemaker#WorkteamName", + "traits": { + "smithy.api#documentation": "

          A string in the work team name. This filter returns only work teams whose name\n contains the specified string.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the result of the previous ListSubscribedWorkteams request was\n truncated, the response includes a NextToken. To retrieve the next set of\n work teams, use the token in the next request.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of work teams to return in each page of the response.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListSubscribedWorkteamsResponse": { + "type": "structure", + "members": { + "SubscribedWorkteams": { + "target": "com.amazonaws.sagemaker#SubscribedWorkteams", + "traits": { + "smithy.api#documentation": "

          An array of Workteam objects, each describing a work team.

          ", + "smithy.api#required": {} + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n work teams, use it in the subsequent request.

          " + } + } + } + }, + "com.amazonaws.sagemaker#ListTags": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamRequest" + "target": "com.amazonaws.sagemaker#ListTagsInput" }, "output": { - "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamResponse" + "target": "com.amazonaws.sagemaker#ListTagsOutput" }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], "traits": { - "smithy.api#documentation": "

          Gets a list of labeling jobs assigned to a specified work team.

          ", + "smithy.api#documentation": "

          Returns the tags for the specified Amazon SageMaker resource.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14516,263 +19677,249 @@ } } }, - "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamRequest": { + "com.amazonaws.sagemaker#ListTagsInput": { "type": "structure", "members": { - "WorkteamArn": { - "target": "com.amazonaws.sagemaker#WorkteamArn", + "ResourceArn": { + "target": "com.amazonaws.sagemaker#ResourceArn", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the work team for which you want to see labeling\n jobs for.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource whose tags you want to\n retrieve.

          ", "smithy.api#required": {} } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#documentation": "

          The maximum number of labeling jobs to return in each page of the response.

          " - } - }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of the previous ListLabelingJobsForWorkteam request was\n truncated, the response includes a NextToken. To retrieve the next set of\n labeling jobs, use the token in the next request.

          " - } - }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only labeling jobs created after the specified time\n (timestamp).

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only labeling jobs created before the specified time\n (timestamp).

          " - } - }, - "JobReferenceCodeContains": { - "target": "com.amazonaws.sagemaker#JobReferenceCodeContains", - "traits": { - "smithy.api#documentation": "

          A filter the limits jobs to only the ones whose job reference code contains the\n specified string.

          " - } - }, - "SortBy": { - "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamSortByOptions", - "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#documentation": "

          If the response to the previous ListTags request is truncated, Amazon SageMaker\n returns this token. To retrieve the next set of tags, use it in the subsequent request.\n

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "MaxResults": { + "target": "com.amazonaws.sagemaker#ListTagsMaxResults", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + "smithy.api#documentation": "

          Maximum number of tags to return.

          " } } } }, - "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamResponse": { + "com.amazonaws.sagemaker#ListTagsMaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 50 + } + } + }, + "com.amazonaws.sagemaker#ListTagsOutput": { "type": "structure", "members": { - "LabelingJobSummaryList": { - "target": "com.amazonaws.sagemaker#LabelingJobForWorkteamSummaryList", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of LabelingJobSummary objects, each describing a labeling\n job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          An array of Tag objects, each with a tag key and a value.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n labeling jobs, use it in the subsequent request.

          " + "smithy.api#documentation": "

          If response is truncated, Amazon SageMaker includes a token in the response. You can use this\n token in your subsequent request to fetch next set of tokens.

          " } } } }, - "com.amazonaws.sagemaker#ListLabelingJobsForWorkteamSortByOptions": { - "type": "string", + "com.amazonaws.sagemaker#ListTrainingJobs": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListTrainingJobsRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListTrainingJobsResponse" + }, "traits": { - "smithy.api#enum": [ - { - "value": "CreationTime", - "name": "CREATION_TIME" - } - ] + "smithy.api#documentation": "

          Lists training jobs.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } } }, - "com.amazonaws.sagemaker#ListLabelingJobsRequest": { + "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJob": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Gets a list of TrainingJobSummary objects that describe the training\n jobs that a hyperparameter tuning job launched.

          ", + "smithy.api#paginated": { + "inputToken": "NextToken", + "outputToken": "NextToken", + "pageSize": "MaxResults" + } + } + }, + "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobRequest": { "type": "structure", "members": { - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only labeling jobs created after the specified time\n (timestamp).

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only labeling jobs created before the specified time\n (timestamp).

          " - } - }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "HyperParameterTuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", "traits": { - "smithy.api#documentation": "

          A filter that returns only labeling jobs modified after the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          The name of the tuning job whose training jobs you want to list.

          ", + "smithy.api#required": {} } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that returns only labeling jobs modified before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          If the result of the previous ListTrainingJobsForHyperParameterTuningJob\n request was truncated, the response includes a NextToken. To retrieve the\n next set of training jobs, use the token in the next request.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The maximum number of labeling jobs to return in each page of the response.

          " - } - }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the result of the previous ListLabelingJobs request was truncated, the\n response includes a NextToken. To retrieve the next set of labeling jobs,\n use the token in the next request.

          " + "smithy.api#documentation": "

          The maximum number of training jobs to return. The default value is 10.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#TrainingJobStatus", "traits": { - "smithy.api#documentation": "

          A string in the labeling job name. This filter returns only labeling jobs whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          A filter that returns only training jobs with the\n specified\n status.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#SortBy", + "target": "com.amazonaws.sagemaker#TrainingJobSortByOptions", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#documentation": "

          The field to sort\n results\n by. The default is Name.

          \n

          If the value of this field is FinalObjectiveMetricValue, any training\n jobs that did not return an objective metric are not listed.

          " } }, "SortOrder": { "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " - } - }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#LabelingJobStatus", - "traits": { - "smithy.api#documentation": "

          A filter that retrieves only labeling jobs with a specific status.

          " + "smithy.api#documentation": "

          The sort order\n for\n results. The default is Ascending.

          " } } } }, - "com.amazonaws.sagemaker#ListLabelingJobsResponse": { + "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobResponse": { "type": "structure", "members": { - "LabelingJobSummaryList": { - "target": "com.amazonaws.sagemaker#LabelingJobSummaryList", + "TrainingJobSummaries": { + "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummaries", "traits": { - "smithy.api#documentation": "

          An array of LabelingJobSummary objects, each describing a labeling\n job.

          " + "smithy.api#documentation": "

          A list of TrainingJobSummary objects that\n describe\n the training jobs that the\n ListTrainingJobsForHyperParameterTuningJob request returned.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n labeling jobs, use it in the subsequent request.

          " + "smithy.api#documentation": "

          If the result of this ListTrainingJobsForHyperParameterTuningJob request\n was truncated, the response includes a NextToken. To retrieve the next set\n of training jobs, use the token in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListModelPackages": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListModelPackagesInput" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListModelPackagesOutput" - }, - "traits": { - "smithy.api#documentation": "

          Lists the model packages that have been created.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } - } - }, - "com.amazonaws.sagemaker#ListModelPackagesInput": { + "com.amazonaws.sagemaker#ListTrainingJobsRequest": { "type": "structure", "members": { + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the result of the previous ListTrainingJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of training\n jobs, use the token in the next request.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of training jobs to return in the response.

          " + } + }, "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only model packages created after the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only training jobs created after the specified time\n (timestamp).

          " } }, "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only model packages created before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          A filter that returns only training jobs created before the specified time\n (timestamp).

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The maximum number of model packages to return in the response.

          " + "smithy.api#documentation": "

          A filter that returns only training jobs modified after the specified time\n (timestamp).

          " + } + }, + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A filter that returns only training jobs modified before the specified time\n (timestamp).

          " } }, "NameContains": { "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          A string in the model package name. This filter returns only model packages whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          A string in the training job name. This filter returns only training jobs whose\n name contains the specified string.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#TrainingJobStatus", "traits": { - "smithy.api#documentation": "

          If the response to a previous ListModelPackages request was truncated,\n the response includes a NextToken. To retrieve the next set of model\n packages, use the token in the next request.

          " + "smithy.api#documentation": "

          A filter that retrieves only training jobs with a specific status.

          " } }, "SortBy": { - "target": "com.amazonaws.sagemaker#ModelPackageSortBy", + "target": "com.amazonaws.sagemaker#SortBy", "traits": { - "smithy.api#documentation": "

          The parameter by which to sort the results. The default is\n CreationTime.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " } }, "SortOrder": { "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } } } }, - "com.amazonaws.sagemaker#ListModelPackagesOutput": { + "com.amazonaws.sagemaker#ListTrainingJobsResponse": { "type": "structure", "members": { - "ModelPackageSummaryList": { - "target": "com.amazonaws.sagemaker#ModelPackageSummaryList", + "TrainingJobSummaries": { + "target": "com.amazonaws.sagemaker#TrainingJobSummaries", "traits": { - "smithy.api#documentation": "

          An array of ModelPackageSummary objects, each of which lists a model\n package.

          ", + "smithy.api#documentation": "

          An array of TrainingJobSummary objects, each listing a training\n job.

          ", "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n model packages, use it in the subsequent request.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n training jobs, use it in the subsequent request.

          " } } } }, - "com.amazonaws.sagemaker#ListModels": { + "com.amazonaws.sagemaker#ListTransformJobs": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListModelsInput" + "target": "com.amazonaws.sagemaker#ListTransformJobsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListModelsOutput" + "target": "com.amazonaws.sagemaker#ListTransformJobsResponse" }, "traits": { - "smithy.api#documentation": "

          Lists models created with the CreateModel API.

          ", + "smithy.api#documentation": "

          Lists transform jobs.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14780,81 +19927,111 @@ } } }, - "com.amazonaws.sagemaker#ListModelsInput": { + "com.amazonaws.sagemaker#ListTransformJobsRequest": { "type": "structure", "members": { - "SortBy": { - "target": "com.amazonaws.sagemaker#ModelSortKey", + "CreationTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Sorts the list of results. The default is CreationTime.

          " + "smithy.api#documentation": "

          A filter that returns only transform jobs created after the specified time.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#OrderKey", + "CreationTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " + "smithy.api#documentation": "

          A filter that returns only transform jobs created before the specified time.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#PaginationToken", + "LastModifiedTimeAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          If the response to a previous ListModels request was truncated, the\n response includes a NextToken. To retrieve the next set of models, use the\n token in the next request.

          " + "smithy.api#documentation": "

          A filter that returns only transform jobs modified after the specified time.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "LastModifiedTimeBefore": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The maximum number of models to return in the response.

          " + "smithy.api#documentation": "

          A filter that returns only transform jobs modified before the specified time.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#ModelNameContains", + "target": "com.amazonaws.sagemaker#NameContains", "traits": { - "smithy.api#documentation": "

          A string in the training job name. This filter returns only models in the training\n job whose name contains the specified string.

          " + "smithy.api#documentation": "

          A string in the transform job name. This filter returns only transform jobs whose name\n contains the specified string.

          " } }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "StatusEquals": { + "target": "com.amazonaws.sagemaker#TransformJobStatus", "traits": { - "smithy.api#documentation": "

          A filter that returns only models created before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          A filter that retrieves only transform jobs with a specific status.

          " } }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#SortBy", "traits": { - "smithy.api#documentation": "

          A filter that returns only models with a creation time greater than or equal to the\n specified time (timestamp).

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + } + }, + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", + "traits": { + "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " + } + }, + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", + "traits": { + "smithy.api#documentation": "

          If the result of the previous ListTransformJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of transform\n jobs, use the token in the next request.

          " + } + }, + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", + "traits": { + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of\n transform\n jobs to return in the response. The default value is\n 10.

          " } } } }, - "com.amazonaws.sagemaker#ListModelsOutput": { + "com.amazonaws.sagemaker#ListTransformJobsResponse": { "type": "structure", "members": { - "Models": { - "target": "com.amazonaws.sagemaker#ModelSummaryList", + "TransformJobSummaries": { + "target": "com.amazonaws.sagemaker#TransformJobSummaries", "traits": { - "smithy.api#documentation": "

          An array of ModelSummary objects, each of which lists a\n model.

          ", + "smithy.api#documentation": "

          An array of\n TransformJobSummary\n objects.

          ", "smithy.api#required": {} } }, "NextToken": { - "target": "com.amazonaws.sagemaker#PaginationToken", + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n models, use it in the subsequent request.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n transform jobs, use it in the next request.

          " } } } }, - "com.amazonaws.sagemaker#ListMonitoringExecutions": { + "com.amazonaws.sagemaker#ListTrialComponentKey256": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#TrialComponentKey256" + } + }, + "com.amazonaws.sagemaker#ListTrialComponents": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListMonitoringExecutionsRequest" + "target": "com.amazonaws.sagemaker#ListTrialComponentsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListMonitoringExecutionsResponse" + "target": "com.amazonaws.sagemaker#ListTrialComponentsResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns list of all monitoring job executions.

          ", + "smithy.api#documentation": "

          Lists the trial components in your account. You can sort the list by trial component name\n or creation time. You can filter the list to show only components that were created in a\n specific time range. You can also filter on one of the following:

          \n
            \n
          • \n

            \n ExperimentName\n

            \n
          • \n
          • \n

            \n SourceArn\n

            \n
          • \n
          • \n

            \n TrialName\n

            \n
          • \n
          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14862,117 +20039,97 @@ } } }, - "com.amazonaws.sagemaker#ListMonitoringExecutionsRequest": { + "com.amazonaws.sagemaker#ListTrialComponentsRequest": { "type": "structure", "members": { - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", - "traits": { - "smithy.api#documentation": "

          Name of a specific schedule to fetch jobs for.

          " - } - }, - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", - "traits": { - "smithy.api#documentation": "

          Name of a specific endpoint to fetch jobs for.

          " - } - }, - "SortBy": { - "target": "com.amazonaws.sagemaker#MonitoringExecutionSortKey", - "traits": { - "smithy.api#documentation": "

          Whether to sort results by Status, CreationTime,\n ScheduledTime field. The default is CreationTime.

          " - } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", - "traits": { - "smithy.api#documentation": "

          Whether to sort the results in Ascending or Descending order.\n The default is Descending.

          " - } - }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The token returned if the response is truncated. To retrieve the next set of job\n executions, use it in the next request.

          " + "smithy.api#documentation": "

          A filter that returns only components that are part of the specified experiment. If you\n specify ExperimentName, you can't filter by SourceArn or\n TrialName.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The maximum number of jobs to return in the response. The default value is 10.

          " + "smithy.api#documentation": "

          A filter that returns only components that are part of the specified trial. If you specify\n TrialName, you can't filter by ExperimentName or\n SourceArn.

          " } }, - "ScheduledTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SourceArn": { + "target": "com.amazonaws.sagemaker#String256", "traits": { - "smithy.api#documentation": "

          Filter for jobs scheduled before a specified time.

          " + "smithy.api#documentation": "

          A filter that returns only components that have the specified source Amazon Resource Name\n (ARN). If you specify SourceArn, you can't filter by ExperimentName\n or TrialName.

          " } }, - "ScheduledTimeAfter": { + "CreatedAfter": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Filter for jobs scheduled after a specified time.

          " + "smithy.api#documentation": "

          A filter that returns only components created after the specified time.

          " } }, - "CreationTimeBefore": { + "CreatedBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only jobs created before a specified time.

          " + "smithy.api#documentation": "

          A filter that returns only components created before the specified time.

          " } }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#SortTrialComponentsBy", "traits": { - "smithy.api#documentation": "

          A filter that returns only jobs created after a specified time.

          " + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          A filter that returns only jobs modified before a specified time.

          " + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only jobs modified after a specified time.

          " + "smithy.api#documentation": "

          The maximum number of components to return in the response. The default value is\n 10.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#ExecutionStatus", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that retrieves only jobs with a specific status.

          " + "smithy.api#documentation": "

          If the previous call to ListTrialComponents didn't return the full set of\n components, the call returns a token for getting the next set of components.

          " } } } }, - "com.amazonaws.sagemaker#ListMonitoringExecutionsResponse": { + "com.amazonaws.sagemaker#ListTrialComponentsResponse": { "type": "structure", "members": { - "MonitoringExecutionSummaries": { - "target": "com.amazonaws.sagemaker#MonitoringExecutionSummaryList", + "TrialComponentSummaries": { + "target": "com.amazonaws.sagemaker#TrialComponentSummaries", "traits": { - "smithy.api#documentation": "

          A JSON array in which each element is a summary for a monitoring execution.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of the summaries of your trial components.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs,\n use it in the subsequent request.

          " + "smithy.api#documentation": "

          A token for getting the next set of components, if there are any.

          " } } } }, - "com.amazonaws.sagemaker#ListMonitoringSchedules": { + "com.amazonaws.sagemaker#ListTrials": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListMonitoringSchedulesRequest" + "target": "com.amazonaws.sagemaker#ListTrialsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListMonitoringSchedulesResponse" + "target": "com.amazonaws.sagemaker#ListTrialsResponse" }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Returns list of all monitoring schedules.

          ", + "smithy.api#documentation": "

          Lists the trials in your account. Specify an experiment name to limit the list to the\n trials that are part of that experiment. Specify a trial component name to limit the list to\n the trials that associated with that trial component. The list can be filtered to show only\n trials that were created in a specific time range. The list can be sorted by trial name or\n creation time.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -14980,105 +20137,86 @@ } } }, - "com.amazonaws.sagemaker#ListMonitoringSchedulesRequest": { + "com.amazonaws.sagemaker#ListTrialsRequest": { "type": "structure", "members": { - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", - "traits": { - "smithy.api#documentation": "

          Name of a specific endpoint to fetch schedules for.

          " - } - }, - "SortBy": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleSortKey", - "traits": { - "smithy.api#documentation": "

          Whether to sort results by Status, CreationTime,\n ScheduledTime field. The default is CreationTime.

          " - } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", - "traits": { - "smithy.api#documentation": "

          Whether to sort the results in Ascending or Descending order.\n The default is Descending.

          " - } - }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The token returned if the response is truncated. To retrieve the next set of job\n executions, use it in the next request.

          " + "smithy.api#documentation": "

          A filter that returns only trials that are part of the specified experiment.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "TrialComponentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The maximum number of jobs to return in the response. The default value is 10.

          " + "smithy.api#documentation": "

          A filter that returns only trials that are associated with the specified trial\n component.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", + "CreatedAfter": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Filter for monitoring schedules whose name contains a specified string.

          " + "smithy.api#documentation": "

          A filter that returns only trials created after the specified time.

          " } }, - "CreationTimeBefore": { + "CreatedBefore": { "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A filter that returns only monitoring schedules created before a specified time.

          " + "smithy.api#documentation": "

          A filter that returns only trials created before the specified time.

          " } }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#SortTrialsBy", "traits": { - "smithy.api#documentation": "

          A filter that returns only monitoring schedules created after a specified time.

          " + "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          A filter that returns only monitoring schedules modified before a specified time.

          " + "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only monitoring schedules modified after a specified time.

          " + "smithy.api#documentation": "

          The maximum number of trials to return in the response. The default value is 10.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#ScheduleStatus", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that returns only monitoring schedules with a specified status.

          " + "smithy.api#documentation": "

          If the previous call to ListTrials didn't return the full set of trials, the\n call returns a token for getting the next set of trials.

          " } } } }, - "com.amazonaws.sagemaker#ListMonitoringSchedulesResponse": { + "com.amazonaws.sagemaker#ListTrialsResponse": { "type": "structure", "members": { - "MonitoringScheduleSummaries": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleSummaryList", + "TrialSummaries": { + "target": "com.amazonaws.sagemaker#TrialSummaries", "traits": { - "smithy.api#documentation": "

          A JSON array in which each element is a summary for a monitoring schedule.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of the summaries of your trials.

          " } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of jobs,\n use it in the subsequent request.

          " + "smithy.api#documentation": "

          A token for getting the next set of trials, if there are any.

          " } } } }, - "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigs": { + "com.amazonaws.sagemaker#ListUserProfiles": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsInput" + "target": "com.amazonaws.sagemaker#ListUserProfilesRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsOutput" + "target": "com.amazonaws.sagemaker#ListUserProfilesResponse" }, "traits": { - "smithy.api#documentation": "

          Lists notebook instance lifecycle configurations created with the CreateNotebookInstanceLifecycleConfig API.

          ", + "smithy.api#documentation": "

          Lists user profiles.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -15086,92 +20224,74 @@ } } }, - "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsInput": { + "com.amazonaws.sagemaker#ListUserProfilesRequest": { "type": "structure", "members": { "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of a ListNotebookInstanceLifecycleConfigs request was\n truncated, the response includes a NextToken. To get the next set of\n lifecycle configurations, use the token in the next request.

          " + "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " } }, "MaxResults": { "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The maximum number of lifecycle configurations to return in the response.

          " - } - }, - "SortBy": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortKey", - "traits": { - "smithy.api#documentation": "

          Sorts the list of results. The default is CreationTime.

          " + "smithy.api#documentation": "

          Returns a list up to a specified limit.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortOrder", - "traits": { - "smithy.api#documentation": "

          The sort order for results.

          " - } - }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigNameContains", - "traits": { - "smithy.api#documentation": "

          A string in the lifecycle configuration name. This filter returns only lifecycle\n configurations whose name contains the specified string.

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were created before the\n specified time (timestamp).

          " + "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " } }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#CreationTime", + "SortBy": { + "target": "com.amazonaws.sagemaker#UserProfileSortKey", "traits": { - "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were created after the\n specified time (timestamp).

          " + "smithy.api#documentation": "

          The parameter by which to sort the results. The default is CreationTime.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "DomainIdEquals": { + "target": "com.amazonaws.sagemaker#DomainId", "traits": { - "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were modified before the\n specified time (timestamp).

          " + "smithy.api#documentation": "

          A parameter by which to filter the results.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "UserProfileNameContains": { + "target": "com.amazonaws.sagemaker#UserProfileName", "traits": { - "smithy.api#documentation": "

          A filter that returns only lifecycle configurations that were modified after the\n specified time (timestamp).

          " + "smithy.api#documentation": "

          A parameter by which to filter the results.

          " } } } }, - "com.amazonaws.sagemaker#ListNotebookInstanceLifecycleConfigsOutput": { + "com.amazonaws.sagemaker#ListUserProfilesResponse": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "UserProfiles": { + "target": "com.amazonaws.sagemaker#UserProfileList", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To get the next set of\n lifecycle configurations, use it in the next request.

          " + "smithy.api#documentation": "

          The list of user profiles.

          " } }, - "NotebookInstanceLifecycleConfigs": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummaryList", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          An array of NotebookInstanceLifecycleConfiguration objects, each listing\n a lifecycle configuration.

          " + "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " } } } }, - "com.amazonaws.sagemaker#ListNotebookInstances": { + "com.amazonaws.sagemaker#ListWorkforces": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListNotebookInstancesInput" + "target": "com.amazonaws.sagemaker#ListWorkforcesRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListNotebookInstancesOutput" + "target": "com.amazonaws.sagemaker#ListWorkforcesResponse" }, "traits": { - "smithy.api#documentation": "

          Returns a list of the Amazon SageMaker notebook instances in the requester's account in an AWS\n Region.

          ", + "smithy.api#documentation": "

          Use this operation to list all private and vendor workforces in an AWS Region. Note that you can only \n have one private workforce per AWS Region.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -15179,116 +20299,85 @@ } } }, - "com.amazonaws.sagemaker#ListNotebookInstancesInput": { + "com.amazonaws.sagemaker#ListWorkforcesRequest": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the previous call to the ListNotebookInstances is truncated, the\n response includes a NextToken. You can use this token in your subsequent\n ListNotebookInstances request to fetch the next set of notebook\n instances.

          \n \n

          You might specify a filter or a sort order in your request. When response is\n truncated, you must use the same values for the filer and sort order in the next\n request.

          \n
          " - } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#documentation": "

          The maximum number of notebook instances to return.

          " - } - }, "SortBy": { - "target": "com.amazonaws.sagemaker#NotebookInstanceSortKey", + "target": "com.amazonaws.sagemaker#ListWorkforcesSortByOptions", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is Name.

          " + "smithy.api#documentation": "

          Sort workforces using the workforce name or creation date.

          " } }, "SortOrder": { - "target": "com.amazonaws.sagemaker#NotebookInstanceSortOrder", + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          The sort order for results.

          " + "smithy.api#documentation": "

          Sort workforces in ascending or descending order.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#NotebookInstanceNameContains", - "traits": { - "smithy.api#documentation": "

          A string in the notebook instances' name. This filter returns only notebook\n instances whose name contains the specified string.

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          A filter that returns only notebook instances that were created before the\n specified time (timestamp).

          " - } - }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#CreationTime", - "traits": { - "smithy.api#documentation": "

          A filter that returns only notebook instances that were created after the specified\n time (timestamp).

          " - } - }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", - "traits": { - "smithy.api#documentation": "

          A filter that returns only notebook instances that were modified before the\n specified time (timestamp).

          " - } - }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", - "traits": { - "smithy.api#documentation": "

          A filter that returns only notebook instances that were modified after the\n specified time (timestamp).

          " - } - }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#NotebookInstanceStatus", - "traits": { - "smithy.api#documentation": "

          A filter that returns only notebook instances with the specified status.

          " - } - }, - "NotebookInstanceLifecycleConfigNameContains": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "target": "com.amazonaws.sagemaker#WorkforceName", "traits": { - "smithy.api#documentation": "

          A string in the name of a notebook instances lifecycle configuration associated with\n this notebook instance. This filter returns only notebook instances associated with a\n lifecycle configuration with a name that contains the specified string.

          " + "smithy.api#documentation": "

          A filter you can use to search for workforces using part of the workforce name.

          " } }, - "DefaultCodeRepositoryContains": { - "target": "com.amazonaws.sagemaker#CodeRepositoryContains", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A string in the name or URL of a Git repository associated with this notebook\n instance. This filter returns only notebook instances associated with a git repository\n with a name that contains the specified string.

          " + "smithy.api#documentation": "

          A token to resume pagination.

          " } }, - "AdditionalCodeRepositoryEquals": { - "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          A filter that returns only notebook instances with associated with the specified git\n repository.

          " + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of workforces returned in the response.

          " } } } }, - "com.amazonaws.sagemaker#ListNotebookInstancesOutput": { + "com.amazonaws.sagemaker#ListWorkforcesResponse": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Workforces": { + "target": "com.amazonaws.sagemaker#Workforces", "traits": { - "smithy.api#documentation": "

          If the response to the previous ListNotebookInstances request was\n truncated, Amazon SageMaker returns this token. To retrieve the next set of notebook instances, use\n the token in the next request.

          " + "smithy.api#documentation": "

          A list containing information about your workforce.

          ", + "smithy.api#required": {} } }, - "NotebookInstances": { - "target": "com.amazonaws.sagemaker#NotebookInstanceSummaryList", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          An array of NotebookInstanceSummary objects, one for each notebook\n instance.

          " + "smithy.api#documentation": "

          A token to resume pagination.

          " } } } }, - "com.amazonaws.sagemaker#ListProcessingJobs": { + "com.amazonaws.sagemaker#ListWorkforcesSortByOptions": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "Name" + }, + { + "value": "CreateDate", + "name": "CreateDate" + } + ] + } + }, + "com.amazonaws.sagemaker#ListWorkteams": { "type": "operation", "input": { - "target": "com.amazonaws.sagemaker#ListProcessingJobsRequest" + "target": "com.amazonaws.sagemaker#ListWorkteamsRequest" }, "output": { - "target": "com.amazonaws.sagemaker#ListProcessingJobsResponse" + "target": "com.amazonaws.sagemaker#ListWorkteamsResponse" }, "traits": { - "smithy.api#documentation": "

          Lists processing jobs that satisfy various filters.

          ", + "smithy.api#documentation": "

          Gets a list of private work teams that you have defined in a region. The list may be empty if\n no work team satisfies the filter specified in the NameContains\n parameter.

          ", "smithy.api#paginated": { "inputToken": "NextToken", "outputToken": "NextToken", @@ -15296,843 +20385,1134 @@ } } }, - "com.amazonaws.sagemaker#ListProcessingJobsRequest": { + "com.amazonaws.sagemaker#ListWorkteamsRequest": { "type": "structure", "members": { - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only processing jobs created after the specified time.

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only processing jobs created after the specified time.

          " - } - }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortBy": { + "target": "com.amazonaws.sagemaker#ListWorkteamsSortByOptions", "traits": { - "smithy.api#documentation": "

          A filter that returns only processing jobs modified after the specified time.

          " + "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "SortOrder": { + "target": "com.amazonaws.sagemaker#SortOrder", "traits": { - "smithy.api#documentation": "

          A filter that returns only processing jobs modified before the specified time.

          " + "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " } }, "NameContains": { - "target": "com.amazonaws.sagemaker#String", + "target": "com.amazonaws.sagemaker#WorkteamName", "traits": { - "smithy.api#documentation": "

          A string in the processing job name. This filter returns only processing jobs whose\n name contains the specified string.

          " + "smithy.api#documentation": "

          A string in the work team's name. This filter returns only work teams whose name\n contains the specified string.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#ProcessingJobStatus", + "NextToken": { + "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          A filter that retrieves only processing jobs with a specific status.

          " + "smithy.api#documentation": "

          If the result of the previous ListWorkteams request was truncated, the\n response includes a NextToken. To retrieve the next set of labeling jobs,\n use the token in the next request.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#SortBy", + "MaxResults": { + "target": "com.amazonaws.sagemaker#MaxResults", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#box": {}, + "smithy.api#documentation": "

          The maximum number of work teams to return in each page of the response.

          " } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + } + } + }, + "com.amazonaws.sagemaker#ListWorkteamsResponse": { + "type": "structure", + "members": { + "Workteams": { + "target": "com.amazonaws.sagemaker#Workteams", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + "smithy.api#documentation": "

          An array of Workteam objects, each describing a work team.

          ", + "smithy.api#required": {} } }, "NextToken": { "target": "com.amazonaws.sagemaker#NextToken", "traits": { - "smithy.api#documentation": "

          If the result of the previous ListProcessingJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of processing\n jobs, use the token in the next request.

          " + "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n work teams, use it in the subsequent request.

          " } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of processing jobs to return in the response.

          " + } + } + }, + "com.amazonaws.sagemaker#ListWorkteamsSortByOptions": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "Name" + }, + { + "value": "CreateDate", + "name": "CreateDate" } + ] + } + }, + "com.amazonaws.sagemaker#MaxAutoMLJobRuntimeInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 } } }, - "com.amazonaws.sagemaker#ListProcessingJobsResponse": { + "com.amazonaws.sagemaker#MaxCandidates": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaxConcurrentTaskCount": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 1000 + } + } + }, + "com.amazonaws.sagemaker#MaxConcurrentTransforms": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.sagemaker#MaxHumanLabeledObjectCount": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaxNumberOfTrainingJobs": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaxParallelTrainingJobs": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaxPayloadInMB": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.sagemaker#MaxPercentageOfInputDatasetLabeled": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#MaxResults": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#MaxRuntimeInSeconds": { + "type": "integer", + "traits": { + 
"smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaxRuntimePerTrainingJobInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaxWaitTimeInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#MaximumExecutionTimeoutInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 600, + "max": 14400 + } + } + }, + "com.amazonaws.sagemaker#MediaType": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 64 + }, + "smithy.api#pattern": "^[-\\w]+\\/[-\\w+]+$" + } + }, + "com.amazonaws.sagemaker#MemberDefinition": { "type": "structure", "members": { - "ProcessingJobSummaries": { - "target": "com.amazonaws.sagemaker#ProcessingJobSummaries", + "CognitoMemberDefinition": { + "target": "com.amazonaws.sagemaker#CognitoMemberDefinition", "traits": { - "smithy.api#documentation": "

          An array of ProcessingJobSummary objects, each listing a processing\n job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Cognito user group that is part of the work team.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "OidcMemberDefinition": { + "target": "com.amazonaws.sagemaker#OidcMemberDefinition", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n processing jobs, use it in the subsequent request.

          " + "smithy.api#documentation": "

          A list user groups that exist in your OIDC Identity Provider (IdP). \n One to ten groups can be used to create a single private work team. \n When you add a user group to the list of Groups, you can add that user group to one or more\n private work teams. If you add a user group to a private work team, all workers in that user group \n are added to the work team.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Defines an Amazon Cognito or your own OIDC IdP user group that is part of a work team.

          " } }, - "com.amazonaws.sagemaker#ListSubscribedWorkteams": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListSubscribedWorkteamsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListSubscribedWorkteamsResponse" + "com.amazonaws.sagemaker#MemberDefinitions": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#MemberDefinition" }, "traits": { - "smithy.api#documentation": "

          Gets a list of the work teams that you are subscribed to in the AWS Marketplace. The\n list may be empty if no work team satisfies the filter specified in the\n NameContains parameter.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" + "smithy.api#length": { + "min": 1, + "max": 10 } } }, - "com.amazonaws.sagemaker#ListSubscribedWorkteamsRequest": { + "com.amazonaws.sagemaker#MetadataProperties": { "type": "structure", "members": { - "NameContains": { - "target": "com.amazonaws.sagemaker#WorkteamName", + "CommitId": { + "target": "com.amazonaws.sagemaker#MetadataPropertyValue", "traits": { - "smithy.api#documentation": "

          A string in the work team name. This filter returns only work teams whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          The commit ID.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Repository": { + "target": "com.amazonaws.sagemaker#MetadataPropertyValue", "traits": { - "smithy.api#documentation": "

          If the result of the previous ListSubscribedWorkteams request was\n truncated, the response includes a NextToken. To retrieve the next set of\n labeling jobs, use the token in the next request.

          " + "smithy.api#documentation": "

          The repository.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of work teams to return in each page of the response.

          " - } - } - } - }, - "com.amazonaws.sagemaker#ListSubscribedWorkteamsResponse": { - "type": "structure", - "members": { - "SubscribedWorkteams": { - "target": "com.amazonaws.sagemaker#SubscribedWorkteams", + "GeneratedBy": { + "target": "com.amazonaws.sagemaker#MetadataPropertyValue", "traits": { - "smithy.api#documentation": "

          An array of Workteam objects, each describing a work team.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The entity this entity was generated by.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ProjectId": { + "target": "com.amazonaws.sagemaker#MetadataPropertyValue", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n work teams, use it in the subsequent request.

          " + "smithy.api#documentation": "

          The project ID.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Metadata properties of the tracking entity, trial, or trial component.

          " } }, - "com.amazonaws.sagemaker#ListTags": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListTagsInput" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListTagsOutput" - }, + "com.amazonaws.sagemaker#MetadataPropertyValue": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Returns the tags for the specified Amazon SageMaker resource.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#ListTagsInput": { + "com.amazonaws.sagemaker#MetricData": { "type": "structure", "members": { - "ResourceArn": { - "target": "com.amazonaws.sagemaker#ResourceArn", + "MetricName": { + "target": "com.amazonaws.sagemaker#MetricName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the resource whose tags you want to\n retrieve.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the metric.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Value": { + "target": "com.amazonaws.sagemaker#Float", "traits": { - "smithy.api#documentation": "

          If the response to the previous ListTags request is truncated, Amazon SageMaker\n returns this token. To retrieve the next set of tags, use it in the subsequent request.\n

          " + "smithy.api#documentation": "

          The value of the metric.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#ListTagsMaxResults", + "Timestamp": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Maximum number of tags to return.

          " + "smithy.api#documentation": "

          The date and time that the algorithm emitted the metric.

          " } } - } - }, - "com.amazonaws.sagemaker#ListTagsMaxResults": { - "type": "integer", + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 50 - } + "smithy.api#documentation": "

          The name, value, and date and time of a metric that was emitted to Amazon CloudWatch.

          " } }, - "com.amazonaws.sagemaker#ListTagsOutput": { + "com.amazonaws.sagemaker#MetricDefinition": { "type": "structure", "members": { - "Tags": { - "target": "com.amazonaws.sagemaker#TagList", + "Name": { + "target": "com.amazonaws.sagemaker#MetricName", "traits": { - "smithy.api#documentation": "

          An array of Tag objects, each with a tag key and a value.

          " + "smithy.api#documentation": "

          The name of the metric.

          ", + "smithy.api#required": {} } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Regex": { + "target": "com.amazonaws.sagemaker#MetricRegex", "traits": { - "smithy.api#documentation": "

          If response is truncated, Amazon SageMaker includes a token in the response. You can use this\n token in your subsequent request to fetch next set of tokens.

          " + "smithy.api#documentation": "

          A regular expression that searches the output of a training job and gets the value of\n the metric. For more information about using regular expressions to define metrics, see\n Defining\n Objective Metrics.

          ", + "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          Specifies a metric that the training algorithm\n writes\n to stderr or stdout\n . Amazon SageMaker hyperparameter\n tuning captures\n all\n defined metrics.\n You\n specify one metric that a hyperparameter tuning job uses as its\n objective metric to choose the best training job.

          " } }, - "com.amazonaws.sagemaker#ListTrainingJobs": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListTrainingJobsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListTrainingJobsResponse" + "com.amazonaws.sagemaker#MetricDefinitionList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#MetricDefinition" }, "traits": { - "smithy.api#documentation": "

          Lists training jobs.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" + "smithy.api#length": { + "min": 0, + "max": 40 } } }, - "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJob": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], + "com.amazonaws.sagemaker#MetricName": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Gets a list of TrainingJobSummary objects that describe the training\n jobs that a hyperparameter tuning job launched.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": ".+" } }, - "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobRequest": { + "com.amazonaws.sagemaker#MetricRegex": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 500 + }, + "smithy.api#pattern": ".+" + } + }, + "com.amazonaws.sagemaker#MetricValue": { + "type": "float" + }, + "com.amazonaws.sagemaker#MetricsSource": { "type": "structure", "members": { - "HyperParameterTuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "ContentType": { + "target": "com.amazonaws.sagemaker#ContentType", "traits": { - "smithy.api#documentation": "

          The name of the tuning job whose training jobs you want to list.

          ", + "smithy.api#documentation": "

          ", "smithy.api#required": {} } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the result of the previous ListTrainingJobsForHyperParameterTuningJob\n request was truncated, the response includes a NextToken. To retrieve the\n next set of training jobs, use the token in the next request.

          " - } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "ContentDigest": { + "target": "com.amazonaws.sagemaker#ContentDigest", "traits": { - "smithy.api#documentation": "

          The maximum number of training jobs to return. The default value is 10.

          " + "smithy.api#documentation": "

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#TrainingJobStatus", + "S3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          A filter that returns only training jobs with the\n specified\n status.

          " + "smithy.api#documentation": "

          ", + "smithy.api#required": {} } - }, - "SortBy": { - "target": "com.amazonaws.sagemaker#TrainingJobSortByOptions", - "traits": { - "smithy.api#documentation": "

          The field to sort\n results\n by. The default is Name.

          \n

          If the value of this field is FinalObjectiveMetricValue, any training\n jobs that did not return an objective metric are not listed.

          " + } + }, + "traits": { + "smithy.api#documentation": "

          " + } + }, + "com.amazonaws.sagemaker#ModelApprovalStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Approved", + "name": "APPROVED" + }, + { + "value": "Rejected", + "name": "REJECTED" + }, + { + "value": "PendingManualApproval", + "name": "PENDING_MANUAL_APPROVAL" } + ] + } + }, + "com.amazonaws.sagemaker#ModelArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 20, + "max": 2048 }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model/.*" + } + }, + "com.amazonaws.sagemaker#ModelArtifacts": { + "type": "structure", + "members": { + "S3ModelArtifacts": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          The sort order\n for\n results. The default is Ascending.

          " + "smithy.api#documentation": "

          The path of the S3 object that contains the model artifacts. For example,\n s3://bucket-name/keynameprefix/model.tar.gz.

          ", + "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          Provides information about the location that is configured for storing model\n artifacts.

          \n

          Model artifacts are the output that results from training a model, and typically\n consist of trained parameters, a model definition that describes how to compute\n inferences, and other metadata.

          " } }, - "com.amazonaws.sagemaker#ListTrainingJobsForHyperParameterTuningJobResponse": { + "com.amazonaws.sagemaker#ModelClientConfig": { "type": "structure", "members": { - "TrainingJobSummaries": { - "target": "com.amazonaws.sagemaker#HyperParameterTrainingJobSummaries", + "InvocationsTimeoutInSeconds": { + "target": "com.amazonaws.sagemaker#InvocationsTimeoutInSeconds", "traits": { - "smithy.api#documentation": "

          A list of TrainingJobSummary objects that\n describe\n the training jobs that the\n ListTrainingJobsForHyperParameterTuningJob request returned.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The timeout value in seconds for an invocation request.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "InvocationsMaxRetries": { + "target": "com.amazonaws.sagemaker#InvocationsMaxRetries", "traits": { - "smithy.api#documentation": "

          If the result of this ListTrainingJobsForHyperParameterTuningJob request\n was truncated, the response includes a NextToken. To retrieve the next set\n of training jobs, use the token in the next request.

          " + "smithy.api#documentation": "

          The maximum number of retries when invocation requests are failing.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Configures the timeout and maximum number of retries for processing a transform job\n invocation.

          " } }, - "com.amazonaws.sagemaker#ListTrainingJobsRequest": { + "com.amazonaws.sagemaker#ModelDataQuality": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the result of the previous ListTrainingJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of training\n jobs, use the token in the next request.

          " - } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", - "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of training jobs to return in the response.

          " - } - }, - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A filter that returns only training jobs created after the specified time\n (timestamp).

          " - } - }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "Statistics": { + "target": "com.amazonaws.sagemaker#MetricsSource", "traits": { - "smithy.api#documentation": "

          A filter that returns only training jobs created before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          Data quality statistics for a model.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "Constraints": { + "target": "com.amazonaws.sagemaker#MetricsSource", "traits": { - "smithy.api#documentation": "

          A filter that returns only training jobs modified after the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          Data quality constraints for a model.

          " } - }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + } + }, + "traits": { + "smithy.api#documentation": "

          Data quality constraints and statistics for a model.

          " + } + }, + "com.amazonaws.sagemaker#ModelDigests": { + "type": "structure", + "members": { + "ArtifactDigest": { + "target": "com.amazonaws.sagemaker#ArtifactDigest", "traits": { - "smithy.api#documentation": "

          A filter that returns only training jobs modified before the specified time\n (timestamp).

          " + "smithy.api#documentation": "

          Provides a hash value that uniquely identifies the stored model artifacts.

          " } - }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", + } + }, + "traits": { + "smithy.api#documentation": "

          Provides information to verify the integrity of stored model artifacts.

          " + } + }, + "com.amazonaws.sagemaker#ModelMetrics": { + "type": "structure", + "members": { + "ModelQuality": { + "target": "com.amazonaws.sagemaker#ModelQuality", "traits": { - "smithy.api#documentation": "

          A string in the training job name. This filter returns only training jobs whose\n name contains the specified string.

          " + "smithy.api#documentation": "

          Metrics that measure the quality of a model.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#TrainingJobStatus", + "ModelDataQuality": { + "target": "com.amazonaws.sagemaker#ModelDataQuality", "traits": { - "smithy.api#documentation": "

          A filter that retrieves only training jobs with a specific status.

          " + "smithy.api#documentation": "

          Metrics that measure the quality of the input data for a model.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#SortBy", + "Bias": { + "target": "com.amazonaws.sagemaker#Bias", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#documentation": "

          Metrics that measure bias in a model.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "Explainability": { + "target": "com.amazonaws.sagemaker#Explainability", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + "smithy.api#documentation": "

          Metrics that help explain a model.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Contains metrics captured from a model.

          " } }, - "com.amazonaws.sagemaker#ListTrainingJobsResponse": { - "type": "structure", - "members": { - "TrainingJobSummaries": { - "target": "com.amazonaws.sagemaker#TrainingJobSummaries", - "traits": { - "smithy.api#documentation": "

          An array of TrainingJobSummary objects, each listing a training\n job.

          ", - "smithy.api#required": {} - } + "com.amazonaws.sagemaker#ModelName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n training jobs, use it in the subsequent request.

          " - } - } + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" } }, - "com.amazonaws.sagemaker#ListTransformJobs": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListTransformJobsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListTransformJobsResponse" - }, + "com.amazonaws.sagemaker#ModelNameContains": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Lists transform jobs.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "[a-zA-Z0-9-]+" } }, - "com.amazonaws.sagemaker#ListTransformJobsRequest": { + "com.amazonaws.sagemaker#ModelPackage": { "type": "structure", "members": { - "CreationTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          A filter that returns only transform jobs created after the specified time.

          " + "smithy.api#documentation": "

          The name of the model.

          " } }, - "CreationTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          A filter that returns only transform jobs created before the specified time.

          " + "smithy.api#documentation": "

          The model group to which the model belongs.

          " } }, - "LastModifiedTimeAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageVersion": { + "target": "com.amazonaws.sagemaker#ModelPackageVersion", "traits": { - "smithy.api#documentation": "

          A filter that returns only transform jobs modified after the specified time.

          " + "smithy.api#documentation": "

          The version number of a versioned model.

          " } }, - "LastModifiedTimeBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageArn": { + "target": "com.amazonaws.sagemaker#ModelPackageArn", "traits": { - "smithy.api#documentation": "

          A filter that returns only transform jobs modified before the specified time.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#NameContains", + "ModelPackageDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          A string in the transform job name. This filter returns only transform jobs whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          The description of the model package.

          " } }, - "StatusEquals": { - "target": "com.amazonaws.sagemaker#TransformJobStatus", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A filter that retrieves only transform jobs with a specific status.

          " + "smithy.api#documentation": "

          The time that the model package was created.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#SortBy", + "InferenceSpecification": { + "target": "com.amazonaws.sagemaker#InferenceSpecification" + }, + "SourceAlgorithmSpecification": { + "target": "com.amazonaws.sagemaker#SourceAlgorithmSpecification" + }, + "ValidationSpecification": { + "target": "com.amazonaws.sagemaker#ModelPackageValidationSpecification" + }, + "ModelPackageStatus": { + "target": "com.amazonaws.sagemaker#ModelPackageStatus", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#documentation": "

          The status of the model package. This can be one of the following values.

          \n
            \n
          • \n

            \n PENDING - The model package is pending being created.

            \n
          • \n
          • \n

            \n IN_PROGRESS - The model package is in the process of being\n created.

            \n
          • \n
          • \n

            \n COMPLETED - The model package was successfully created.

            \n
          • \n
          • \n

            \n FAILED - The model package failed.

            \n
          • \n
          • \n

            \n DELETING - The model package is in the process of being deleted.

            \n
          • \n
          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "ModelPackageStatusDetails": { + "target": "com.amazonaws.sagemaker#ModelPackageStatusDetails" + }, + "CertifyForMarketplace": { + "target": "com.amazonaws.sagemaker#CertifyForMarketplace", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Descending.

          " + "smithy.api#documentation": "

          Whether the model package is to be certified to be listed on AWS Marketplace. For\n information about listing model packages on AWS Marketplace, see List Your\n Algorithm or Model Package on AWS Marketplace.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ModelApprovalStatus": { + "target": "com.amazonaws.sagemaker#ModelApprovalStatus", "traits": { - "smithy.api#documentation": "

          If the result of the previous ListTransformJobs request was truncated,\n the response includes a NextToken. To retrieve the next set of transform\n jobs, use the token in the next request.

          " + "smithy.api#documentation": "

          The approval status of the model. This can be one of the following values.

          \n
            \n
          • \n

            \n APPROVED - The model is approved

            \n
          • \n
          • \n

            \n REJECTED - The model is rejected.

            \n
          • \n
          • \n

            \n PENDING_MANUAL_APPROVAL - The model is waiting for manual\n approval.

            \n
          • \n
          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, + "ModelMetrics": { + "target": "com.amazonaws.sagemaker#ModelMetrics", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of\n transform\n jobs to return in the response. The default value is\n 10.

          " + "smithy.api#documentation": "

          Metrics for the model.

          " } - } - } - }, - "com.amazonaws.sagemaker#ListTransformJobsResponse": { - "type": "structure", - "members": { - "TransformJobSummaries": { - "target": "com.amazonaws.sagemaker#TransformJobSummaries", + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          An array of\n TransformJobSummary\n objects.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The last time the model package was modified.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "ApprovalDescription": { + "target": "com.amazonaws.sagemaker#ApprovalDescription", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n transform jobs, use it in the next request.

          " + "smithy.api#documentation": "

          A description provided when the model approval is set.

          " + } + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          A list of the tags associated with the model package. For more information, see Tagging AWS\n resources in the AWS General Reference Guide.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          A versioned model that can be deployed for SageMaker inference.

          " } }, - "com.amazonaws.sagemaker#ListTrialComponentKey256": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#TrialComponentKey256" - } - }, - "com.amazonaws.sagemaker#ListTrialComponents": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListTrialComponentsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListTrialComponentsResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], + "com.amazonaws.sagemaker#ModelPackageArn": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Lists the trial components in your account. You can sort the list by trial component name\n or creation time. You can filter the list to show only components that were created in a\n specific time range. You can also filter on one of the following:

          \n
            \n
          • \n

            \n ExperimentName\n

            \n
          • \n
          • \n

            \n SourceArn\n

            \n
          • \n
          • \n

            \n TrialName\n

            \n
          • \n
          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model-package/.*" } }, - "com.amazonaws.sagemaker#ListTrialComponentsRequest": { + "com.amazonaws.sagemaker#ModelPackageContainerDefinition": { "type": "structure", "members": { - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ContainerHostname": { + "target": "com.amazonaws.sagemaker#ContainerHostname", + "traits": { + "smithy.api#documentation": "

          The DNS host name for the Docker container.

          " + } + }, + "Image": { + "target": "com.amazonaws.sagemaker#ContainerImage", + "traits": { + "smithy.api#documentation": "

          The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

          \n

          If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker,\n the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both\n registry/repository[:tag] and registry/repository[@digest]\n image path formats. For more information, see Using Your Own Algorithms with Amazon\n SageMaker.

          ", + "smithy.api#required": {} + } + }, + "ImageDigest": { + "target": "com.amazonaws.sagemaker#ImageDigest", "traits": { - "smithy.api#documentation": "

          A filter that returns only components that are part of the specified experiment. If you\n specify ExperimentName, you can't filter by SourceArn or\n TrialName.

          " + "smithy.api#documentation": "

          An MD5 hash of the training algorithm that identifies the Docker image used for\n training.

          " } }, - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelDataUrl": { + "target": "com.amazonaws.sagemaker#Url", "traits": { - "smithy.api#documentation": "

          A filter that returns only components that are part of the specified trial. If you specify\n TrialName, you can't filter by ExperimentName or\n SourceArn.

          " + "smithy.api#documentation": "

          The Amazon S3 path where the model artifacts, which result from model training, are stored.\n This path must point to a single gzip compressed tar archive\n (.tar.gz suffix).

          \n \n

          The model artifacts must be in an S3 bucket that is in the same region as the\n model package.

          \n
          " } }, - "SourceArn": { - "target": "com.amazonaws.sagemaker#String256", + "ProductId": { + "target": "com.amazonaws.sagemaker#ProductId", "traits": { - "smithy.api#documentation": "

          A filter that returns only components that have the specified source Amazon Resource Name\n (ARN). If you specify SourceArn, you can't filter by ExperimentName\n or TrialName.

          " + "smithy.api#documentation": "

          The AWS Marketplace product ID of the model package.

          " } - }, - "CreatedAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + } + }, + "traits": { + "smithy.api#documentation": "

          Describes the Docker container for the model package.

          " + } + }, + "com.amazonaws.sagemaker#ModelPackageContainerDefinitionList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ModelPackageContainerDefinition" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1 + } + } + }, + "com.amazonaws.sagemaker#ModelPackageGroup": { + "type": "structure", + "members": { + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          A filter that returns only components created after the specified time.

          " + "smithy.api#documentation": "

          The name of the model group.

          " } }, - "CreatedBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageGroupArn": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupArn", "traits": { - "smithy.api#documentation": "

          A filter that returns only components created before the specified time.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model group.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#SortTrialComponentsBy", + "ModelPackageGroupDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + "smithy.api#documentation": "

          The description for the model group.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + "smithy.api#documentation": "

          The time that the model group was created.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "ModelPackageGroupStatus": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupStatus", "traits": { - "smithy.api#documentation": "

          The maximum number of components to return in the response. The default value is\n 10.

          " + "smithy.api#documentation": "

          The status of the model group. This can be one of the following values.

          \n
            \n
          • \n

            \n PENDING - The model group is pending being created.

            \n
          • \n
          • \n

            \n IN_PROGRESS - The model group is in the process of being\n created.

            \n
          • \n
          • \n

            \n COMPLETED - The model group was successfully created.

            \n
          • \n
          • \n

            \n FAILED - The model group failed.

            \n
          • \n
          • \n

            \n DELETING - The model group is in the process of being deleted.

            \n
          • \n
          • \n

            \n DELETE_FAILED - SageMaker failed to delete the model group.

            \n
          • \n
          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          If the previous call to ListTrialComponents didn't return the full set of\n components, the call returns a token for getting the next set of components.

          " + "smithy.api#documentation": "

          A list of the tags associated with the model group. For more information, see Tagging AWS\n resources in the AWS General Reference Guide.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          A group of versioned models in the model registry.

          " } }, - "com.amazonaws.sagemaker#ListTrialComponentsResponse": { - "type": "structure", - "members": { - "TrialComponentSummaries": { - "target": "com.amazonaws.sagemaker#TrialComponentSummaries", - "traits": { - "smithy.api#documentation": "

          A list of the summaries of your trial components.

          " - } + "com.amazonaws.sagemaker#ModelPackageGroupArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", - "traits": { - "smithy.api#documentation": "

          A token for getting the next set of components, if there are any.

          " + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model-package-group/.*" + } + }, + "com.amazonaws.sagemaker#ModelPackageGroupSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" } - } + ] } }, - "com.amazonaws.sagemaker#ListTrials": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListTrialsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListTrialsResponse" - }, - "errors": [ - { - "target": "com.amazonaws.sagemaker#ResourceNotFound" - } - ], + "com.amazonaws.sagemaker#ModelPackageGroupStatus": { + "type": "string", "traits": { - "smithy.api#documentation": "

          Lists the trials in your account. Specify an experiment name to limit the list to the\n trials that are part of that experiment. Specify a trial component name to limit the list to\n the trials that associated with that trial component. The list can be filtered to show only\n trials that were created in a specific time range. The list can be sorted by trial name or\n creation time.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#enum": [ + { + "value": "Pending", + "name": "PENDING" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Deleting", + "name": "DELETING" + }, + { + "value": "DeleteFailed", + "name": "DELETE_FAILED" + } + ] } }, - "com.amazonaws.sagemaker#ListTrialsRequest": { + "com.amazonaws.sagemaker#ModelPackageGroupSummary": { "type": "structure", "members": { - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          A filter that returns only trials that are part of the specified experiment.

          " + "smithy.api#documentation": "

          The name of the model group.

          ", + "smithy.api#required": {} } }, - "TrialComponentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "ModelPackageGroupArn": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupArn", "traits": { - "smithy.api#documentation": "

          A filter that returns only trials that are associated with the specified trial\n component.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model group.

          ", + "smithy.api#required": {} } }, - "CreatedAfter": { - "target": "com.amazonaws.sagemaker#Timestamp", + "ModelPackageGroupDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          A filter that returns only trials created after the specified time.

          " + "smithy.api#documentation": "

          A description of the model group.

          " } }, - "CreatedBefore": { - "target": "com.amazonaws.sagemaker#Timestamp", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A filter that returns only trials created before the specified time.

          " + "smithy.api#documentation": "

          The time that the model group was created.

          ", + "smithy.api#required": {} } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#SortTrialsBy", + "ModelPackageGroupStatus": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupStatus", "traits": { - "smithy.api#documentation": "

          The property used to sort results. The default value is CreationTime.

          " + "smithy.api#documentation": "

          The status of the model group.

          ", + "smithy.api#required": {} } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", - "traits": { - "smithy.api#documentation": "

          The sort order. The default value is Descending.

          " + } + }, + "traits": { + "smithy.api#documentation": "

          Summary information about a model group.

          " + } + }, + "com.amazonaws.sagemaker#ModelPackageGroupSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupSummary" + } + }, + "com.amazonaws.sagemaker#ModelPackageSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" } - }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + ] + } + }, + "com.amazonaws.sagemaker#ModelPackageStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Pending", + "name": "PENDING" + }, + { + "value": "InProgress", + "name": "IN_PROGRESS" + }, + { + "value": "Completed", + "name": "COMPLETED" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Deleting", + "name": "DELETING" + } + ] + } + }, + "com.amazonaws.sagemaker#ModelPackageStatusDetails": { + "type": "structure", + "members": { + "ValidationStatuses": { + "target": "com.amazonaws.sagemaker#ModelPackageStatusItemList", "traits": { - "smithy.api#documentation": "

          The maximum number of trials to return in the response. The default value is 10.

          " + "smithy.api#documentation": "

          The validation status of the model package.

          ", + "smithy.api#required": {} } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ImageScanStatuses": { + "target": "com.amazonaws.sagemaker#ModelPackageStatusItemList", "traits": { - "smithy.api#documentation": "

          If the previous call to ListTrials didn't return the full set of trials, the\n call returns a token for getting the next set of trials.

          " + "smithy.api#documentation": "

          The status of the scan of the Docker image container for the model package.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Specifies the validation and image scan statuses of the model package.

          " } }, - "com.amazonaws.sagemaker#ListTrialsResponse": { + "com.amazonaws.sagemaker#ModelPackageStatusItem": { "type": "structure", "members": { - "TrialSummaries": { - "target": "com.amazonaws.sagemaker#TrialSummaries", + "Name": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          A list of the summaries of your trials.

          " + "smithy.api#documentation": "

          The name of the model package for which the overall status is being reported.

          ", + "smithy.api#required": {} + } + }, + "Status": { + "target": "com.amazonaws.sagemaker#DetailedModelPackageStatus", + "traits": { + "smithy.api#documentation": "

          The current status.

          ", + "smithy.api#required": {} } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "FailureReason": { + "target": "com.amazonaws.sagemaker#String", "traits": { - "smithy.api#documentation": "

          A token for getting the next set of trials, if there are any.

          " + "smithy.api#documentation": "

          if the overall status is Failed, the reason for the failure.

          " } } - } - }, - "com.amazonaws.sagemaker#ListUserProfiles": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListUserProfilesRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListUserProfilesResponse" }, "traits": { - "smithy.api#documentation": "

          Lists user profiles.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" - } + "smithy.api#documentation": "

          Represents the overall status of a model package.

          " } }, - "com.amazonaws.sagemaker#ListUserProfilesRequest": { + "com.amazonaws.sagemaker#ModelPackageStatusItemList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ModelPackageStatusItem" + } + }, + "com.amazonaws.sagemaker#ModelPackageSummary": { "type": "structure", "members": { - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ModelPackageName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + "smithy.api#documentation": "

          The name of the model package.

          ", + "smithy.api#required": {} } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          Returns a list up to a specified limit.

          " + "smithy.api#documentation": "

          If the model package is a versioned model, the model group that the versioned model\n belongs to.

          " } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "ModelPackageVersion": { + "target": "com.amazonaws.sagemaker#ModelPackageVersion", "traits": { - "smithy.api#documentation": "

          The sort order for the results. The default is Ascending.

          " + "smithy.api#documentation": "

          If the model package is a versioned model, the version of the model.

          " } }, - "SortBy": { - "target": "com.amazonaws.sagemaker#UserProfileSortKey", + "ModelPackageArn": { + "target": "com.amazonaws.sagemaker#ModelPackageArn", "traits": { - "smithy.api#documentation": "

          The parameter by which to sort the results. The default is CreationTime.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package.

          ", + "smithy.api#required": {} } }, - "DomainIdEquals": { - "target": "com.amazonaws.sagemaker#DomainId", + "ModelPackageDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          A parameter by which to filter the results.

          " + "smithy.api#documentation": "

          A brief description of the model package.

          " } }, - "UserProfileNameContains": { - "target": "com.amazonaws.sagemaker#UserProfileName", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          A parameter by which to filter the results.

          " + "smithy.api#documentation": "

          A timestamp that shows when the model package was created.

          ", + "smithy.api#required": {} + } + }, + "ModelPackageStatus": { + "target": "com.amazonaws.sagemaker#ModelPackageStatus", + "traits": { + "smithy.api#documentation": "

          The overall status of the model package.

          ", + "smithy.api#required": {} + } + }, + "ModelApprovalStatus": { + "target": "com.amazonaws.sagemaker#ModelApprovalStatus", + "traits": { + "smithy.api#documentation": "

          The approval status of the model. This can be one of the following values.

          \n
            \n
          • \n

            \n APPROVED - The model is approved

            \n
          • \n
          • \n

            \n REJECTED - The model is rejected.

            \n
          • \n
          • \n

            \n PENDING_MANUAL_APPROVAL - The model is waiting for manual\n approval.

            \n
          • \n
          " } } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information about a model package.

          " } }, - "com.amazonaws.sagemaker#ListUserProfilesResponse": { + "com.amazonaws.sagemaker#ModelPackageSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ModelPackageSummary" + } + }, + "com.amazonaws.sagemaker#ModelPackageType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Versioned", + "name": "VERSIONED" + }, + { + "value": "Unversioned", + "name": "UNVERSIONED" + }, + { + "value": "Both", + "name": "BOTH" + } + ] + } + }, + "com.amazonaws.sagemaker#ModelPackageValidationProfile": { "type": "structure", "members": { - "UserProfiles": { - "target": "com.amazonaws.sagemaker#UserProfileList", + "ProfileName": { + "target": "com.amazonaws.sagemaker#EntityName", "traits": { - "smithy.api#documentation": "

          The list of user profiles.

          " + "smithy.api#documentation": "

          The name of the profile for the model package.

          ", + "smithy.api#required": {} } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "TransformJobDefinition": { + "target": "com.amazonaws.sagemaker#TransformJobDefinition", "traits": { - "smithy.api#documentation": "

          If the previous response was truncated, you will receive this token.\n Use it in your next request to receive the next set of results.

          " + "smithy.api#documentation": "

          The TransformJobDefinition object that describes the transform job used\n for the validation of the model package.

          ", + "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          Contains data, such as the inputs and targeted instance types that are used in the\n process of validating the model package.

          \n

          The data provided in the validation profile is made available to your buyers on AWS\n Marketplace.

          " } }, - "com.amazonaws.sagemaker#ListWorkforces": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListWorkforcesRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListWorkforcesResponse" + "com.amazonaws.sagemaker#ModelPackageValidationProfiles": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ModelPackageValidationProfile" }, "traits": { - "smithy.api#documentation": "

          Use this operation to list all private and vendor workforces in an AWS Region. Note that you can only \n have one private workforce per AWS Region.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" + "smithy.api#length": { + "min": 1, + "max": 1 } } }, - "com.amazonaws.sagemaker#ListWorkforcesRequest": { + "com.amazonaws.sagemaker#ModelPackageValidationSpecification": { "type": "structure", "members": { - "SortBy": { - "target": "com.amazonaws.sagemaker#ListWorkforcesSortByOptions", - "traits": { - "smithy.api#documentation": "

          Sort workforces using the workforce name or creation date.

          " - } - }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", - "traits": { - "smithy.api#documentation": "

          Sort workforces in ascending or descending order.

          " - } - }, - "NameContains": { - "target": "com.amazonaws.sagemaker#WorkforceName", - "traits": { - "smithy.api#documentation": "

          A filter you can use to search for workforces using part of the workforce name.

          " - } - }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "ValidationRole": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          A token to resume pagination.

          " + "smithy.api#documentation": "

          The IAM roles to be used for the validation of the model package.

          ", + "smithy.api#required": {} } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "ValidationProfiles": { + "target": "com.amazonaws.sagemaker#ModelPackageValidationProfiles", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of workforces returned in the response.

          " + "smithy.api#documentation": "

          An array of ModelPackageValidationProfile objects, each of which\n specifies a batch transform job that Amazon SageMaker runs to validate your model package.

          ", + "smithy.api#required": {} } } + }, + "traits": { + "smithy.api#documentation": "

          Specifies batch transform jobs that Amazon SageMaker runs to validate your model package.

          " } }, - "com.amazonaws.sagemaker#ListWorkforcesResponse": { + "com.amazonaws.sagemaker#ModelPackageVersion": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#ModelQuality": { "type": "structure", "members": { - "Workforces": { - "target": "com.amazonaws.sagemaker#Workforces", + "Statistics": { + "target": "com.amazonaws.sagemaker#MetricsSource", "traits": { - "smithy.api#documentation": "

          A list containing information about your workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Model quality statistics.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "Constraints": { + "target": "com.amazonaws.sagemaker#MetricsSource", "traits": { - "smithy.api#documentation": "

          A token to resume pagination.

          " + "smithy.api#documentation": "

          Model quality constraints.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Model quality statistics and constraints.

          " } }, - "com.amazonaws.sagemaker#ListWorkforcesSortByOptions": { + "com.amazonaws.sagemaker#ModelSortKey": { "type": "string", "traits": { "smithy.api#enum": [ @@ -16141,458 +21521,425 @@ "name": "Name" }, { - "value": "CreateDate", - "name": "CreateDate" + "value": "CreationTime", + "name": "CreationTime" } ] } }, - "com.amazonaws.sagemaker#ListWorkteams": { - "type": "operation", - "input": { - "target": "com.amazonaws.sagemaker#ListWorkteamsRequest" - }, - "output": { - "target": "com.amazonaws.sagemaker#ListWorkteamsResponse" + "com.amazonaws.sagemaker#ModelStepMetadata": { + "type": "structure", + "members": { + "Arn": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the created model.

          " + } + } }, "traits": { - "smithy.api#documentation": "

          Gets a list of private work teams that you have defined in a region. The list may be empty if\n no work team satisfies the filter specified in the NameContains\n parameter.

          ", - "smithy.api#paginated": { - "inputToken": "NextToken", - "outputToken": "NextToken", - "pageSize": "MaxResults" + "smithy.api#documentation": "

          Metadata for Model steps.

          " + } + }, + "com.amazonaws.sagemaker#ModelSummary": { + "type": "structure", + "members": { + "ModelName": { + "target": "com.amazonaws.sagemaker#ModelName", + "traits": { + "smithy.api#documentation": "

          The name of the model that you want a summary for.

          ", + "smithy.api#required": {} + } + }, + "ModelArn": { + "target": "com.amazonaws.sagemaker#ModelArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that indicates when the model was created.

          ", + "smithy.api#required": {} + } } + }, + "traits": { + "smithy.api#documentation": "

          Provides summary information about a model.

          " } }, - "com.amazonaws.sagemaker#ListWorkteamsRequest": { + "com.amazonaws.sagemaker#ModelSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ModelSummary" + } + }, + "com.amazonaws.sagemaker#MonitoringAppSpecification": { "type": "structure", "members": { - "SortBy": { - "target": "com.amazonaws.sagemaker#ListWorkteamsSortByOptions", + "ImageUri": { + "target": "com.amazonaws.sagemaker#ImageUri", "traits": { - "smithy.api#documentation": "

          The field to sort results by. The default is CreationTime.

          " + "smithy.api#documentation": "

          The container image to be run by the monitoring job.

          ", + "smithy.api#required": {} } }, - "SortOrder": { - "target": "com.amazonaws.sagemaker#SortOrder", + "ContainerEntrypoint": { + "target": "com.amazonaws.sagemaker#ContainerEntrypoint", "traits": { - "smithy.api#documentation": "

          The sort order for results. The default is Ascending.

          " + "smithy.api#documentation": "

          Specifies the entrypoint for a container used to run the monitoring job.

          " } }, - "NameContains": { - "target": "com.amazonaws.sagemaker#WorkteamName", + "ContainerArguments": { + "target": "com.amazonaws.sagemaker#MonitoringContainerArguments", "traits": { - "smithy.api#documentation": "

          A string in the work team's name. This filter returns only work teams whose name\n contains the specified string.

          " + "smithy.api#documentation": "

          An array of arguments for the container used to run the monitoring job.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "RecordPreprocessorSourceUri": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          If the result of the previous ListWorkteams request was truncated, the\n response includes a NextToken. To retrieve the next set of labeling jobs,\n use the token in the next request.

          " + "smithy.api#documentation": "

          An Amazon S3 URI to a script that is called per row prior to running analysis. It can\n base64 decode the payload and convert it into a flattened JSON so that the built-in container\n can use the converted data. Applicable only for the built-in (first party)\n containers.

          " } }, - "MaxResults": { - "target": "com.amazonaws.sagemaker#MaxResults", + "PostAnalyticsProcessorSourceUri": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#box": {}, - "smithy.api#documentation": "

          The maximum number of work teams to return in each page of the response.

          " + "smithy.api#documentation": "

          An Amazon S3 URI to a script that is called after analysis has been performed.\n Applicable only for the built-in (first party) containers.

          " } } + }, + "traits": { + "smithy.api#documentation": "

          Container image configuration object for the monitoring job.

          " } }, - "com.amazonaws.sagemaker#ListWorkteamsResponse": { + "com.amazonaws.sagemaker#MonitoringBaselineConfig": { "type": "structure", "members": { - "Workteams": { - "target": "com.amazonaws.sagemaker#Workteams", + "ConstraintsResource": { + "target": "com.amazonaws.sagemaker#MonitoringConstraintsResource", "traits": { - "smithy.api#documentation": "

          An array of Workteam objects, each describing a work team.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The baseline constraint file in Amazon S3 that the current monitoring job should be\n validated against.

          " } }, - "NextToken": { - "target": "com.amazonaws.sagemaker#NextToken", + "StatisticsResource": { + "target": "com.amazonaws.sagemaker#MonitoringStatisticsResource", "traits": { - "smithy.api#documentation": "

          If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of\n work teams, use it in the subsequent request.

          " - } - } - } - }, - "com.amazonaws.sagemaker#ListWorkteamsSortByOptions": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "Name" - }, - { - "value": "CreateDate", - "name": "CreateDate" + "smithy.api#documentation": "

          The baseline statistics file in Amazon S3 that the current monitoring job should be\n validated against.

          " } - ] - } - }, - "com.amazonaws.sagemaker#MaxAutoMLJobRuntimeInSeconds": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1 - } - } - }, - "com.amazonaws.sagemaker#MaxCandidates": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1 - } - } - }, - "com.amazonaws.sagemaker#MaxConcurrentTaskCount": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1, - "max": 1000 } - } - }, - "com.amazonaws.sagemaker#MaxConcurrentTransforms": { - "type": "integer", + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 0 - } + "smithy.api#documentation": "

          Configuration for monitoring constraints and monitoring statistics. These baseline\n resources are compared against the results of the current job from the series of jobs\n scheduled to collect data periodically.

          " } }, - "com.amazonaws.sagemaker#MaxHumanLabeledObjectCount": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1 + "com.amazonaws.sagemaker#MonitoringClusterConfig": { + "type": "structure", + "members": { + "InstanceCount": { + "target": "com.amazonaws.sagemaker#ProcessingInstanceCount", + "traits": { + "smithy.api#documentation": "

          The number of ML compute instances to use in the model monitoring job. For distributed\n processing jobs, specify a value greater than 1. The default value is 1.

          ", + "smithy.api#required": {} + } + }, + "InstanceType": { + "target": "com.amazonaws.sagemaker#ProcessingInstanceType", + "traits": { + "smithy.api#documentation": "

          The ML compute instance type for the processing job.

          ", + "smithy.api#required": {} + } + }, + "VolumeSizeInGB": { + "target": "com.amazonaws.sagemaker#ProcessingVolumeSizeInGB", + "traits": { + "smithy.api#documentation": "

          The size of the ML storage volume, in gigabytes, that you want to provision. You must\n specify sufficient ML storage for your scenario.

          ", + "smithy.api#required": {} + } + }, + "VolumeKmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data\n on the storage volume attached to the ML compute instance(s) that run the model monitoring\n job.

          " + } } - } - }, - "com.amazonaws.sagemaker#MaxNumberOfTrainingJobs": { - "type": "integer", + }, "traits": { - "smithy.api#range": { - "min": 1 - } + "smithy.api#documentation": "

          Configuration for the cluster used to run model monitoring jobs.

          " } }, - "com.amazonaws.sagemaker#MaxParallelTrainingJobs": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 1 + "com.amazonaws.sagemaker#MonitoringConstraintsResource": { + "type": "structure", + "members": { + "S3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The Amazon S3 URI for the constraints resource.

          " + } } - } - }, - "com.amazonaws.sagemaker#MaxPayloadInMB": { - "type": "integer", + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 0 - } + "smithy.api#documentation": "

          The constraints resource for a monitoring job.

          " } }, - "com.amazonaws.sagemaker#MaxPercentageOfInputDatasetLabeled": { - "type": "integer", + "com.amazonaws.sagemaker#MonitoringContainerArguments": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ContainerArgument" + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { + "smithy.api#length": { "min": 1, - "max": 100 + "max": 50 } } }, - "com.amazonaws.sagemaker#MaxResults": { - "type": "integer", + "com.amazonaws.sagemaker#MonitoringEnvironmentMap": { + "type": "map", + "key": { + "target": "com.amazonaws.sagemaker#ProcessingEnvironmentKey" + }, + "value": { + "target": "com.amazonaws.sagemaker#ProcessingEnvironmentValue" + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1, - "max": 100 + "smithy.api#length": { + "min": 0, + "max": 50 } } }, - "com.amazonaws.sagemaker#MaxRuntimeInSeconds": { - "type": "integer", + "com.amazonaws.sagemaker#MonitoringExecutionSortKey": { + "type": "string", "traits": { - "smithy.api#range": { - "min": 1 - } + "smithy.api#enum": [ + { + "value": "CreationTime", + "name": "CREATION_TIME" + }, + { + "value": "ScheduledTime", + "name": "SCHEDULED_TIME" + }, + { + "value": "Status", + "name": "STATUS" + } + ] } }, - "com.amazonaws.sagemaker#MaxRuntimePerTrainingJobInSeconds": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1 + "com.amazonaws.sagemaker#MonitoringExecutionSummary": { + "type": "structure", + "members": { + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "traits": { + "smithy.api#documentation": "

          The name of the monitoring schedule.

          ", + "smithy.api#required": {} + } + }, + "ScheduledTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The time the monitoring job was scheduled.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The time at which the monitoring job was created.

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          A timestamp that indicates the last time the monitoring job was modified.

          ", + "smithy.api#required": {} + } + }, + "MonitoringExecutionStatus": { + "target": "com.amazonaws.sagemaker#ExecutionStatus", + "traits": { + "smithy.api#documentation": "

          The status of the monitoring job.

          ", + "smithy.api#required": {} + } + }, + "ProcessingJobArn": { + "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring job.

          " + } + }, + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          The name of the endpoint used to run the monitoring job.

          " + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          Contains the reason a monitoring job failed, if it failed.

          " + } } - } - }, - "com.amazonaws.sagemaker#MaxWaitTimeInSeconds": { - "type": "integer", + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 1 - } + "smithy.api#documentation": "

          Summary of information about the last monitoring job to run.

          " } }, - "com.amazonaws.sagemaker#MediaType": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 64 - }, - "smithy.api#pattern": "^[-\\w]+\\/[-\\w+]+$" + "com.amazonaws.sagemaker#MonitoringExecutionSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#MonitoringExecutionSummary" } }, - "com.amazonaws.sagemaker#MemberDefinition": { + "com.amazonaws.sagemaker#MonitoringInput": { "type": "structure", "members": { - "CognitoMemberDefinition": { - "target": "com.amazonaws.sagemaker#CognitoMemberDefinition", - "traits": { - "smithy.api#documentation": "

          The Amazon Cognito user group that is part of the work team.

          " - } - }, - "OidcMemberDefinition": { - "target": "com.amazonaws.sagemaker#OidcMemberDefinition", + "EndpointInput": { + "target": "com.amazonaws.sagemaker#EndpointInput", "traits": { - "smithy.api#documentation": "

          A list user groups that exist in your OIDC Identity Provider (IdP). \n One to ten groups can be used to create a single private work team. \n When you add a user group to the list of Groups, you can add that user group to one or more\n private work teams. If you add a user group to a private work team, all workers in that user group \n are added to the work team.

          " + "smithy.api#documentation": "

          The endpoint for a monitoring job.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Defines an Amazon Cognito or your own OIDC IdP user group that is part of a work team.

          " + "smithy.api#documentation": "

          The inputs for a monitoring job.

          " } }, - "com.amazonaws.sagemaker#MemberDefinitions": { + "com.amazonaws.sagemaker#MonitoringInputs": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#MemberDefinition" + "target": "com.amazonaws.sagemaker#MonitoringInput" }, "traits": { "smithy.api#length": { "min": 1, - "max": 10 + "max": 1 } } }, - "com.amazonaws.sagemaker#MetricData": { + "com.amazonaws.sagemaker#MonitoringJobDefinition": { "type": "structure", "members": { - "MetricName": { - "target": "com.amazonaws.sagemaker#MetricName", + "BaselineConfig": { + "target": "com.amazonaws.sagemaker#MonitoringBaselineConfig", "traits": { - "smithy.api#documentation": "

          The name of the metric.

          " + "smithy.api#documentation": "

          Baseline configuration used to validate that the data conforms to the specified\n constraints and statistics

          " } }, - "Value": { - "target": "com.amazonaws.sagemaker#Float", + "MonitoringInputs": { + "target": "com.amazonaws.sagemaker#MonitoringInputs", "traits": { - "smithy.api#documentation": "

          The value of the metric.

          " + "smithy.api#documentation": "

          The array of inputs for the monitoring job. Currently we support monitoring an Amazon SageMaker\n Endpoint.

          ", + "smithy.api#required": {} } }, - "Timestamp": { - "target": "com.amazonaws.sagemaker#Timestamp", + "MonitoringOutputConfig": { + "target": "com.amazonaws.sagemaker#MonitoringOutputConfig", "traits": { - "smithy.api#documentation": "

          The date and time that the algorithm emitted the metric.

          " + "smithy.api#documentation": "

          The array of outputs from the monitoring job to be uploaded to Amazon Simple Storage\n Service (Amazon S3).

          ", + "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          The name, value, and date and time of a metric that was emitted to Amazon CloudWatch.

          " - } - }, - "com.amazonaws.sagemaker#MetricDefinition": { - "type": "structure", - "members": { - "Name": { - "target": "com.amazonaws.sagemaker#MetricName", + }, + "MonitoringResources": { + "target": "com.amazonaws.sagemaker#MonitoringResources", "traits": { - "smithy.api#documentation": "

          The name of the metric.

          ", + "smithy.api#documentation": "

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a\n monitoring job. In distributed processing, you specify more than one instance.

          ", "smithy.api#required": {} } }, - "Regex": { - "target": "com.amazonaws.sagemaker#MetricRegex", + "MonitoringAppSpecification": { + "target": "com.amazonaws.sagemaker#MonitoringAppSpecification", "traits": { - "smithy.api#documentation": "

          A regular expression that searches the output of a training job and gets the value of\n the metric. For more information about using regular expressions to define metrics, see\n Defining\n Objective Metrics.

          ", + "smithy.api#documentation": "

          Configures the monitoring job to run a specified Docker container image.

          ", "smithy.api#required": {} } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies a metric that the training algorithm\n writes\n to stderr or stdout\n . Amazon SageMaker hyperparameter\n tuning captures\n all\n defined metrics.\n You\n specify one metric that a hyperparameter tuning job uses as its\n objective metric to choose the best training job.

          " - } - }, - "com.amazonaws.sagemaker#MetricDefinitionList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#MetricDefinition" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 40 - } - } - }, - "com.amazonaws.sagemaker#MetricName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 255 - }, - "smithy.api#pattern": ".+" - } - }, - "com.amazonaws.sagemaker#MetricRegex": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 500 }, - "smithy.api#pattern": ".+" - } - }, - "com.amazonaws.sagemaker#MetricValue": { - "type": "float" - }, - "com.amazonaws.sagemaker#ModelArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 20, - "max": 2048 + "StoppingCondition": { + "target": "com.amazonaws.sagemaker#MonitoringStoppingCondition", + "traits": { + "smithy.api#documentation": "

          Specifies a time limit for how long the monitoring job is allowed to run.

          " + } }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model/.*" - } - }, - "com.amazonaws.sagemaker#ModelArtifacts": { - "type": "structure", - "members": { - "S3ModelArtifacts": { - "target": "com.amazonaws.sagemaker#S3Uri", + "Environment": { + "target": "com.amazonaws.sagemaker#MonitoringEnvironmentMap", "traits": { - "smithy.api#documentation": "

          The path of the S3 object that contains the model artifacts. For example,\n s3://bucket-name/keynameprefix/model.tar.gz.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Sets the environment variables in the Docker container.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Provides information about the location that is configured for storing model\n artifacts.

          \n

          Model artifacts are the output that results from training a model, and typically\n consist of trained parameters, a model definition that describes how to compute\n inferences, and other metadata.

          " - } - }, - "com.amazonaws.sagemaker#ModelClientConfig": { - "type": "structure", - "members": { - "InvocationsTimeoutInSeconds": { - "target": "com.amazonaws.sagemaker#InvocationsTimeoutInSeconds", + }, + "NetworkConfig": { + "target": "com.amazonaws.sagemaker#NetworkConfig", "traits": { - "smithy.api#documentation": "

          The timeout value in seconds for an invocation request.

          " + "smithy.api#documentation": "

          Specifies networking options for a monitoring job.

          " } }, - "InvocationsMaxRetries": { - "target": "com.amazonaws.sagemaker#InvocationsMaxRetries", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          The maximum number of retries when invocation requests are failing.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on\n your behalf.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Configures the timeout and maximum number of retries for processing a transform job\n invocation.

          " - } - }, - "com.amazonaws.sagemaker#ModelName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - "com.amazonaws.sagemaker#ModelNameContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 - }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" + "smithy.api#documentation": "

          Defines the monitoring job.

          " } }, - "com.amazonaws.sagemaker#ModelPackageArn": { - "type": "string", + "com.amazonaws.sagemaker#MonitoringMaxRuntimeInSeconds": { + "type": "integer", "traits": { - "smithy.api#length": { + "smithy.api#range": { "min": 1, - "max": 2048 - }, - "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model-package/.*" + "max": 86400 + } } }, - "com.amazonaws.sagemaker#ModelPackageContainerDefinition": { + "com.amazonaws.sagemaker#MonitoringOutput": { "type": "structure", "members": { - "ContainerHostname": { - "target": "com.amazonaws.sagemaker#ContainerHostname", - "traits": { - "smithy.api#documentation": "

          The DNS host name for the Docker container.

          " - } - }, - "Image": { - "target": "com.amazonaws.sagemaker#ContainerImage", + "S3Output": { + "target": "com.amazonaws.sagemaker#MonitoringS3Output", "traits": { - "smithy.api#documentation": "

          The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

          \n

          If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker,\n the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both\n registry/repository[:tag] and registry/repository[@digest]\n image path formats. For more information, see Using Your Own Algorithms with Amazon\n SageMaker.

          ", + "smithy.api#documentation": "

          The Amazon S3 storage location where the results of a monitoring job are saved.

          ", "smithy.api#required": {} } - }, - "ImageDigest": { - "target": "com.amazonaws.sagemaker#ImageDigest", - "traits": { - "smithy.api#documentation": "

          An MD5 hash of the training algorithm that identifies the Docker image used for\n training.

          " - } - }, - "ModelDataUrl": { - "target": "com.amazonaws.sagemaker#Url", + } + }, + "traits": { + "smithy.api#documentation": "

          The output object for a monitoring job.

          " + } + }, + "com.amazonaws.sagemaker#MonitoringOutputConfig": { + "type": "structure", + "members": { + "MonitoringOutputs": { + "target": "com.amazonaws.sagemaker#MonitoringOutputs", "traits": { - "smithy.api#documentation": "

          The Amazon S3 path where the model artifacts, which result from model training, are stored.\n This path must point to a single gzip compressed tar archive\n (.tar.gz suffix).

          \n \n

          The model artifacts must be in an S3 bucket that is in the same region as the\n model package.

          \n
          " + "smithy.api#documentation": "

          Monitoring outputs for monitoring jobs. This is where the output of the periodic\n monitoring jobs is uploaded.

          ", + "smithy.api#required": {} } }, - "ProductId": { - "target": "com.amazonaws.sagemaker#ProductId", + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          The AWS Marketplace product ID of the model package.

          " + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model\n artifacts at rest using Amazon S3 server-side encryption.

          " } } }, "traits": { - "smithy.api#documentation": "

          Describes the Docker container for the model package.

          " + "smithy.api#documentation": "

          The output configuration for monitoring jobs.

          " } }, - "com.amazonaws.sagemaker#ModelPackageContainerDefinitionList": { + "com.amazonaws.sagemaker#MonitoringOutputs": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ModelPackageContainerDefinition" + "target": "com.amazonaws.sagemaker#MonitoringOutput" }, "traits": { "smithy.api#length": { @@ -16601,1829 +21948,1902 @@ } } }, - "com.amazonaws.sagemaker#ModelPackageSortBy": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "NAME" - }, - { - "value": "CreationTime", - "name": "CREATION_TIME" - } - ] - } - }, - "com.amazonaws.sagemaker#ModelPackageStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Pending", - "name": "PENDING" - }, - { - "value": "InProgress", - "name": "IN_PROGRESS" - }, - { - "value": "Completed", - "name": "COMPLETED" - }, - { - "value": "Failed", - "name": "FAILED" - }, - { - "value": "Deleting", - "name": "DELETING" - } - ] - } - }, - "com.amazonaws.sagemaker#ModelPackageStatusDetails": { + "com.amazonaws.sagemaker#MonitoringResources": { "type": "structure", "members": { - "ValidationStatuses": { - "target": "com.amazonaws.sagemaker#ModelPackageStatusItemList", + "ClusterConfig": { + "target": "com.amazonaws.sagemaker#MonitoringClusterConfig", "traits": { - "smithy.api#documentation": "

          The validation status of the model package.

          ", + "smithy.api#documentation": "

          The configuration for the cluster resources used to run the processing job.

          ", "smithy.api#required": {} } - }, - "ImageScanStatuses": { - "target": "com.amazonaws.sagemaker#ModelPackageStatusItemList", - "traits": { - "smithy.api#documentation": "

          The status of the scan of the Docker image container for the model package.

          " - } } }, "traits": { - "smithy.api#documentation": "

          Specifies the validation and image scan statuses of the model package.

          " + "smithy.api#documentation": "

          Identifies the resources to deploy for a monitoring job.

          " } }, - "com.amazonaws.sagemaker#ModelPackageStatusItem": { + "com.amazonaws.sagemaker#MonitoringS3Output": { "type": "structure", "members": { - "Name": { - "target": "com.amazonaws.sagemaker#EntityName", + "S3Uri": { + "target": "com.amazonaws.sagemaker#MonitoringS3Uri", "traits": { - "smithy.api#documentation": "

          The name of the model package for which the overall status is being reported.

          ", + "smithy.api#documentation": "

          A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a\n monitoring job.

          ", "smithy.api#required": {} } }, - "Status": { - "target": "com.amazonaws.sagemaker#DetailedModelPackageStatus", + "LocalPath": { + "target": "com.amazonaws.sagemaker#ProcessingLocalPath", "traits": { - "smithy.api#documentation": "

          The current status.

          ", + "smithy.api#documentation": "

          The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a\n monitoring job. LocalPath is an absolute path for the output data.

          ", "smithy.api#required": {} } }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#String", + "S3UploadMode": { + "target": "com.amazonaws.sagemaker#ProcessingS3UploadMode", "traits": { - "smithy.api#documentation": "

          if the overall status is Failed, the reason for the failure.

          " + "smithy.api#documentation": "

          Whether to upload the results of the monitoring job continuously or after the job\n completes.

          " } } }, "traits": { - "smithy.api#documentation": "

          Represents the overall status of a model package.

          " + "smithy.api#documentation": "

          Information about where and how you want to store the results of a monitoring\n job.

          " } }, - "com.amazonaws.sagemaker#ModelPackageStatusItemList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ModelPackageStatusItem" + "com.amazonaws.sagemaker#MonitoringS3Uri": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 512 + }, + "smithy.api#pattern": "^(https|s3)://([^/]+)/?(.*)$" } }, - "com.amazonaws.sagemaker#ModelPackageSummary": { + "com.amazonaws.sagemaker#MonitoringSchedule": { "type": "structure", "members": { - "ModelPackageName": { - "target": "com.amazonaws.sagemaker#EntityName", + "MonitoringScheduleArn": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", "traits": { - "smithy.api#documentation": "

          The name of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          " } }, - "ModelPackageArn": { - "target": "com.amazonaws.sagemaker#ModelPackageArn", + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the monitoring schedule.

          " } }, - "ModelPackageDescription": { - "target": "com.amazonaws.sagemaker#EntityDescription", + "MonitoringScheduleStatus": { + "target": "com.amazonaws.sagemaker#ScheduleStatus", "traits": { - "smithy.api#documentation": "

          A brief description of the model package.

          " + "smithy.api#documentation": "

          The status of the monitoring schedule. This can be one of the following values.

          \n
            \n
          • \n

            \n PENDING - The schedule is pending being created.

            \n
          • \n
          • \n

            \n FAILED - The schedule failed.

            \n
          • \n
          • \n

            \n SCHEDULED - The schedule was successfully created.

            \n
          • \n
          • \n

            \n STOPPED - The schedule was stopped.

            \n
          • \n
          " + } + }, + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", + "traits": { + "smithy.api#documentation": "

          If the monitoring schedule failed, the reason it failed.

          " } }, "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the model package was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time that the monitoring schedule was created.

          " } }, - "ModelPackageStatus": { - "target": "com.amazonaws.sagemaker#ModelPackageStatus", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The overall status of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The last time the monitoring schedule was changed.

          " + } + }, + "MonitoringScheduleConfig": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleConfig" + }, + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", + "traits": { + "smithy.api#documentation": "

          The endpoint that hosts the model being monitored.

          " + } + }, + "LastMonitoringExecutionSummary": { + "target": "com.amazonaws.sagemaker#MonitoringExecutionSummary" + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", + "traits": { + "smithy.api#documentation": "

          A list of the tags associated with the monitoring schedule. For more information, see Tagging AWS\n resources in the AWS General Reference Guide.

          " } } }, "traits": { - "smithy.api#documentation": "

          Provides summary information about a model package.

          " + "smithy.api#documentation": "

          A schedule for a model monitoring job. For information about model monitor, see\n Amazon SageMaker Model\n Monitor.

          " } }, - "com.amazonaws.sagemaker#ModelPackageSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ModelPackageSummary" + "com.amazonaws.sagemaker#MonitoringScheduleArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#ModelPackageValidationProfile": { + "com.amazonaws.sagemaker#MonitoringScheduleConfig": { "type": "structure", "members": { - "ProfileName": { - "target": "com.amazonaws.sagemaker#EntityName", + "ScheduleConfig": { + "target": "com.amazonaws.sagemaker#ScheduleConfig", "traits": { - "smithy.api#documentation": "

          The name of the profile for the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Configures the monitoring schedule.

          " } }, - "TransformJobDefinition": { - "target": "com.amazonaws.sagemaker#TransformJobDefinition", + "MonitoringJobDefinition": { + "target": "com.amazonaws.sagemaker#MonitoringJobDefinition", "traits": { - "smithy.api#documentation": "

          The TransformJobDefinition object that describes the transform job used\n for the validation of the model package.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Defines the monitoring job.

          " } } }, "traits": { - "smithy.api#documentation": "

          Contains data, such as the inputs and targeted instance types that are used in the\n process of validating the model package.

          \n

          The data provided in the validation profile is made available to your buyers on AWS\n Marketplace.

          " + "smithy.api#documentation": "

          Configures the monitoring schedule and defines the monitoring job.

          " } }, - "com.amazonaws.sagemaker#ModelPackageValidationProfiles": { + "com.amazonaws.sagemaker#MonitoringScheduleList": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ModelPackageValidationProfile" - }, + "target": "com.amazonaws.sagemaker#MonitoringSchedule" + } + }, + "com.amazonaws.sagemaker#MonitoringScheduleName": { + "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 1 - } - } - }, - "com.amazonaws.sagemaker#ModelPackageValidationSpecification": { - "type": "structure", - "members": { - "ValidationRole": { - "target": "com.amazonaws.sagemaker#RoleArn", - "traits": { - "smithy.api#documentation": "

          The IAM roles to be used for the validation of the model package.

          ", - "smithy.api#required": {} - } + "max": 63 }, - "ValidationProfiles": { - "target": "com.amazonaws.sagemaker#ModelPackageValidationProfiles", - "traits": { - "smithy.api#documentation": "

          An array of ModelPackageValidationProfile objects, each of which\n specifies a batch transform job that Amazon SageMaker runs to validate your model package.

          ", - "smithy.api#required": {} - } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies batch transform jobs that Amazon SageMaker runs to validate your model package.

          " + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}$" } }, - "com.amazonaws.sagemaker#ModelSortKey": { + "com.amazonaws.sagemaker#MonitoringScheduleSortKey": { "type": "string", "traits": { "smithy.api#enum": [ { "value": "Name", - "name": "Name" + "name": "NAME" }, { "value": "CreationTime", - "name": "CreationTime" + "name": "CREATION_TIME" + }, + { + "value": "Status", + "name": "STATUS" } ] } }, - "com.amazonaws.sagemaker#ModelSummary": { + "com.amazonaws.sagemaker#MonitoringScheduleSummary": { "type": "structure", "members": { - "ModelName": { - "target": "com.amazonaws.sagemaker#ModelName", + "MonitoringScheduleName": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "traits": { + "smithy.api#documentation": "

          The name of the monitoring schedule.

          ", + "smithy.api#required": {} + } + }, + "MonitoringScheduleArn": { + "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          ", + "smithy.api#required": {} + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The creation time of the monitoring schedule.

          ", + "smithy.api#required": {} + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The name of the model that you want a summary for.

          ", + "smithy.api#documentation": "

          The last time the monitoring schedule was modified.

          ", "smithy.api#required": {} } }, - "ModelArn": { - "target": "com.amazonaws.sagemaker#ModelArn", + "MonitoringScheduleStatus": { + "target": "com.amazonaws.sagemaker#ScheduleStatus", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          ", + "smithy.api#documentation": "

          The status of the monitoring schedule.

          ", "smithy.api#required": {} } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "EndpointName": { + "target": "com.amazonaws.sagemaker#EndpointName", "traits": { - "smithy.api#documentation": "

          A timestamp that indicates when the model was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the endpoint using the monitoring schedule.

          " } } }, "traits": { - "smithy.api#documentation": "

          Provides summary information about a model.

          " + "smithy.api#documentation": "

          Summarizes the monitoring schedule.

          " } }, - "com.amazonaws.sagemaker#ModelSummaryList": { + "com.amazonaws.sagemaker#MonitoringScheduleSummaryList": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ModelSummary" + "target": "com.amazonaws.sagemaker#MonitoringScheduleSummary" } }, - "com.amazonaws.sagemaker#MonitoringAppSpecification": { + "com.amazonaws.sagemaker#MonitoringStatisticsResource": { "type": "structure", "members": { - "ImageUri": { - "target": "com.amazonaws.sagemaker#ImageUri", - "traits": { - "smithy.api#documentation": "

          The container image to be run by the monitoring job.

          ", - "smithy.api#required": {} - } - }, - "ContainerEntrypoint": { - "target": "com.amazonaws.sagemaker#ContainerEntrypoint", - "traits": { - "smithy.api#documentation": "

          Specifies the entrypoint for a container used to run the monitoring job.

          " - } - }, - "ContainerArguments": { - "target": "com.amazonaws.sagemaker#MonitoringContainerArguments", - "traits": { - "smithy.api#documentation": "

          An array of arguments for the container used to run the monitoring job.

          " - } - }, - "RecordPreprocessorSourceUri": { - "target": "com.amazonaws.sagemaker#S3Uri", - "traits": { - "smithy.api#documentation": "

          An Amazon S3 URI to a script that is called per row prior to running analysis. It can\n base64 decode the payload and convert it into a flatted json so that the built-in container\n can use the converted data. Applicable only for the built-in (first party)\n containers.

          " - } - }, - "PostAnalyticsProcessorSourceUri": { + "S3Uri": { "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          An Amazon S3 URI to a script that is called after analysis has been performed.\n Applicable only for the built-in (first party) containers.

          " + "smithy.api#documentation": "

          The Amazon S3 URI for the statistics resource.

          " } } }, "traits": { - "smithy.api#documentation": "

          Container image configuration object for the monitoring job.

          " + "smithy.api#documentation": "

          The statistics resource for a monitoring job.

          " } }, - "com.amazonaws.sagemaker#MonitoringBaselineConfig": { + "com.amazonaws.sagemaker#MonitoringStoppingCondition": { "type": "structure", "members": { - "ConstraintsResource": { - "target": "com.amazonaws.sagemaker#MonitoringConstraintsResource", - "traits": { - "smithy.api#documentation": "

          The baseline constraint file in Amazon S3 that the current monitoring job should\n validated against.

          " - } - }, - "StatisticsResource": { - "target": "com.amazonaws.sagemaker#MonitoringStatisticsResource", + "MaxRuntimeInSeconds": { + "target": "com.amazonaws.sagemaker#MonitoringMaxRuntimeInSeconds", "traits": { - "smithy.api#documentation": "

          The baseline statistics file in Amazon S3 that the current monitoring job should be\n validated against.

          " + "smithy.api#documentation": "

          The maximum runtime allowed in seconds.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Configuration for monitoring constraints and monitoring statistics. These baseline\n resources are compared against the results of the current job from the series of jobs\n scheduled to collect data periodically.

          " + "smithy.api#documentation": "

          A time limit for how long the monitoring job is allowed to run before stopping.

          " } }, - "com.amazonaws.sagemaker#MonitoringClusterConfig": { + "com.amazonaws.sagemaker#MountPath": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 1024 + }, + "smithy.api#pattern": "^\\/.*" + } + }, + "com.amazonaws.sagemaker#NameContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "[a-zA-Z0-9\\-]+" + } + }, + "com.amazonaws.sagemaker#NestedFilters": { "type": "structure", "members": { - "InstanceCount": { - "target": "com.amazonaws.sagemaker#ProcessingInstanceCount", + "NestedPropertyName": { + "target": "com.amazonaws.sagemaker#ResourcePropertyName", "traits": { - "smithy.api#documentation": "

          The number of ML compute instances to use in the model monitoring job. For distributed\n processing jobs, specify a value greater than 1. The default value is 1.

          ", + "smithy.api#documentation": "

          The name of the property to use in the nested filters. The value must match a listed property name,\n such as InputDataConfig.

          ", "smithy.api#required": {} } }, - "InstanceType": { - "target": "com.amazonaws.sagemaker#ProcessingInstanceType", + "Filters": { + "target": "com.amazonaws.sagemaker#FilterList", "traits": { - "smithy.api#documentation": "

          The ML compute instance type for the processing job.

          ", + "smithy.api#documentation": "

          A list of filters. Each filter acts on a property. Filters must contain at least one\n Filters value. For example, a NestedFilters call might\n include a filter on the PropertyName parameter of the\n InputDataConfig property:\n InputDataConfig.DataSource.S3DataSource.S3Uri.

          ", "smithy.api#required": {} } - }, - "VolumeSizeInGB": { - "target": "com.amazonaws.sagemaker#ProcessingVolumeSizeInGB", + } + }, + "traits": { + "smithy.api#documentation": "

          A list of nested Filter objects. A resource must satisfy the conditions\n of all filters to be included in the results returned from the Search API.

          \n

          For example, to filter on a training job's InputDataConfig property with a\n specific channel name and S3Uri prefix, define the following filters:

          \n
            \n
          • \n

            \n '{Name:\"InputDataConfig.ChannelName\", \"Operator\":\"Equals\", \"Value\":\"train\"}',\n

            \n
          • \n
          • \n

            \n '{Name:\"InputDataConfig.DataSource.S3DataSource.S3Uri\", \"Operator\":\"Contains\",\n \"Value\":\"mybucket/catdata\"}'\n

            \n
          • \n
          " + } + }, + "com.amazonaws.sagemaker#NestedFiltersList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#NestedFilters" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#NetworkConfig": { + "type": "structure", + "members": { + "EnableInterContainerTrafficEncryption": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The size of the ML storage volume, in gigabytes, that you want to provision. You must\n specify sufficient ML storage for your scenario.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Whether to encrypt all communications between distributed processing jobs. Choose\n True to encrypt communications. Encryption provides greater security for distributed\n processing jobs, but the processing might take longer.

          " } }, - "VolumeKmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "EnableNetworkIsolation": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data\n on the storage volume attached to the ML compute instance(s) that run the model monitoring\n job.

          " + "smithy.api#documentation": "

          Whether to allow inbound and outbound network calls to and from the containers used for\n the processing job.

          " } + }, + "VpcConfig": { + "target": "com.amazonaws.sagemaker#VpcConfig" } }, "traits": { - "smithy.api#documentation": "

          Configuration for the cluster used to run model monitoring jobs.

          " + "smithy.api#documentation": "

          Networking options for a job, such as network traffic encryption between containers,\n whether to allow inbound and outbound network calls to and from containers, and the VPC\n subnets and security groups to use for VPC-enabled jobs.

          " } }, - "com.amazonaws.sagemaker#MonitoringConstraintsResource": { - "type": "structure", - "members": { - "S3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", - "traits": { - "smithy.api#documentation": "

          The Amazon S3 URI for the constraints resource.

          " + "com.amazonaws.sagemaker#NetworkInterfaceId": { + "type": "string" + }, + "com.amazonaws.sagemaker#NextToken": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 8192 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#NotebookInstanceAcceleratorType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ml.eia1.medium", + "name": "ML_EIA1_MEDIUM" + }, + { + "value": "ml.eia1.large", + "name": "ML_EIA1_LARGE" + }, + { + "value": "ml.eia1.xlarge", + "name": "ML_EIA1_XLARGE" + }, + { + "value": "ml.eia2.medium", + "name": "ML_EIA2_MEDIUM" + }, + { + "value": "ml.eia2.large", + "name": "ML_EIA2_LARGE" + }, + { + "value": "ml.eia2.xlarge", + "name": "ML_EIA2_XLARGE" } + ] + } + }, + "com.amazonaws.sagemaker#NotebookInstanceAcceleratorTypes": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#NotebookInstanceAcceleratorType" + } + }, + "com.amazonaws.sagemaker#NotebookInstanceArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 } - }, + } + }, + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn": { + "type": "string", "traits": { - "smithy.api#documentation": "

          The constraints resource for a monitoring job.

          " + "smithy.api#length": { + "min": 0, + "max": 256 + } } }, - "com.amazonaws.sagemaker#MonitoringContainerArguments": { + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigContent": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 16384 + }, + "smithy.api#pattern": "[\\S\\s]+" + } + }, + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ContainerArgument" + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleHook" }, "traits": { "smithy.api#length": { - "min": 1, - "max": 50 + "min": 0, + "max": 1 } } }, - "com.amazonaws.sagemaker#MonitoringEnvironmentMap": { - "type": "map", - "key": { - "target": "com.amazonaws.sagemaker#ProcessingEnvironmentKey" - }, - "value": { - "target": "com.amazonaws.sagemaker#ProcessingEnvironmentValue" - }, + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + } + }, + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigNameContains": { + "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 50 - } + "max": 63 + }, + "smithy.api#pattern": "[a-zA-Z0-9-]+" } }, - "com.amazonaws.sagemaker#MonitoringExecutionSortKey": { + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortKey": { "type": "string", "traits": { "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, { "value": "CreationTime", "name": "CREATION_TIME" }, { - "value": "ScheduledTime", - "name": "SCHEDULED_TIME" + "value": "LastModifiedTime", + "name": "LAST_MODIFIED_TIME" + } + ] + } + }, + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Ascending", + "name": "ASCENDING" }, { - "value": "Status", - "name": "STATUS" + "value": "Descending", + "name": 
"DESCENDING" } ] } }, - "com.amazonaws.sagemaker#MonitoringExecutionSummary": { + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummary": { "type": "structure", "members": { - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", "traits": { - "smithy.api#documentation": "

          The name of the monitoring schedule.

          ", + "smithy.api#documentation": "

          The name of the lifecycle configuration.

          ", "smithy.api#required": {} } }, - "ScheduledTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "NotebookInstanceLifecycleConfigArn": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn", "traits": { - "smithy.api#documentation": "

          The time the monitoring job was scheduled.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          ", "smithy.api#required": {} } }, "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          The time at which the monitoring job was created.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was created.

          " } }, "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", - "traits": { - "smithy.api#documentation": "

          A timestamp that indicates the last time the monitoring job was modified.

          ", - "smithy.api#required": {} - } - }, - "MonitoringExecutionStatus": { - "target": "com.amazonaws.sagemaker#ExecutionStatus", - "traits": { - "smithy.api#documentation": "

          The status of the monitoring job.

          ", - "smithy.api#required": {} - } - }, - "ProcessingJobArn": { - "target": "com.amazonaws.sagemaker#ProcessingJobArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring job.

          " - } - }, - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", - "traits": { - "smithy.api#documentation": "

          The name of the endpoint used to run the monitoring job.

          " - } - }, - "FailureReason": { - "target": "com.amazonaws.sagemaker#FailureReason", + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          Contains the reason a monitoring job failed, if it failed.

          " + "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was last modified.

          " } } }, "traits": { - "smithy.api#documentation": "

          Summary of information about the last monitoring job to run.

          " + "smithy.api#documentation": "

          Provides a summary of a notebook instance lifecycle configuration.

          " } }, - "com.amazonaws.sagemaker#MonitoringExecutionSummaryList": { + "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummaryList": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#MonitoringExecutionSummary" + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummary" } }, - "com.amazonaws.sagemaker#MonitoringInput": { + "com.amazonaws.sagemaker#NotebookInstanceLifecycleHook": { "type": "structure", "members": { - "EndpointInput": { - "target": "com.amazonaws.sagemaker#EndpointInput", + "Content": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigContent", "traits": { - "smithy.api#documentation": "

          The endpoint for a monitoring job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A base64-encoded string that contains a shell script for a notebook instance lifecycle\n configuration.

          " } } }, "traits": { - "smithy.api#documentation": "

          The inputs for a monitoring job.

          " + "smithy.api#documentation": "

          Contains the notebook instance lifecycle configuration script.

          \n

          Each lifecycle configuration script has a limit of 16384 characters.

          \n

          The value of the $PATH environment variable that is available to both\n scripts is /sbin:/bin:/usr/sbin:/usr/bin.

          \n

          View CloudWatch Logs for notebook instance lifecycle configurations in log group\n /aws/sagemaker/NotebookInstances in log stream\n [notebook-instance-name]/[LifecycleConfigHook].

          \n

          Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs\n for longer than 5 minutes, it fails and the notebook instance is not created or\n started.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " } }, - "com.amazonaws.sagemaker#MonitoringInputs": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#MonitoringInput" - }, + "com.amazonaws.sagemaker#NotebookInstanceName": { + "type": "string", "traits": { "smithy.api#length": { - "min": 1, - "max": 1 - } + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" } }, - "com.amazonaws.sagemaker#MonitoringJobDefinition": { + "com.amazonaws.sagemaker#NotebookInstanceNameContains": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 63 + }, + "smithy.api#pattern": "[a-zA-Z0-9-]+" + } + }, + "com.amazonaws.sagemaker#NotebookInstanceSortKey": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + }, + { + "value": "Status", + "name": "STATUS" + } + ] + } + }, + "com.amazonaws.sagemaker#NotebookInstanceSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Ascending", + "name": "ASCENDING" + }, + { + "value": "Descending", + "name": "DESCENDING" + } + ] + } + }, + "com.amazonaws.sagemaker#NotebookInstanceStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Pending", + "name": "Pending" + }, + { + "value": "InService", + "name": "InService" + }, + { + "value": "Stopping", + "name": "Stopping" + }, + { + "value": "Stopped", + "name": "Stopped" + }, + { + "value": "Failed", + "name": "Failed" + }, + { + "value": "Deleting", + "name": "Deleting" + }, + { + "value": "Updating", + "name": "Updating" + } + ] + } + }, + "com.amazonaws.sagemaker#NotebookInstanceSummary": { "type": "structure", "members": { - "BaselineConfig": { - "target": "com.amazonaws.sagemaker#MonitoringBaselineConfig", + "NotebookInstanceName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceName", "traits": { - "smithy.api#documentation": "

          Baseline configuration used to validate that the data conforms to the specified\n constraints and statistics

          " + "smithy.api#documentation": "

          The name of the notebook instance that you want a summary for.

          ", + "smithy.api#required": {} } }, - "MonitoringInputs": { - "target": "com.amazonaws.sagemaker#MonitoringInputs", + "NotebookInstanceArn": { + "target": "com.amazonaws.sagemaker#NotebookInstanceArn", "traits": { - "smithy.api#documentation": "

          The array of inputs for the monitoring job. Currently we support monitoring an Amazon SageMaker\n Endpoint.

          ", + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the notebook instance.

          ", "smithy.api#required": {} } }, - "MonitoringOutputConfig": { - "target": "com.amazonaws.sagemaker#MonitoringOutputConfig", + "NotebookInstanceStatus": { + "target": "com.amazonaws.sagemaker#NotebookInstanceStatus", "traits": { - "smithy.api#documentation": "

          The array of outputs from the monitoring job to be uploaded to Amazon Simple Storage\n Service (Amazon S3).

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status of the notebook instance.

          " } }, - "MonitoringResources": { - "target": "com.amazonaws.sagemaker#MonitoringResources", + "Url": { + "target": "com.amazonaws.sagemaker#NotebookInstanceUrl", "traits": { - "smithy.api#documentation": "

          Identifies the resources, ML compute instances, and ML storage volumes to deploy for a\n monitoring job. In distributed processing, you specify more than one instance.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The\n URL that you use to connect to the Jupyter instance running in your notebook instance.\n

          " } }, - "MonitoringAppSpecification": { - "target": "com.amazonaws.sagemaker#MonitoringAppSpecification", + "InstanceType": { + "target": "com.amazonaws.sagemaker#InstanceType", "traits": { - "smithy.api#documentation": "

          Configures the monitoring job to run a specified Docker container image.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The type of ML compute instance that the notebook instance is running on.

          " } }, - "StoppingCondition": { - "target": "com.amazonaws.sagemaker#MonitoringStoppingCondition", + "CreationTime": { + "target": "com.amazonaws.sagemaker#CreationTime", "traits": { - "smithy.api#documentation": "

          Specifies a time limit for how long the monitoring job is allowed to run.

          " + "smithy.api#documentation": "

          A timestamp that shows when the notebook instance was created.

          " } }, - "Environment": { - "target": "com.amazonaws.sagemaker#MonitoringEnvironmentMap", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#LastModifiedTime", "traits": { - "smithy.api#documentation": "

          Sets the environment variables in the Docker container.

          " + "smithy.api#documentation": "

          A timestamp that shows when the notebook instance was last modified.

          " } }, - "NetworkConfig": { - "target": "com.amazonaws.sagemaker#NetworkConfig", + "NotebookInstanceLifecycleConfigName": { + "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", "traits": { - "smithy.api#documentation": "

          Specifies networking options for an monitoring job.

          " + "smithy.api#documentation": "

          The name of a notebook instance lifecycle configuration associated with this notebook\n instance.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " } }, - "RoleArn": { - "target": "com.amazonaws.sagemaker#RoleArn", + "DefaultCodeRepository": { + "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on\n your behalf.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Git repository associated with the notebook instance as its default code\n repository. This can be either the name of a Git repository stored as a resource in your\n account, or the URL of a Git repository in AWS CodeCommit or in any\n other Git repository. When you open a notebook instance, it opens in the directory that\n contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker\n Notebook Instances.

          " + } + }, + "AdditionalCodeRepositories": { + "target": "com.amazonaws.sagemaker#AdditionalCodeRepositoryNamesOrUrls", + "traits": { + "smithy.api#documentation": "

          An array of up to three Git repositories associated with the notebook instance. These\n can be either the names of Git repositories stored as resources in your account, or the\n URL of Git repositories in AWS CodeCommit or in any\n other Git repository. These repositories are cloned at the same level as the default\n repository of your notebook instance. For more information, see Associating Git\n Repositories with Amazon SageMaker Notebook Instances.

          " } } }, "traits": { - "smithy.api#documentation": "

          Defines the monitoring job.

          " + "smithy.api#documentation": "

          Provides summary information for an Amazon SageMaker notebook instance.

          " } }, - "com.amazonaws.sagemaker#MonitoringMaxRuntimeInSeconds": { + "com.amazonaws.sagemaker#NotebookInstanceSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#NotebookInstanceSummary" + } + }, + "com.amazonaws.sagemaker#NotebookInstanceUrl": { + "type": "string" + }, + "com.amazonaws.sagemaker#NotebookInstanceVolumeSizeInGB": { "type": "integer", "traits": { + "smithy.api#box": {}, "smithy.api#range": { - "min": 1, - "max": 86400 + "min": 5, + "max": 16384 } } }, - "com.amazonaws.sagemaker#MonitoringOutput": { - "type": "structure", - "members": { - "S3Output": { - "target": "com.amazonaws.sagemaker#MonitoringS3Output", - "traits": { - "smithy.api#documentation": "

          The Amazon S3 storage location where the results of a monitoring job are saved.

          ", - "smithy.api#required": {} - } - } - }, + "com.amazonaws.sagemaker#NotebookOutputOption": { + "type": "string", "traits": { - "smithy.api#documentation": "

          The output object for a monitoring job.

          " + "smithy.api#enum": [ + { + "value": "Allowed", + "name": "Allowed" + }, + { + "value": "Disabled", + "name": "Disabled" + } + ] } }, - "com.amazonaws.sagemaker#MonitoringOutputConfig": { + "com.amazonaws.sagemaker#NotificationConfiguration": { "type": "structure", "members": { - "MonitoringOutputs": { - "target": "com.amazonaws.sagemaker#MonitoringOutputs", - "traits": { - "smithy.api#documentation": "

          Monitoring outputs for monitoring jobs. This is where the output of the periodic\n monitoring jobs is uploaded.

          ", - "smithy.api#required": {} - } - }, - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + "NotificationTopicArn": { + "target": "com.amazonaws.sagemaker#NotificationTopicArn", "traits": { - "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model\n artifacts at rest using Amazon S3 server-side encryption.

          " + "smithy.api#documentation": "

          The ARN for the SNS topic to which notifications should be published.

          " } } }, "traits": { - "smithy.api#documentation": "

          The output configuration for monitoring jobs.

          " + "smithy.api#documentation": "

          Configures SNS notifications of available or expiring work items for work\n teams.

          " } }, - "com.amazonaws.sagemaker#MonitoringOutputs": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#MonitoringOutput" - }, + "com.amazonaws.sagemaker#NotificationTopicArn": { + "type": "string", "traits": { - "smithy.api#length": { + "smithy.api#pattern": "arn:aws[a-z\\-]*:sns:[a-z0-9\\-]*:[0-9]{12}:[a-zA-Z0-9_.-]*" + } + }, + "com.amazonaws.sagemaker#NumberOfHumanWorkersPerDataObject": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { "min": 1, - "max": 1 + "max": 9 } } }, - "com.amazonaws.sagemaker#MonitoringResources": { + "com.amazonaws.sagemaker#ObjectiveStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Succeeded", + "name": "Succeeded" + }, + { + "value": "Pending", + "name": "Pending" + }, + { + "value": "Failed", + "name": "Failed" + } + ] + } + }, + "com.amazonaws.sagemaker#ObjectiveStatusCounter": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.sagemaker#ObjectiveStatusCounters": { "type": "structure", "members": { - "ClusterConfig": { - "target": "com.amazonaws.sagemaker#MonitoringClusterConfig", + "Succeeded": { + "target": "com.amazonaws.sagemaker#ObjectiveStatusCounter", "traits": { - "smithy.api#documentation": "

          The configuration for the cluster resources used to run the processing job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The number of training jobs whose final objective metric was evaluated by the\n hyperparameter tuning job and used in the hyperparameter tuning process.

          " + } + }, + "Pending": { + "target": "com.amazonaws.sagemaker#ObjectiveStatusCounter", + "traits": { + "smithy.api#documentation": "

          The number of training jobs that are in progress and pending evaluation of their final\n objective metric.

          " + } + }, + "Failed": { + "target": "com.amazonaws.sagemaker#ObjectiveStatusCounter", + "traits": { + "smithy.api#documentation": "

          The number of training jobs whose final objective metric was not evaluated and used in\n the hyperparameter tuning process. This typically occurs when the training job failed or\n did not emit an objective metric.

          " } } }, "traits": { - "smithy.api#documentation": "

          Identifies the resources to deploy for a monitoring job.

          " + "smithy.api#documentation": "

          Specifies the number of training jobs that this hyperparameter tuning job launched,\n categorized by the status of their objective metric. The objective metric status shows\n whether the\n final\n objective metric for the training job has been evaluated by the\n tuning job and used in the hyperparameter tuning process.

          " } }, - "com.amazonaws.sagemaker#MonitoringS3Output": { + "com.amazonaws.sagemaker#OfflineStoreConfig": { "type": "structure", "members": { - "S3Uri": { - "target": "com.amazonaws.sagemaker#MonitoringS3Uri", + "S3StorageConfig": { + "target": "com.amazonaws.sagemaker#S3StorageConfig", "traits": { - "smithy.api#documentation": "

          A URI that identifies the Amazon S3 storage location where Amazon SageMaker saves the results of a\n monitoring job.

          ", + "smithy.api#documentation": "

          The Amazon Simple Storage (Amazon S3) location of OfflineStore.

          ", "smithy.api#required": {} } }, - "LocalPath": { - "target": "com.amazonaws.sagemaker#ProcessingLocalPath", + "DisableGlueTableCreation": { + "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          The local path to the Amazon S3 storage location where Amazon SageMaker saves the results of a\n monitoring job. LocalPath is an absolute path for the output data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Set to True to disable the automatic creation of an AWS Glue table when\n configuring an OfflineStore.

          " } }, - "S3UploadMode": { - "target": "com.amazonaws.sagemaker#ProcessingS3UploadMode", + "DataCatalogConfig": { + "target": "com.amazonaws.sagemaker#DataCatalogConfig", "traits": { - "smithy.api#documentation": "

          Whether to upload the results of the monitoring job continuously or after the job\n completes.

          " + "smithy.api#documentation": "

          The meta data of the Glue table that is autogenerated when an OfflineStore\n is created.

          " } } }, "traits": { - "smithy.api#documentation": "

          Information about where and how you want to store the results of a monitoring\n job.

          " - } - }, - "com.amazonaws.sagemaker#MonitoringS3Uri": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 512 - }, - "smithy.api#pattern": "^(https|s3)://([^/]+)/?(.*)$" - } - }, - "com.amazonaws.sagemaker#MonitoringScheduleArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": ".*" + "smithy.api#documentation": "

          The configuration of an OfflineStore.

          \n

          Provide an OfflineStoreConfig in a request to\n CreateFeatureGroup to create an OfflineStore.

          \n

          To encrypt an OfflineStore using at rest data encryption, specify AWS Key\n Management Service (KMS) key ID, or KMSKeyId, in\n S3StorageConfig.

          " } }, - "com.amazonaws.sagemaker#MonitoringScheduleConfig": { + "com.amazonaws.sagemaker#OfflineStoreStatus": { "type": "structure", "members": { - "ScheduleConfig": { - "target": "com.amazonaws.sagemaker#ScheduleConfig", + "Status": { + "target": "com.amazonaws.sagemaker#OfflineStoreStatusValue", "traits": { - "smithy.api#documentation": "

          Configures the monitoring schedule.

          " + "smithy.api#documentation": "

          An OfflineStore status.

          ", + "smithy.api#required": {} } }, - "MonitoringJobDefinition": { - "target": "com.amazonaws.sagemaker#MonitoringJobDefinition", + "BlockedReason": { + "target": "com.amazonaws.sagemaker#BlockedReason", "traits": { - "smithy.api#documentation": "

          Defines the monitoring job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The justification for why the OfflineStoreStatus is Blocked (if applicable).

          " } } }, "traits": { - "smithy.api#documentation": "

          Configures the monitoring schedule and defines the monitoring job.

          " - } - }, - "com.amazonaws.sagemaker#MonitoringScheduleName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 63 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*$" + "smithy.api#documentation": "

          The status of OfflineStore.

          " } }, - "com.amazonaws.sagemaker#MonitoringScheduleSortKey": { + "com.amazonaws.sagemaker#OfflineStoreStatusValue": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Name", - "name": "NAME" + "value": "Active", + "name": "ACTIVE" }, { - "value": "CreationTime", - "name": "CREATION_TIME" + "value": "Blocked", + "name": "BLOCKED" }, { - "value": "Status", - "name": "STATUS" + "value": "Disabled", + "name": "DISABLED" } ] } }, - "com.amazonaws.sagemaker#MonitoringScheduleSummary": { + "com.amazonaws.sagemaker#OidcConfig": { "type": "structure", "members": { - "MonitoringScheduleName": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleName", + "ClientId": { + "target": "com.amazonaws.sagemaker#ClientId", "traits": { - "smithy.api#documentation": "

          The name of the monitoring schedule.

          ", + "smithy.api#documentation": "

          The OIDC IdP client ID used to configure your private workforce.

          ", "smithy.api#required": {} } }, - "MonitoringScheduleArn": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleArn", + "ClientSecret": { + "target": "com.amazonaws.sagemaker#ClientSecret", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP client secret used to configure your private workforce.

          ", + "smithy.api#required": {} + } + }, + "Issuer": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP issuer used to configure your private workforce.

          ", + "smithy.api#required": {} + } + }, + "AuthorizationEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP authorization endpoint used to configure your private workforce.

          ", + "smithy.api#required": {} + } + }, + "TokenEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP token endpoint used to configure your private workforce.

          ", + "smithy.api#required": {} + } + }, + "UserInfoEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP user information endpoint used to configure your private workforce.

          ", + "smithy.api#required": {} + } + }, + "LogoutEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP logout endpoint used to configure your private workforce.

          ", + "smithy.api#required": {} + } + }, + "JwksUri": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", + "traits": { + "smithy.api#documentation": "

          The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Use this parameter to configure your OIDC Identity Provider (IdP).

          " + } + }, + "com.amazonaws.sagemaker#OidcConfigForResponse": { + "type": "structure", + "members": { + "ClientId": { + "target": "com.amazonaws.sagemaker#ClientId", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the monitoring schedule.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The OIDC IdP client ID used to configure your private workforce.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "Issuer": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", "traits": { - "smithy.api#documentation": "

          The creation time of the monitoring schedule.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The OIDC IdP issuer used to configure your private workforce.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#Timestamp", + "AuthorizationEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", "traits": { - "smithy.api#documentation": "

          The last time the monitoring schedule was modified.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The OIDC IdP authorization endpoint used to configure your private workforce.

          " } }, - "MonitoringScheduleStatus": { - "target": "com.amazonaws.sagemaker#ScheduleStatus", + "TokenEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", "traits": { - "smithy.api#documentation": "

          The status of the monitoring schedule.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The OIDC IdP token endpoint used to configure your private workforce.

          " } }, - "EndpointName": { - "target": "com.amazonaws.sagemaker#EndpointName", + "UserInfoEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", "traits": { - "smithy.api#documentation": "

          The name of the endpoint using the monitoring schedule.

          " + "smithy.api#documentation": "

          The OIDC IdP user information endpoint used to configure your private workforce.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Summarizes the monitoring schedule.

          " - } - }, - "com.amazonaws.sagemaker#MonitoringScheduleSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#MonitoringScheduleSummary" - } - }, - "com.amazonaws.sagemaker#MonitoringStatisticsResource": { - "type": "structure", - "members": { - "S3Uri": { - "target": "com.amazonaws.sagemaker#S3Uri", + }, + "LogoutEndpoint": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", "traits": { - "smithy.api#documentation": "

          The Amazon S3 URI for the statistics resource.

          " + "smithy.api#documentation": "

          The OIDC IdP logout endpoint used to configure your private workforce.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          The statistics resource for a monitoring job.

          " - } - }, - "com.amazonaws.sagemaker#MonitoringStoppingCondition": { - "type": "structure", - "members": { - "MaxRuntimeInSeconds": { - "target": "com.amazonaws.sagemaker#MonitoringMaxRuntimeInSeconds", + }, + "JwksUri": { + "target": "com.amazonaws.sagemaker#OidcEndpoint", "traits": { - "smithy.api#documentation": "

          The maximum runtime allowed in seconds.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

          " } } }, "traits": { - "smithy.api#documentation": "

          A time limit for how long the monitoring job is allowed to run before stopping.

          " - } - }, - "com.amazonaws.sagemaker#MountPath": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1024 - }, - "smithy.api#pattern": "^\\/.*" + "smithy.api#documentation": "

          Your OIDC IdP workforce configuration.

          " } }, - "com.amazonaws.sagemaker#NameContains": { + "com.amazonaws.sagemaker#OidcEndpoint": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 63 + "max": 500 }, - "smithy.api#pattern": "[a-zA-Z0-9\\-]+" + "smithy.api#pattern": "https://\\S+" } }, - "com.amazonaws.sagemaker#NestedFilters": { + "com.amazonaws.sagemaker#OidcMemberDefinition": { "type": "structure", "members": { - "NestedPropertyName": { - "target": "com.amazonaws.sagemaker#ResourcePropertyName", - "traits": { - "smithy.api#documentation": "

          The name of the property to use in the nested filters. The value must match a listed property name,\n such as InputDataConfig.

          ", - "smithy.api#required": {} - } - }, - "Filters": { - "target": "com.amazonaws.sagemaker#FilterList", + "Groups": { + "target": "com.amazonaws.sagemaker#Groups", "traits": { - "smithy.api#documentation": "

          A list of filters. Each filter acts on a property. Filters must contain at least one\n Filters value. For example, a NestedFilters call might\n include a filter on the PropertyName parameter of the\n InputDataConfig property:\n InputDataConfig.DataSource.S3DataSource.S3Uri.

          ", + "smithy.api#documentation": "

          A list of comma seperated strings that identifies\n user groups in your OIDC IdP. Each user group is\n made up of a group of private workers.

          ", "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          A list of nested Filter objects. A resource must satisfy the conditions\n of all filters to be included in the results returned from the Search API.

          \n

          For example, to filter on a training job's InputDataConfig property with a\n specific channel name and S3Uri prefix, define the following filters:

          \n
            \n
          • \n

            \n '{Name:\"InputDataConfig.ChannelName\", \"Operator\":\"Equals\", \"Value\":\"train\"}',\n

            \n
          • \n
          • \n

            \n '{Name:\"InputDataConfig.DataSource.S3DataSource.S3Uri\", \"Operator\":\"Contains\",\n \"Value\":\"mybucket/catdata\"}'\n

            \n
          • \n
          " - } - }, - "com.amazonaws.sagemaker#NestedFiltersList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#NestedFilters" - }, - "traits": { - "smithy.api#length": { - "min": 1, - "max": 20 - } + "smithy.api#documentation": "

          A list of user groups that exist in your OIDC Identity Provider (IdP). \n One to ten groups can be used to create a single private work team. \n When you add a user group to the list of Groups, you can add that user group to one or more\n private work teams. If you add a user group to a private work team, all workers in that user group \n are added to the work team.

          " } }, - "com.amazonaws.sagemaker#NetworkConfig": { + "com.amazonaws.sagemaker#OnlineStoreConfig": { "type": "structure", "members": { - "EnableInterContainerTrafficEncryption": { - "target": "com.amazonaws.sagemaker#Boolean", + "SecurityConfig": { + "target": "com.amazonaws.sagemaker#OnlineStoreSecurityConfig", "traits": { - "smithy.api#documentation": "

          Whether to encrypt all communications between distributed processing jobs. Choose\n True to encrypt communications. Encryption provides greater security for distributed\n processing jobs, but the processing might take longer.

          " + "smithy.api#documentation": "

          Use to specify KMS Key ID (KMSKeyId) for at-rest encryption of your\n OnlineStore.

          " } }, - "EnableNetworkIsolation": { + "EnableOnlineStore": { "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          Whether to allow inbound and outbound network calls to and from the containers used for\n the processing job.

          " + "smithy.api#documentation": "

          Turn OnlineStore off by specifying False \n for the EnableOnlineStore flag. Turn OnlineStore \n on by specifying True \n for the EnableOnlineStore flag.

          \n

          The default value is False.

          " } - }, - "VpcConfig": { - "target": "com.amazonaws.sagemaker#VpcConfig" } }, "traits": { - "smithy.api#documentation": "

          Networking options for a job, such as network traffic encryption between containers,\n whether to allow inbound and outbound network calls to and from containers, and the VPC\n subnets and security groups to use for VPC-enabled jobs.

          " + "smithy.api#documentation": "

          Use this to specify the AWS Key Management Service (KMS) Key ID, or\n KMSKeyId, for at rest data encryption. You can turn\n OnlineStore on or off by specifying the EnableOnlineStore flag\n at General Assembly; the default value is False.

          " } }, - "com.amazonaws.sagemaker#NetworkInterfaceId": { - "type": "string" - }, - "com.amazonaws.sagemaker#NextToken": { - "type": "string", + "com.amazonaws.sagemaker#OnlineStoreSecurityConfig": { + "type": "structure", + "members": { + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The ID of the AWS Key Management Service (AWS KMS) key that SageMaker Feature Store uses\n to encrypt the Amazon S3 objects at rest using Amazon S3 server-side encryption.

          \n

          The caller (either IAM user or IAM role) of CreateFeatureGroup must have\n below permissions to the OnlineStore\n KmsKeyId:

          \n
            \n
          • \n

            \n \"kms:Encrypt\"\n

            \n
          • \n
          • \n

            \n \"kms:Decrypt\"\n

            \n
          • \n
          • \n

            \n \"kms:DescribeKey\"\n

            \n
          • \n
          • \n

            \n \"kms:CreateGrant\"\n

            \n
          • \n
          • \n

            \n \"kms:RetireGrant\"\n

            \n
          • \n
          • \n

            \n \"kms:ReEncryptFrom\"\n

            \n
          • \n
          • \n

            \n \"kms:ReEncryptTo\"\n

            \n
          • \n
          • \n

            \n \"kms:GenerateDataKey\"\n

            \n
          • \n
          • \n

            \n \"kms:ListAliases\"\n

            \n
          • \n
          • \n

            \n \"kms:ListGrants\"\n

            \n
          • \n
          • \n

            \n \"kms:RevokeGrant\"\n

            \n
          • \n
          \n

          The caller (either IAM user or IAM role) to all DataPlane operations\n (PutRecord, GetRecord, DeleteRecord) must have\n the following permissions to the KmsKeyId:

          \n
            \n
          • \n

            \n \"kms:Decrypt\"\n

            \n
          • \n
          " + } + } + }, "traits": { - "smithy.api#length": { - "min": 0, - "max": 8192 - }, - "smithy.api#pattern": ".*" + "smithy.api#documentation": "

          The security configuration for OnlineStore.

          " } }, - "com.amazonaws.sagemaker#NotebookInstanceAcceleratorType": { + "com.amazonaws.sagemaker#Operator": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "ml.eia1.medium", - "name": "ML_EIA1_MEDIUM" + "value": "Equals", + "name": "EQUALS" }, { - "value": "ml.eia1.large", - "name": "ML_EIA1_LARGE" + "value": "NotEquals", + "name": "NOT_EQUALS" }, { - "value": "ml.eia1.xlarge", - "name": "ML_EIA1_XLARGE" + "value": "GreaterThan", + "name": "GREATER_THAN" }, { - "value": "ml.eia2.medium", - "name": "ML_EIA2_MEDIUM" + "value": "GreaterThanOrEqualTo", + "name": "GREATER_THAN_OR_EQUAL_TO" }, { - "value": "ml.eia2.large", - "name": "ML_EIA2_LARGE" + "value": "LessThan", + "name": "LESS_THAN" }, { - "value": "ml.eia2.xlarge", - "name": "ML_EIA2_XLARGE" - } - ] - } - }, - "com.amazonaws.sagemaker#NotebookInstanceAcceleratorTypes": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#NotebookInstanceAcceleratorType" - } - }, - "com.amazonaws.sagemaker#NotebookInstanceArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - } - } - }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - } - } - }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigContent": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 1, - "max": 16384 - }, - "smithy.api#pattern": "[\\S\\s]+" - } - }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleHook" - }, - "traits": { - "smithy.api#length": { - "min": 0, - "max": 1 - } - } - }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 - }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" - } - }, - 
"com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigNameContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 - }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" - } - }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortKey": { - "type": "string", - "traits": { - "smithy.api#enum": [ + "value": "LessThanOrEqualTo", + "name": "LESS_THAN_OR_EQUAL_TO" + }, { - "value": "Name", - "name": "NAME" + "value": "Contains", + "name": "CONTAINS" + }, + { + "value": "Exists", + "name": "EXISTS" }, { - "value": "CreationTime", - "name": "CREATION_TIME" + "value": "NotExists", + "name": "NOT_EXISTS" }, { - "value": "LastModifiedTime", - "name": "LAST_MODIFIED_TIME" + "value": "In", + "name": "IN" } ] } }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSortOrder": { + "com.amazonaws.sagemaker#OptionalDouble": { + "type": "double", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.sagemaker#OptionalInteger": { + "type": "integer", + "traits": { + "smithy.api#box": {} + } + }, + "com.amazonaws.sagemaker#OptionalVolumeSizeInGB": { + "type": "integer", + "traits": { + "smithy.api#range": { + "min": 0 + } + } + }, + "com.amazonaws.sagemaker#OrderKey": { "type": "string", "traits": { "smithy.api#enum": [ { "value": "Ascending", - "name": "ASCENDING" + "name": "Ascending" }, { "value": "Descending", - "name": "DESCENDING" + "name": "Descending" } ] } }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummary": { + "com.amazonaws.sagemaker#OutputConfig": { "type": "structure", "members": { - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + "S3OutputLocation": { + "target": "com.amazonaws.sagemaker#S3Uri", "traits": { - "smithy.api#documentation": "

          The name of the lifecycle configuration.

          ", + "smithy.api#documentation": "

          Identifies the S3 bucket where you want Amazon SageMaker to store the model artifacts. For\n example, s3://bucket-name/key-name-prefix.

          ", "smithy.api#required": {} } }, - "NotebookInstanceLifecycleConfigArn": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigArn", + "TargetDevice": { + "target": "com.amazonaws.sagemaker#TargetDevice", "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the lifecycle configuration.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Identifies the target device or the machine learning instance that you want to run\n your model on after the compilation has completed. Alternatively, you can specify OS,\n architecture, and accelerator using TargetPlatform fields. It can be\n used instead of TargetPlatform.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "TargetPlatform": { + "target": "com.amazonaws.sagemaker#TargetPlatform", "traits": { - "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was created.

          " + "smithy.api#documentation": "

          Contains information about a target platform that you want your model to run on, such\n as OS, architecture, and accelerators. It is an alternative of\n TargetDevice.

          \n

          The following examples show how to configure the TargetPlatform and\n CompilerOptions JSON strings for popular target platforms:

          \n
            \n
          • \n

            Raspberry Pi 3 Model B+

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"ARM_EABIHF\"},\n

            \n

            \n \"CompilerOptions\": {'mattr': ['+neon']}\n

            \n
          • \n
          • \n

            Jetson TX2

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"ARM64\", \"Accelerator\":\n \"NVIDIA\"},\n

            \n

            \n \"CompilerOptions\": {'gpu-code': 'sm_62', 'trt-ver': '6.0.1',\n 'cuda-ver': '10.0'}\n

            \n
          • \n
          • \n

            EC2 m5.2xlarge instance OS

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"X86_64\", \"Accelerator\":\n \"NVIDIA\"},\n

            \n

            \n \"CompilerOptions\": {'mcpu': 'skylake-avx512'}\n

            \n
          • \n
          • \n

            RK3399

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"ARM64\", \"Accelerator\":\n \"MALI\"}\n

            \n
          • \n
          • \n

            ARMv7 phone (CPU)

            \n

            \n \"TargetPlatform\": {\"Os\": \"ANDROID\", \"Arch\": \"ARM_EABI\"},\n

            \n

            \n \"CompilerOptions\": {'ANDROID_PLATFORM': 25, 'mattr':\n ['+neon']}\n

            \n
          • \n
          • \n

            ARMv8 phone (CPU)

            \n

            \n \"TargetPlatform\": {\"Os\": \"ANDROID\", \"Arch\": \"ARM64\"},\n

            \n

            \n \"CompilerOptions\": {'ANDROID_PLATFORM': 29}\n

            \n
          • \n
          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "CompilerOptions": { + "target": "com.amazonaws.sagemaker#CompilerOptions", "traits": { - "smithy.api#documentation": "

          A timestamp that tells when the lifecycle configuration was last modified.

          " + "smithy.api#documentation": "

          Specifies additional parameters for compiler options in JSON format. The compiler\n options are TargetPlatform specific. It is required for NVIDIA accelerators\n and highly recommended for CPU compilations. For any other cases, it is optional to\n specify CompilerOptions.\n

          \n
            \n
          • \n

            \n CPU: Compilation for CPU supports the following compiler\n options.

            \n
              \n
            • \n

              \n mcpu: CPU micro-architecture. For example, {'mcpu':\n 'skylake-avx512'}\n

              \n
            • \n
            • \n

              \n mattr: CPU flags. For example, {'mattr': ['+neon',\n '+vfpv4']}\n

              \n
            • \n
            \n
          • \n
          • \n

            \n ARM: Details of ARM CPU compilations.

            \n
              \n
            • \n

              \n NEON: NEON is an implementation of the Advanced SIMD\n extension used in ARMv7 processors.

              \n

              For example, add {'mattr': ['+neon']} to the compiler\n options if compiling for ARM 32-bit platform with the NEON\n support.

              \n
            • \n
            \n
          • \n
          • \n

            \n NVIDIA: Compilation for NVIDIA GPU supports the following\n compiler options.

            \n
              \n
            • \n

              \n gpu_code: Specifies the targeted architecture.

              \n
            • \n
            • \n

              \n trt-ver: Specifies the TensorRT versions in x.y.z.\n format.

              \n
            • \n
            • \n

              \n cuda-ver: Specifies the CUDA version in x.y\n format.

              \n
            • \n
            \n

            For example, {'gpu-code': 'sm_72', 'trt-ver': '6.0.1', 'cuda-ver':\n '10.1'}\n

            \n
          • \n
          • \n

            \n ANDROID: Compilation for the Android OS supports the following\n compiler options:

            \n
              \n
            • \n

              \n ANDROID_PLATFORM: Specifies the Android API levels.\n Available levels range from 21 to 29. For example,\n {'ANDROID_PLATFORM': 28}.

              \n
            • \n
            • \n

              \n mattr: Add {'mattr': ['+neon']} to compiler\n options if compiling for ARM 32-bit platform with NEON support.

              \n
            • \n
            \n
          • \n
          • \n

            \n INFERENTIA: Compilation for target ml_inf1 uses compiler options\n passed in as a JSON string. For example,\n \"CompilerOptions\": \"\\\"--verbose 1 --num-neuroncores 2 -O2\\\"\".\n

            \n

            For information about supported compiler options, see\n \n Neuron Compiler CLI.\n

            \n
          • \n
          • \n

            \n CoreML: Compilation for the CoreML OutputConfig$TargetDevice\n supports the following compiler options:

            \n
              \n
            • \n

              \n class_labels: Specifies the classification labels file\n name inside input tar.gz file. For example,\n {\"class_labels\": \"imagenet_labels_1000.txt\"}.\n Labels inside the txt file should be separated by newlines.

              \n
            • \n
            \n
          • \n
          " + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume\n after compilation job. If you don't provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account

          \n

          The KmsKeyId can be any of the following formats:

          \n
            \n
          • \n

            Key ID: 1234abcd-12ab-34cd-56ef-1234567890ab\n

            \n
          • \n
          • \n

            Key ARN:\n arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\n

            \n
          • \n
          • \n

            Alias name: alias/ExampleAlias\n

            \n
          • \n
          • \n

            Alias name ARN:\n arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\n

            \n
          • \n
          " } } }, "traits": { - "smithy.api#documentation": "

          Provides a summary of a notebook instance lifecycle configuration.

          " - } - }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigSummary" + "smithy.api#documentation": "

          Contains information about the output location for the compiled model and the target\n device that the model runs on. TargetDevice and TargetPlatform\n are mutually exclusive, so you need to choose one between the two to specify your target\n device or platform. If you cannot find your device you want to use from the\n TargetDevice list, use TargetPlatform to describe the\n platform of your edge device and CompilerOptions if there are specific\n settings that are required or recommended to use for particular TargetPlatform.

          " } }, - "com.amazonaws.sagemaker#NotebookInstanceLifecycleHook": { + "com.amazonaws.sagemaker#OutputDataConfig": { "type": "structure", "members": { - "Content": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigContent", + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", "traits": { - "smithy.api#documentation": "

          A base64-encoded string that contains a shell script for a notebook instance lifecycle\n configuration.

          " + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using\n Amazon S3 server-side encryption. The KmsKeyId can be any of the following\n formats:

          \n
            \n
          • \n

            // KMS Key ID

            \n

            \n \"1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          • \n

            // Amazon Resource Name (ARN) of a KMS Key

            \n

            \n \"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          • \n

            // KMS Key Alias

            \n

            \n \"alias/ExampleAlias\"\n

            \n
          • \n
          • \n

            // Amazon Resource Name (ARN) of a KMS Key Alias

            \n

            \n \"arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\"\n

            \n
          • \n
          \n \n

          If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must\n include permissions to call kms:Encrypt. If you don't provide a KMS key ID,\n Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side\n encryption with KMS-managed keys for OutputDataConfig. If you use a bucket\n policy with an s3:PutObject permission that only allows objects with\n server-side encryption, set the condition key of\n s3:x-amz-server-side-encryption to \"aws:kms\". For more\n information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer\n Guide.\n

          \n

          The KMS key policy must grant permission to the IAM role that you specify in your\n CreateTrainingJob, CreateTransformJob, or\n CreateHyperParameterTuningJob requests. For more information, see\n Using Key Policies in AWS KMS in the AWS Key Management Service Developer\n Guide.

          " + } + }, + "S3OutputPath": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For\n example, s3://bucket-name/key-name-prefix.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Contains the notebook instance lifecycle configuration script.

          \n

          Each lifecycle configuration script has a limit of 16384 characters.

          \n

          The value of the $PATH environment variable that is available to both\n scripts is /sbin:/bin:/usr/sbin:/usr/bin.

          \n

          View CloudWatch Logs for notebook instance lifecycle configurations in log group\n /aws/sagemaker/NotebookInstances in log stream\n [notebook-instance-name]/[LifecycleConfigHook].

          \n

          Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs\n for longer than 5 minutes, it fails and the notebook instance is not created or\n started.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " + "smithy.api#documentation": "

          Provides information about how to store model training results (model\n artifacts).

          " } }, - "com.amazonaws.sagemaker#NotebookInstanceName": { + "com.amazonaws.sagemaker#PaginationToken": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 63 + "max": 8192 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#NotebookInstanceNameContains": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 63 + "com.amazonaws.sagemaker#Parameter": { + "type": "structure", + "members": { + "Name": { + "target": "com.amazonaws.sagemaker#PipelineParameterName", + "traits": { + "smithy.api#documentation": "

          The name of the parameter to assign a value to. This parameter name must match a named parameter in the pipeline definition.

          ", + "smithy.api#required": {} + } }, - "smithy.api#pattern": "[a-zA-Z0-9-]+" + "Value": { + "target": "com.amazonaws.sagemaker#String1024", + "traits": { + "smithy.api#documentation": "

          The literal value for the parameter.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Assigns a value to a named Pipeline parameter.

          " } }, - "com.amazonaws.sagemaker#NotebookInstanceSortKey": { + "com.amazonaws.sagemaker#ParameterKey": { "type": "string", "traits": { - "smithy.api#enum": [ - { - "value": "Name", - "name": "NAME" - }, - { - "value": "CreationTime", - "name": "CREATION_TIME" - }, - { - "value": "Status", - "name": "STATUS" - } - ] + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#NotebookInstanceSortOrder": { - "type": "string", + "com.amazonaws.sagemaker#ParameterList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Parameter" + }, "traits": { - "smithy.api#enum": [ - { - "value": "Ascending", - "name": "ASCENDING" - }, - { - "value": "Descending", - "name": "DESCENDING" - } - ] + "smithy.api#length": { + "min": 0, + "max": 50 + } } }, - "com.amazonaws.sagemaker#NotebookInstanceStatus": { + "com.amazonaws.sagemaker#ParameterName": { "type": "string", "traits": { - "smithy.api#enum": [ - { - "value": "Pending", - "name": "Pending" - }, - { - "value": "InService", - "name": "InService" - }, - { - "value": "Stopping", - "name": "Stopping" - }, - { - "value": "Stopped", - "name": "Stopped" - }, - { - "value": "Failed", - "name": "Failed" - }, - { - "value": "Deleting", - "name": "Deleting" - }, - { - "value": "Updating", - "name": "Updating" - } - ] + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*" } }, - "com.amazonaws.sagemaker#NotebookInstanceSummary": { + "com.amazonaws.sagemaker#ParameterRange": { "type": "structure", "members": { - "NotebookInstanceName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceName", - "traits": { - "smithy.api#documentation": "

          The name of the notebook instance that you want a summary for.

          ", - "smithy.api#required": {} - } - }, - "NotebookInstanceArn": { - "target": "com.amazonaws.sagemaker#NotebookInstanceArn", - "traits": { - "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the notebook instance.

          ", - "smithy.api#required": {} - } - }, - "NotebookInstanceStatus": { - "target": "com.amazonaws.sagemaker#NotebookInstanceStatus", - "traits": { - "smithy.api#documentation": "

          The status of the notebook instance.

          " - } - }, - "Url": { - "target": "com.amazonaws.sagemaker#NotebookInstanceUrl", - "traits": { - "smithy.api#documentation": "

          The\n URL that you use to connect to the Jupyter instance running in your notebook instance.\n

          " - } - }, - "InstanceType": { - "target": "com.amazonaws.sagemaker#InstanceType", + "IntegerParameterRangeSpecification": { + "target": "com.amazonaws.sagemaker#IntegerParameterRangeSpecification", "traits": { - "smithy.api#documentation": "

          The type of ML compute instance that the notebook instance is running on.

          " + "smithy.api#documentation": "

          A IntegerParameterRangeSpecification object that defines the possible\n values for an integer hyperparameter.

          " } }, - "CreationTime": { - "target": "com.amazonaws.sagemaker#CreationTime", + "ContinuousParameterRangeSpecification": { + "target": "com.amazonaws.sagemaker#ContinuousParameterRangeSpecification", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the notebook instance was created.

          " + "smithy.api#documentation": "

          A ContinuousParameterRangeSpecification object that defines the possible\n values for a continuous hyperparameter.

          " } }, - "LastModifiedTime": { - "target": "com.amazonaws.sagemaker#LastModifiedTime", + "CategoricalParameterRangeSpecification": { + "target": "com.amazonaws.sagemaker#CategoricalParameterRangeSpecification", "traits": { - "smithy.api#documentation": "

          A timestamp that shows when the notebook instance was last modified.

          " + "smithy.api#documentation": "

          A CategoricalParameterRangeSpecification object that defines the possible\n values for a categorical hyperparameter.

          " } - }, - "NotebookInstanceLifecycleConfigName": { - "target": "com.amazonaws.sagemaker#NotebookInstanceLifecycleConfigName", + } + }, + "traits": { + "smithy.api#documentation": "

          Defines the possible values for categorical, continuous, and integer hyperparameters\n to be used by an algorithm.

          " + } + }, + "com.amazonaws.sagemaker#ParameterRanges": { + "type": "structure", + "members": { + "IntegerParameterRanges": { + "target": "com.amazonaws.sagemaker#IntegerParameterRanges", "traits": { - "smithy.api#documentation": "

          The name of a notebook instance lifecycle configuration associated with this notebook\n instance.

          \n

          For information about notebook instance lifecycle configurations, see Step\n 2.1: (Optional) Customize a Notebook Instance.

          " + "smithy.api#documentation": "

          The array of IntegerParameterRange objects that specify ranges of\n integer hyperparameters that a hyperparameter tuning job searches.

          " } }, - "DefaultCodeRepository": { - "target": "com.amazonaws.sagemaker#CodeRepositoryNameOrUrl", + "ContinuousParameterRanges": { + "target": "com.amazonaws.sagemaker#ContinuousParameterRanges", "traits": { - "smithy.api#documentation": "

          The Git repository associated with the notebook instance as its default code\n repository. This can be either the name of a Git repository stored as a resource in your\n account, or the URL of a Git repository in AWS CodeCommit or in any\n other Git repository. When you open a notebook instance, it opens in the directory that\n contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker\n Notebook Instances.

          " + "smithy.api#documentation": "

          The array of ContinuousParameterRange objects that specify ranges of\n continuous hyperparameters that a hyperparameter tuning job searches.

          " } }, - "AdditionalCodeRepositories": { - "target": "com.amazonaws.sagemaker#AdditionalCodeRepositoryNamesOrUrls", + "CategoricalParameterRanges": { + "target": "com.amazonaws.sagemaker#CategoricalParameterRanges", "traits": { - "smithy.api#documentation": "

          An array of up to three Git repositories associated with the notebook instance. These\n can be either the names of Git repositories stored as resources in your account, or the\n URL of Git repositories in AWS CodeCommit or in any\n other Git repository. These repositories are cloned at the same level as the default\n repository of your notebook instance. For more information, see Associating Git\n Repositories with Amazon SageMaker Notebook Instances.

          " + "smithy.api#documentation": "

          The array of CategoricalParameterRange objects that specify ranges\n of categorical hyperparameters that a hyperparameter tuning job searches.

          " } } }, "traits": { - "smithy.api#documentation": "

          Provides summary information for an Amazon SageMaker notebook instance.

          " - } - }, - "com.amazonaws.sagemaker#NotebookInstanceSummaryList": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#NotebookInstanceSummary" - } - }, - "com.amazonaws.sagemaker#NotebookInstanceUrl": { - "type": "string" - }, - "com.amazonaws.sagemaker#NotebookInstanceVolumeSizeInGB": { - "type": "integer", - "traits": { - "smithy.api#box": {}, - "smithy.api#range": { - "min": 5, - "max": 16384 - } + "smithy.api#documentation": "

          Specifies ranges of integer, continuous, and categorical hyperparameters that a\n hyperparameter tuning job searches. The hyperparameter tuning job launches training jobs\n with hyperparameter values within these ranges to find the combination of values that\n result in the training job with the best performance as measured by the objective metric\n of the hyperparameter tuning job.

          \n \n

          You can specify a maximum of 20 hyperparameters that a hyperparameter tuning job\n can search over. Every possible value of a categorical parameter range counts\n against this limit.

          \n
          " } }, - "com.amazonaws.sagemaker#NotebookOutputOption": { + "com.amazonaws.sagemaker#ParameterType": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Allowed", - "name": "Allowed" + "value": "Integer", + "name": "INTEGER" }, { - "value": "Disabled", - "name": "Disabled" + "value": "Continuous", + "name": "CONTINUOUS" + }, + { + "value": "Categorical", + "name": "CATEGORICAL" + }, + { + "value": "FreeText", + "name": "FREE_TEXT" } ] } }, - "com.amazonaws.sagemaker#NotificationConfiguration": { + "com.amazonaws.sagemaker#ParameterValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ParameterValues": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ParameterValue" + }, + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20 + } + } + }, + "com.amazonaws.sagemaker#Parent": { "type": "structure", "members": { - "NotificationTopicArn": { - "target": "com.amazonaws.sagemaker#NotificationTopicArn", + "TrialName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The ARN for the SNS topic to which notifications should be published.

          " + "smithy.api#documentation": "

          The name of the trial.

          " + } + }, + "ExperimentName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the experiment.

          " } } }, "traits": { - "smithy.api#documentation": "

          Configures SNS notifications of available or expiring work items for work\n teams.

          " + "smithy.api#documentation": "

          The trial that a trial component is associated with and the experiment the trial is part\n of. A component might not be associated with a trial. A component can be associated with\n multiple trials.

          " } }, - "com.amazonaws.sagemaker#NotificationTopicArn": { - "type": "string", + "com.amazonaws.sagemaker#ParentHyperParameterTuningJob": { + "type": "structure", + "members": { + "HyperParameterTuningJobName": { + "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + "traits": { + "smithy.api#documentation": "

          The name of the hyperparameter tuning job to be used as a starting point for a new\n hyperparameter tuning job.

          " + } + } + }, "traits": { - "smithy.api#pattern": "arn:aws[a-z\\-]*:sns:[a-z0-9\\-]*:[0-9]{12}:[a-zA-Z0-9_.-]*" + "smithy.api#documentation": "

          A previously completed or stopped hyperparameter tuning job to be used as a starting\n point for a new hyperparameter tuning job.

          " } }, - "com.amazonaws.sagemaker#NumberOfHumanWorkersPerDataObject": { - "type": "integer", + "com.amazonaws.sagemaker#ParentHyperParameterTuningJobs": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ParentHyperParameterTuningJob" + }, "traits": { - "smithy.api#box": {}, - "smithy.api#range": { + "smithy.api#length": { "min": 1, - "max": 9 + "max": 5 } } }, - "com.amazonaws.sagemaker#ObjectiveStatus": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Succeeded", - "name": "Succeeded" - }, - { - "value": "Pending", - "name": "Pending" - }, - { - "value": "Failed", - "name": "Failed" - } - ] - } - }, - "com.amazonaws.sagemaker#ObjectiveStatusCounter": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 0 - } + "com.amazonaws.sagemaker#Parents": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#Parent" } }, - "com.amazonaws.sagemaker#ObjectiveStatusCounters": { + "com.amazonaws.sagemaker#Pipeline": { "type": "structure", "members": { - "Succeeded": { - "target": "com.amazonaws.sagemaker#ObjectiveStatusCounter", + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", "traits": { - "smithy.api#documentation": "

          The number of training jobs whose final objective metric was evaluated by the\n hyperparameter tuning job and used in the hyperparameter tuning process.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline.

          " } }, - "Pending": { - "target": "com.amazonaws.sagemaker#ObjectiveStatusCounter", + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The number of training jobs that are in progress and pending evaluation of their final\n objective metric.

          " + "smithy.api#documentation": "

          The name of the pipeline.

          " } }, - "Failed": { - "target": "com.amazonaws.sagemaker#ObjectiveStatusCounter", - "traits": { - "smithy.api#documentation": "

          The number of training jobs whose final objective metric was not evaluated and used in\n the hyperparameter tuning process. This typically occurs when the training job failed or\n did not emit an objective metric.

          " - } - } - }, - "traits": { - "smithy.api#documentation": "

          Specifies the number of training jobs that this hyperparameter tuning job launched,\n categorized by the status of their objective metric. The objective metric status shows\n whether the\n final\n objective metric for the training job has been evaluated by the\n tuning job and used in the hyperparameter tuning process.

          " - } - }, - "com.amazonaws.sagemaker#OidcConfig": { - "type": "structure", - "members": { - "ClientId": { - "target": "com.amazonaws.sagemaker#ClientId", + "PipelineDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The OIDC IdP client ID used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The display name of the pipeline.

          " } }, - "ClientSecret": { - "target": "com.amazonaws.sagemaker#ClientSecret", + "PipelineDescription": { + "target": "com.amazonaws.sagemaker#PipelineDescription", "traits": { - "smithy.api#documentation": "

          The OIDC IdP client secret used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The description of the pipeline.

          " } }, - "Issuer": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", "traits": { - "smithy.api#documentation": "

          The OIDC IdP issuer used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the role that created the pipeline.

          " } }, - "AuthorizationEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "PipelineStatus": { + "target": "com.amazonaws.sagemaker#PipelineStatus", "traits": { - "smithy.api#documentation": "

          The OIDC IdP authorization endpoint used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The status of the pipeline.

          " } }, - "TokenEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The OIDC IdP token endpoint used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The creation time of the pipeline.

          " } }, - "UserInfoEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The OIDC IdP user information endpoint used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time that the pipeline was last modified.

          " } }, - "LogoutEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "LastRunTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The OIDC IdP logout endpoint used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The time when the pipeline was last run.

          " } }, - "JwksUri": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "Tags": { + "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          A list of tags that apply to the pipeline.

          " } } }, "traits": { - "smithy.api#documentation": "

          Use this parameter to configure your OIDC Identity Provider (IdP).

          " + "smithy.api#documentation": "

          A SageMaker Model Building Pipeline instance.

          " } }, - "com.amazonaws.sagemaker#OidcConfigForResponse": { + "com.amazonaws.sagemaker#PipelineArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:pipeline/.*" + } + }, + "com.amazonaws.sagemaker#PipelineDefinition": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1048576 + }, + "smithy.api#pattern": ".*(?:[ \\r\\n\\t].*)*" + } + }, + "com.amazonaws.sagemaker#PipelineDescription": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 3072 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#PipelineExecution": { "type": "structure", "members": { - "ClientId": { - "target": "com.amazonaws.sagemaker#ClientId", + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", "traits": { - "smithy.api#documentation": "

          The OIDC IdP client ID used to configure your private workforce.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline that was executed.

          " } }, - "Issuer": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          The OIDC IdP issuer used to configure your private workforce.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " } }, - "AuthorizationEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "PipelineExecutionDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineExecutionName", "traits": { - "smithy.api#documentation": "

          The OIDC IdP authorization endpoint used to configure your private workforce.

          " + "smithy.api#documentation": "

          The display name of the pipeline execution.

          " } }, - "TokenEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "PipelineExecutionStatus": { + "target": "com.amazonaws.sagemaker#PipelineExecutionStatus", "traits": { - "smithy.api#documentation": "

          The OIDC IdP token endpoint used to configure your private workforce.

          " + "smithy.api#documentation": "

          The status of the pipeline execution.

          " } }, - "UserInfoEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "PipelineExecutionDescription": { + "target": "com.amazonaws.sagemaker#PipelineExecutionDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline execution.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The OIDC IdP user information endpoint used to configure your private workforce.

          " + "smithy.api#documentation": "

          The creation time of the pipeline execution.

          " } }, - "LogoutEndpoint": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The OIDC IdP logout endpoint used to configure your private workforce.

          " + "smithy.api#documentation": "

          The time that the pipeline execution was last modified.

          " } }, - "JwksUri": { - "target": "com.amazonaws.sagemaker#OidcEndpoint", + "CreatedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "LastModifiedBy": { + "target": "com.amazonaws.sagemaker#UserContext" + }, + "PipelineParameters": { + "target": "com.amazonaws.sagemaker#ParameterList", "traits": { - "smithy.api#documentation": "

          The OIDC IdP JSON Web Key Set (Jwks) URI used to configure your private workforce.

          " + "smithy.api#documentation": "

          Contains a list of pipeline parameters. This list can be empty.

          " } } }, "traits": { - "smithy.api#documentation": "

          Your OIDC IdP workforce configuration.

          " + "smithy.api#documentation": "

          An execution of a pipeline.

          " } }, - "com.amazonaws.sagemaker#OidcEndpoint": { + "com.amazonaws.sagemaker#PipelineExecutionArn": { "type": "string", "traits": { "smithy.api#length": { "min": 0, - "max": 500 + "max": 256 }, - "smithy.api#pattern": "https://\\S+" + "smithy.api#pattern": "^arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:pipeline\\/.*\\/execution\\/.*$" } }, - "com.amazonaws.sagemaker#OidcMemberDefinition": { - "type": "structure", - "members": { - "Groups": { - "target": "com.amazonaws.sagemaker#Groups", - "traits": { - "smithy.api#documentation": "

          A list of comma-separated strings that identify\n user groups in your OIDC IdP. Each user group is\n made up of a group of private workers.

          ", - "smithy.api#required": {} - } - } - }, + "com.amazonaws.sagemaker#PipelineExecutionDescription": { + "type": "string", "traits": { - "smithy.api#documentation": "

          A list of user groups that exist in your OIDC Identity Provider (IdP). \n One to ten groups can be used to create a single private work team. \n When you add a user group to the list of Groups, you can add that user group to one or more\n private work teams. If you add a user group to a private work team, all workers in that user group \n are added to the work team.

          " + "smithy.api#length": { + "min": 0, + "max": 3072 + }, + "smithy.api#pattern": ".*" } }, - "com.amazonaws.sagemaker#Operator": { + "com.amazonaws.sagemaker#PipelineExecutionName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 82 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,81}" + } + }, + "com.amazonaws.sagemaker#PipelineExecutionStatus": { "type": "string", "traits": { "smithy.api#enum": [ { - "value": "Equals", - "name": "EQUALS" - }, - { - "value": "NotEquals", - "name": "NOT_EQUALS" - }, - { - "value": "GreaterThan", - "name": "GREATER_THAN" - }, - { - "value": "GreaterThanOrEqualTo", - "name": "GREATER_THAN_OR_EQUAL_TO" - }, - { - "value": "LessThan", - "name": "LESS_THAN" - }, - { - "value": "LessThanOrEqualTo", - "name": "LESS_THAN_OR_EQUAL_TO" - }, - { - "value": "Contains", - "name": "CONTAINS" + "value": "Executing", + "name": "EXECUTING" }, { - "value": "Exists", - "name": "EXISTS" + "value": "Stopping", + "name": "STOPPING" }, { - "value": "NotExists", - "name": "NOT_EXISTS" + "value": "Stopped", + "name": "STOPPED" }, { - "value": "In", - "name": "IN" - } - ] - } - }, - "com.amazonaws.sagemaker#OptionalDouble": { - "type": "double", - "traits": { - "smithy.api#box": {} - } - }, - "com.amazonaws.sagemaker#OptionalInteger": { - "type": "integer", - "traits": { - "smithy.api#box": {} - } - }, - "com.amazonaws.sagemaker#OptionalVolumeSizeInGB": { - "type": "integer", - "traits": { - "smithy.api#range": { - "min": 0 - } - } - }, - "com.amazonaws.sagemaker#OrderKey": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Ascending", - "name": "Ascending" + "value": "Failed", + "name": "FAILED" }, { - "value": "Descending", - "name": "Descending" + "value": "Succeeded", + "name": "SUCCEEDED" } ] } }, - "com.amazonaws.sagemaker#OutputConfig": { + "com.amazonaws.sagemaker#PipelineExecutionStep": { "type": "structure", "members": { - "S3OutputLocation": { - "target": 
"com.amazonaws.sagemaker#S3Uri", + "StepName": { + "target": "com.amazonaws.sagemaker#StepName", "traits": { - "smithy.api#documentation": "

          Identifies the S3 bucket where you want Amazon SageMaker to store the model artifacts. For\n example, s3://bucket-name/key-name-prefix.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The name of the step that is executed.

          " } }, - "TargetDevice": { - "target": "com.amazonaws.sagemaker#TargetDevice", + "StartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Identifies the target device or the machine learning instance that you want to run\n your model on after the compilation has completed. Alternatively, you can specify OS,\n architecture, and accelerator using TargetPlatform fields. It can be\n used instead of TargetPlatform.

          " + "smithy.api#documentation": "

          The time that the step started executing.

          " } }, - "TargetPlatform": { - "target": "com.amazonaws.sagemaker#TargetPlatform", + "EndTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          Contains information about a target platform that you want your model to run on, such\n as OS, architecture, and accelerators. It is an alternative to\n TargetDevice.

          \n

          The following examples show how to configure the TargetPlatform and\n CompilerOptions JSON strings for popular target platforms:

          \n
            \n
          • \n

            Raspberry Pi 3 Model B+

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"ARM_EABIHF\"},\n

            \n

            \n \"CompilerOptions\": {'mattr': ['+neon']}\n

            \n
          • \n
          • \n

            Jetson TX2

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"ARM64\", \"Accelerator\":\n \"NVIDIA\"},\n

            \n

            \n \"CompilerOptions\": {'gpu-code': 'sm_62', 'trt-ver': '6.0.1',\n 'cuda-ver': '10.0'}\n

            \n
          • \n
          • \n

            EC2 m5.2xlarge instance OS

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"X86_64\", \"Accelerator\":\n \"NVIDIA\"},\n

            \n

            \n \"CompilerOptions\": {'mcpu': 'skylake-avx512'}\n

            \n
          • \n
          • \n

            RK3399

            \n

            \n \"TargetPlatform\": {\"Os\": \"LINUX\", \"Arch\": \"ARM64\", \"Accelerator\":\n \"MALI\"}\n

            \n
          • \n
          • \n

            ARMv7 phone (CPU)

            \n

            \n \"TargetPlatform\": {\"Os\": \"ANDROID\", \"Arch\": \"ARM_EABI\"},\n

            \n

            \n \"CompilerOptions\": {'ANDROID_PLATFORM': 25, 'mattr':\n ['+neon']}\n

            \n
          • \n
          • \n

            ARMv8 phone (CPU)

            \n

            \n \"TargetPlatform\": {\"Os\": \"ANDROID\", \"Arch\": \"ARM64\"},\n

            \n

            \n \"CompilerOptions\": {'ANDROID_PLATFORM': 29}\n

            \n
          • \n
          " + "smithy.api#documentation": "

          The time that the step stopped executing.

          " } }, - "CompilerOptions": { - "target": "com.amazonaws.sagemaker#CompilerOptions", + "StepStatus": { + "target": "com.amazonaws.sagemaker#StepStatus", "traits": { - "smithy.api#documentation": "

          Specifies additional parameters for compiler options in JSON format. The compiler\n options are TargetPlatform specific. It is required for NVIDIA accelerators\n and highly recommended for CPU compilations. For any other cases, it is optional to\n specify CompilerOptions.\n

          \n
            \n
          • \n

            \n CPU: Compilation for CPU supports the following compiler\n options.

            \n
              \n
            • \n

              \n mcpu: CPU micro-architecture. For example, {'mcpu':\n 'skylake-avx512'}\n

              \n
            • \n
            • \n

              \n mattr: CPU flags. For example, {'mattr': ['+neon',\n '+vfpv4']}\n

              \n
            • \n
            \n
          • \n
          • \n

            \n ARM: Details of ARM CPU compilations.

            \n
              \n
            • \n

              \n NEON: NEON is an implementation of the Advanced SIMD\n extension used in ARMv7 processors.

              \n

              For example, add {'mattr': ['+neon']} to the compiler\n options if compiling for ARM 32-bit platform with the NEON\n support.

              \n
            • \n
            \n
          • \n
          • \n

            \n NVIDIA: Compilation for NVIDIA GPU supports the following\n compiler options.

            \n
              \n
            • \n

              \n gpu_code: Specifies the targeted architecture.

              \n
            • \n
            • \n

              \n trt-ver: Specifies the TensorRT versions in x.y.z.\n format.

              \n
            • \n
            • \n

              \n cuda-ver: Specifies the CUDA version in x.y\n format.

              \n
            • \n
            \n

            For example, {'gpu-code': 'sm_72', 'trt-ver': '6.0.1', 'cuda-ver':\n '10.1'}\n

            \n
          • \n
          • \n

            \n ANDROID: Compilation for the Android OS supports the following\n compiler options:

            \n
              \n
            • \n

              \n ANDROID_PLATFORM: Specifies the Android API levels.\n Available levels range from 21 to 29. For example,\n {'ANDROID_PLATFORM': 28}.

              \n
            • \n
            • \n

              \n mattr: Add {'mattr': ['+neon']} to compiler\n options if compiling for ARM 32-bit platform with NEON support.

              \n
            • \n
            \n
          • \n
          • \n

            \n INFERENTIA: Compilation for target ml_inf1 uses compiler options \n passed in as a JSON string. For example, \n \"CompilerOptions\": \"\\\"--verbose 1 --num-neuroncores 2 -O2\\\"\".\n

            \n

            For information about supported compiler options, see \n \n Neuron Compiler CLI.\n

            \n
          • \n
          • \n

            \n CoreML: Compilation for the CoreML OutputConfig$TargetDevice\n supports the following compiler options:

            \n
              \n
            • \n

              \n class_labels: Specifies the classification labels file\n name inside input tar.gz file. For example,\n {\"class_labels\": \"imagenet_labels_1000.txt\"}.\n Labels inside the txt file should be separated by newlines.

              \n
            • \n
            \n
          • \n
          " + "smithy.api#documentation": "

          The status of the step execution.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          Contains information about the output location for the compiled model and the target\n device that the model runs on. TargetDevice and TargetPlatform\n are mutually exclusive, so you need to choose one between the two to specify your target\n device or platform. If you cannot find your device you want to use from the\n TargetDevice list, use TargetPlatform to describe the\n platform of your edge device and CompilerOptions if there are specific\n settings that are required or recommended to use for particular TargetPlatform.

          " - } - }, - "com.amazonaws.sagemaker#OutputDataConfig": { - "type": "structure", - "members": { - "KmsKeyId": { - "target": "com.amazonaws.sagemaker#KmsKeyId", + }, + "CacheHitResult": { + "target": "com.amazonaws.sagemaker#CacheHitResult", "traits": { - "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using\n Amazon S3 server-side encryption. The KmsKeyId can be any of the following\n formats:

          \n
            \n
          • \n

            // KMS Key ID

            \n

            \n \"1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          • \n

            // Amazon Resource Name (ARN) of a KMS Key

            \n

            \n \"arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab\"\n

            \n
          • \n
          • \n

            // KMS Key Alias

            \n

            \n \"alias/ExampleAlias\"\n

            \n
          • \n
          • \n

            // Amazon Resource Name (ARN) of a KMS Key Alias

            \n

            \n \"arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias\"\n

            \n
          • \n
          \n \n

          If you use a KMS key ID or an alias of your master key, the Amazon SageMaker execution role must\n include permissions to call kms:Encrypt. If you don't provide a KMS key ID,\n Amazon SageMaker uses the default KMS key for Amazon S3 for your role's account. Amazon SageMaker uses server-side\n encryption with KMS-managed keys for OutputDataConfig. If you use a bucket\n policy with an s3:PutObject permission that only allows objects with\n server-side encryption, set the condition key of\n s3:x-amz-server-side-encryption to \"aws:kms\". For more\n information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer\n Guide.\n

          \n

          The KMS key policy must grant permission to the IAM role that you specify in your\n CreateTrainingJob, CreateTransformJob, or\n CreateHyperParameterTuningJob requests. For more information, see\n Using Key Policies in AWS KMS in the AWS Key Management Service Developer\n Guide.

          " + "smithy.api#documentation": "

          If this pipeline execution step was cached, details on the cache hit.

          " } }, - "S3OutputPath": { - "target": "com.amazonaws.sagemaker#S3Uri", + "FailureReason": { + "target": "com.amazonaws.sagemaker#FailureReason", "traits": { - "smithy.api#documentation": "

          Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For\n example, s3://bucket-name/key-name-prefix.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The reason why the step failed execution. This is only returned if the step failed its execution.

          " + } + }, + "Metadata": { + "target": "com.amazonaws.sagemaker#PipelineExecutionStepMetadata", + "traits": { + "smithy.api#documentation": "

          The metadata for the step execution.

          " } } }, "traits": { - "smithy.api#documentation": "

          Provides information about how to store model training results (model\n artifacts).

          " - } - }, - "com.amazonaws.sagemaker#PaginationToken": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 8192 - }, - "smithy.api#pattern": ".*" - } - }, - "com.amazonaws.sagemaker#ParameterKey": { - "type": "string", - "traits": { - "smithy.api#length": { - "min": 0, - "max": 256 - }, - "smithy.api#pattern": ".*" + "smithy.api#documentation": "

          An execution of a step in a pipeline.

          " } }, - "com.amazonaws.sagemaker#ParameterName": { - "type": "string", + "com.amazonaws.sagemaker#PipelineExecutionStepList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#PipelineExecutionStep" + }, "traits": { "smithy.api#length": { "min": 0, - "max": 256 - }, - "smithy.api#pattern": "[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*" + "max": 100 + } } }, - "com.amazonaws.sagemaker#ParameterRange": { + "com.amazonaws.sagemaker#PipelineExecutionStepMetadata": { "type": "structure", "members": { - "IntegerParameterRangeSpecification": { - "target": "com.amazonaws.sagemaker#IntegerParameterRangeSpecification", + "TrainingJob": { + "target": "com.amazonaws.sagemaker#TrainingJobStepMetadata", "traits": { - "smithy.api#documentation": "

          A IntegerParameterRangeSpecification object that defines the possible\n values for an integer hyperparameter.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the training job that was run by this step execution.

          " } }, - "ContinuousParameterRangeSpecification": { - "target": "com.amazonaws.sagemaker#ContinuousParameterRangeSpecification", + "ProcessingJob": { + "target": "com.amazonaws.sagemaker#ProcessingJobStepMetadata", "traits": { - "smithy.api#documentation": "

          A ContinuousParameterRangeSpecification object that defines the possible\n values for a continuous hyperparameter.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the processing job that was run by this step execution.

          " } }, - "CategoricalParameterRangeSpecification": { - "target": "com.amazonaws.sagemaker#CategoricalParameterRangeSpecification", + "TransformJob": { + "target": "com.amazonaws.sagemaker#TransformJobStepMetadata", "traits": { - "smithy.api#documentation": "

          A CategoricalParameterRangeSpecification object that defines the possible\n values for a categorical hyperparameter.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the transform job that was run by this step execution.

          " + } + }, + "Model": { + "target": "com.amazonaws.sagemaker#ModelStepMetadata", + "traits": { + "smithy.api#documentation": "

          Metadata for the Model step.

          " + } + }, + "RegisterModel": { + "target": "com.amazonaws.sagemaker#RegisterModelStepMetadata", + "traits": { + "smithy.api#documentation": "

          Metadata for the RegisterModel step.

          " + } + }, + "Condition": { + "target": "com.amazonaws.sagemaker#ConditionStepMetadata", + "traits": { + "smithy.api#documentation": "

          If this is a Condition step metadata object, details on the condition.

          " } } }, "traits": { - "smithy.api#documentation": "

          Defines the possible values for categorical, continuous, and integer hyperparameters\n to be used by an algorithm.

          " + "smithy.api#documentation": "

          Metadata for a step execution.

          " } }, - "com.amazonaws.sagemaker#ParameterRanges": { + "com.amazonaws.sagemaker#PipelineExecutionSummary": { "type": "structure", "members": { - "IntegerParameterRanges": { - "target": "com.amazonaws.sagemaker#IntegerParameterRanges", + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", "traits": { - "smithy.api#documentation": "

          The array of IntegerParameterRange objects that specify ranges of\n integer hyperparameters that a hyperparameter tuning job searches.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " } }, - "ContinuousParameterRanges": { - "target": "com.amazonaws.sagemaker#ContinuousParameterRanges", + "StartTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The array of ContinuousParameterRange objects that specify ranges of\n continuous hyperparameters that a hyperparameter tuning job searches.

          " + "smithy.api#documentation": "

          The start time of the pipeline execution.

          " } }, - "CategoricalParameterRanges": { - "target": "com.amazonaws.sagemaker#CategoricalParameterRanges", + "PipelineExecutionStatus": { + "target": "com.amazonaws.sagemaker#PipelineExecutionStatus", "traits": { - "smithy.api#documentation": "

          The array of CategoricalParameterRange objects that specify ranges\n of categorical hyperparameters that a hyperparameter tuning job searches.

          " + "smithy.api#documentation": "

          The status of the pipeline execution.

          " + } + }, + "PipelineExecutionDescription": { + "target": "com.amazonaws.sagemaker#PipelineExecutionDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline execution.

          " + } + }, + "PipelineExecutionDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineExecutionName", + "traits": { + "smithy.api#documentation": "

          The display name of the pipeline execution.

          " } } }, "traits": { - "smithy.api#documentation": "

          Specifies ranges of integer, continuous, and categorical hyperparameters that a\n hyperparameter tuning job searches. The hyperparameter tuning job launches training jobs\n with hyperparameter values within these ranges to find the combination of values that\n result in the training job with the best performance as measured by the objective metric\n of the hyperparameter tuning job.

          \n \n

          You can specify a maximum of 20 hyperparameters that a hyperparameter tuning job\n can search over. Every possible value of a categorical parameter range counts\n against this limit.

          \n
          " - } - }, - "com.amazonaws.sagemaker#ParameterType": { - "type": "string", - "traits": { - "smithy.api#enum": [ - { - "value": "Integer", - "name": "INTEGER" - }, - { - "value": "Continuous", - "name": "CONTINUOUS" - }, - { - "value": "Categorical", - "name": "CATEGORICAL" - }, - { - "value": "FreeText", - "name": "FREE_TEXT" - } - ] + "smithy.api#documentation": "

          A pipeline execution summary.

          " } }, - "com.amazonaws.sagemaker#ParameterValue": { - "type": "string", + "com.amazonaws.sagemaker#PipelineExecutionSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#PipelineExecutionSummary" + }, "traits": { "smithy.api#length": { "min": 0, + "max": 100 + } + } + }, + "com.amazonaws.sagemaker#PipelineName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, "max": 256 }, - "smithy.api#pattern": ".*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,255}" } }, - "com.amazonaws.sagemaker#ParameterValues": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#ParameterValue" - }, + "com.amazonaws.sagemaker#PipelineParameterName": { + "type": "string", "traits": { "smithy.api#length": { "min": 1, - "max": 20 - } + "max": 256 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,255}$" } }, - "com.amazonaws.sagemaker#Parent": { + "com.amazonaws.sagemaker#PipelineStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Active", + "name": "ACTIVE" + } + ] + } + }, + "com.amazonaws.sagemaker#PipelineSummary": { "type": "structure", "members": { - "TrialName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", "traits": { - "smithy.api#documentation": "

          The name of the trial.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline.

          " } }, - "ExperimentName": { - "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The name of the experiment.

          " + "smithy.api#documentation": "

          The name of the pipeline.

          " } - } - }, - "traits": { - "smithy.api#documentation": "

          The trial that a trial component is associated with and the experiment the trial is part\n of. A component might not be associated with a trial. A component can be associated with\n multiple trials.

          " - } - }, - "com.amazonaws.sagemaker#ParentHyperParameterTuningJob": { - "type": "structure", - "members": { - "HyperParameterTuningJobName": { - "target": "com.amazonaws.sagemaker#HyperParameterTuningJobName", + }, + "PipelineDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineName", "traits": { - "smithy.api#documentation": "

          The name of the hyperparameter tuning job to be used as a starting point for a new\n hyperparameter tuning job.

          " + "smithy.api#documentation": "

          The display name of the pipeline.

          " + } + }, + "PipelineDescription": { + "target": "com.amazonaws.sagemaker#PipelineDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline.

          " + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) that the pipeline used to execute.

          " + } + }, + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The creation time of the pipeline.

          " + } + }, + "LastModifiedTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The time that the pipeline was last modified.

          " + } + }, + "LastExecutionTime": { + "target": "com.amazonaws.sagemaker#Timestamp", + "traits": { + "smithy.api#documentation": "

          The last time that a pipeline execution began.

          " } } }, "traits": { - "smithy.api#documentation": "

          A previously completed or stopped hyperparameter tuning job to be used as a starting\n point for a new hyperparameter tuning job.

          " + "smithy.api#documentation": "

          A summary of a pipeline.

          " } }, - "com.amazonaws.sagemaker#ParentHyperParameterTuningJobs": { + "com.amazonaws.sagemaker#PipelineSummaryList": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ParentHyperParameterTuningJob" + "target": "com.amazonaws.sagemaker#PipelineSummary" }, "traits": { "smithy.api#length": { - "min": 1, - "max": 5 + "min": 0, + "max": 100 } } }, - "com.amazonaws.sagemaker#Parents": { - "type": "list", - "member": { - "target": "com.amazonaws.sagemaker#Parent" + "com.amazonaws.sagemaker#PolicyString": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20480 + }, + "smithy.api#pattern": ".*" } }, "com.amazonaws.sagemaker#PresignedDomainUrl": { @@ -18518,6 +23938,21 @@ "smithy.api#pattern": "[\\S\\s]*" } }, + "com.amazonaws.sagemaker#ProcessingFeatureStoreOutput": { + "type": "structure", + "members": { + "FeatureGroupName": { + "target": "com.amazonaws.sagemaker#FeatureGroupName", + "traits": { + "smithy.api#documentation": "

          The name of the Amazon SageMaker FeatureGroup to use as the destination for processing job output.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          Configuration for processing job outputs in Amazon SageMaker Feature Store.

          " + } + }, "com.amazonaws.sagemaker#ProcessingInput": { "type": "structure", "members": { @@ -18528,16 +23963,27 @@ "smithy.api#required": {} } }, + "AppManaged": { + "target": "com.amazonaws.sagemaker#AppManaged", + "traits": { + "smithy.api#documentation": "

          When True, input operations such as data download are managed natively by the\n processing job application. When False (default), input operations are managed by Amazon SageMaker.

          " + } + }, "S3Input": { "target": "com.amazonaws.sagemaker#ProcessingS3Input", "traits": { - "smithy.api#documentation": "

          The S3 inputs for the processing job.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Configuration for processing job inputs in Amazon S3.

          " + } + }, + "DatasetDefinition": { + "target": "com.amazonaws.sagemaker#DatasetDefinition", + "traits": { + "smithy.api#documentation": "

          Configuration for a Dataset Definition input.

          " } } }, "traits": { - "smithy.api#documentation": "

          The inputs for a processing job.

          " + "smithy.api#documentation": "

          The inputs for a processing job. The processing input must specify exactly one of either\n S3Input or DatasetDefinition types.

          " } }, "com.amazonaws.sagemaker#ProcessingInputs": { @@ -18727,7 +24173,7 @@ "ProcessingInputs": { "target": "com.amazonaws.sagemaker#ProcessingInputs", "traits": { - "smithy.api#documentation": "

          For each input, data is downloaded from S3 into the processing container before the\n processing job begins running if \"S3InputMode\" is set to File.

          " + "smithy.api#documentation": "

          List of input configurations for the processing job.

          " } }, "ProcessingOutputConfig": { @@ -18860,7 +24306,7 @@ "min": 1, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#ProcessingJobStatus": { @@ -18890,6 +24336,20 @@ ] } }, + "com.amazonaws.sagemaker#ProcessingJobStepMetadata": { + "type": "structure", + "members": { + "Arn": { + "target": "com.amazonaws.sagemaker#ProcessingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the processing job.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Metadata for a processing job step.

          " + } + }, "com.amazonaws.sagemaker#ProcessingJobSummaries": { "type": "list", "member": { @@ -18988,13 +24448,24 @@ "S3Output": { "target": "com.amazonaws.sagemaker#ProcessingS3Output", "traits": { - "smithy.api#documentation": "

          Configuration for processing job outputs in Amazon S3.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Configuration for processing job outputs in Amazon S3.

          " + } + }, + "FeatureStoreOutput": { + "target": "com.amazonaws.sagemaker#ProcessingFeatureStoreOutput", + "traits": { + "smithy.api#documentation": "

          Configuration for processing job outputs in Amazon SageMaker Feature Store. This processing output\n type is only supported when AppManaged is specified.

          " + } + }, + "AppManaged": { + "target": "com.amazonaws.sagemaker#AppManaged", + "traits": { + "smithy.api#documentation": "

          When True, output operations such as data upload are managed natively by the\n processing job application. When False (default), output operations are managed by\n Amazon SageMaker.

          " } } }, "traits": { - "smithy.api#documentation": "

          Describes the results of a processing job.

          " + "smithy.api#documentation": "

          Describes the results of a processing job. The processing output must specify exactly one of\n either S3Output or FeatureStoreOutput types.

          " } }, "com.amazonaws.sagemaker#ProcessingOutputConfig": { @@ -19003,7 +24474,7 @@ "Outputs": { "target": "com.amazonaws.sagemaker#ProcessingOutputs", "traits": { - "smithy.api#documentation": "

          Output configuration information for a processing job.

          ", + "smithy.api#documentation": "

          List of output configurations for the processing job.

          ", "smithy.api#required": {} } }, @@ -19103,8 +24574,7 @@ "LocalPath": { "target": "com.amazonaws.sagemaker#ProcessingLocalPath", "traits": { - "smithy.api#documentation": "

          The local path to the Amazon S3 bucket where you want Amazon SageMaker to download the inputs to\n run a processing job. LocalPath is an absolute path to the input\n data.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          The local path to the Amazon S3 bucket where you want Amazon SageMaker to download the inputs to\n run a processing job. LocalPath is an absolute path to the input\n data. This is a required parameter when AppManaged is False\n (default).

          " } }, "S3DataType": { @@ -19117,8 +24587,7 @@ "S3InputMode": { "target": "com.amazonaws.sagemaker#ProcessingS3InputMode", "traits": { - "smithy.api#documentation": "

          Whether to use File or Pipe input mode. In\n File mode, Amazon SageMaker copies the data from the input source onto the local\n Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm.\n This is the most commonly used input mode. In Pipe mode, Amazon SageMaker streams input\n data from the source directly to your algorithm without using the EBS volume.

          ", - "smithy.api#required": {} + "smithy.api#documentation": "

          Whether to use File or Pipe input mode. In\n File mode, Amazon SageMaker copies the data from the input source onto the local\n Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm.\n This is the most commonly used input mode. In Pipe mode, Amazon SageMaker streams input\n data from the source directly to your algorithm without using the EBS volume. This is a\n required parameter when AppManaged is False (default).

          " } }, "S3DataDistributionType": { @@ -19135,7 +24604,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Information about where and how you want to obtain the inputs for an processing\n job.

          " + "smithy.api#documentation": "

          Configuration for processing job inputs in Amazon S3.

          " } }, "com.amazonaws.sagemaker#ProcessingS3InputMode": { @@ -19179,7 +24648,7 @@ } }, "traits": { - "smithy.api#documentation": "

          Information about where and how you want to store the results of an\n processing job.

          " + "smithy.api#documentation": "

          Configuration for processing job outputs in Amazon S3.

          " } }, "com.amazonaws.sagemaker#ProcessingS3UploadMode": { @@ -19600,60 +25069,210 @@ } } }, - "com.amazonaws.sagemaker#ProductionVariantSummary": { + "com.amazonaws.sagemaker#ProductionVariantSummary": { + "type": "structure", + "members": { + "VariantName": { + "target": "com.amazonaws.sagemaker#VariantName", + "traits": { + "smithy.api#documentation": "

          The name of the variant.

          ", + "smithy.api#required": {} + } + }, + "DeployedImages": { + "target": "com.amazonaws.sagemaker#DeployedImages", + "traits": { + "smithy.api#documentation": "

          An array of DeployedImage objects that specify the Amazon EC2 Container Registry paths of the\n inference images deployed on instances of this ProductionVariant.

          " + } + }, + "CurrentWeight": { + "target": "com.amazonaws.sagemaker#VariantWeight", + "traits": { + "smithy.api#documentation": "

          The weight associated with the variant.

          " + } + }, + "DesiredWeight": { + "target": "com.amazonaws.sagemaker#VariantWeight", + "traits": { + "smithy.api#documentation": "

          The requested weight, as specified in the\n UpdateEndpointWeightsAndCapacities request.

          " + } + }, + "CurrentInstanceCount": { + "target": "com.amazonaws.sagemaker#TaskCount", + "traits": { + "smithy.api#documentation": "

          The number of instances associated with the variant.

          " + } + }, + "DesiredInstanceCount": { + "target": "com.amazonaws.sagemaker#TaskCount", + "traits": { + "smithy.api#documentation": "

          The number of instances requested in the\n UpdateEndpointWeightsAndCapacities request.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Describes weight and capacities for a production variant associated with an\n endpoint. If you sent a request to the UpdateEndpointWeightsAndCapacities\n API and the endpoint status is Updating, you get different desired and\n current values.

          " + } + }, + "com.amazonaws.sagemaker#ProductionVariantSummaryList": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ProductionVariantSummary" + }, + "traits": { + "smithy.api#length": { + "min": 1 + } + } + }, + "com.amazonaws.sagemaker#ProjectArn": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 2048 + }, + "smithy.api#pattern": "arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:project:.*" + } + }, + "com.amazonaws.sagemaker#ProjectEntityName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 32 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,31}" + } + }, + "com.amazonaws.sagemaker#ProjectId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 20 + }, + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + } + }, + "com.amazonaws.sagemaker#ProjectSortBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, + "com.amazonaws.sagemaker#ProjectSortOrder": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Ascending", + "name": "ASCENDING" + }, + { + "value": "Descending", + "name": "DESCENDING" + } + ] + } + }, + "com.amazonaws.sagemaker#ProjectStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Pending", + "name": "PENDING" + }, + { + "value": "CreateInProgress", + "name": "CREATE_IN_PROGRESS" + }, + { + "value": "CreateCompleted", + "name": "CREATE_COMPLETED" + }, + { + "value": "CreateFailed", + "name": "CREATE_FAILED" + }, + { + "value": "DeleteInProgress", + "name": "DELETE_IN_PROGRESS" + }, + { + "value": "DeleteFailed", + "name": "DELETE_FAILED" + }, + { + "value": "DeleteCompleted", + "name": "DELETE_COMPLETED" + } + ] + } + }, + "com.amazonaws.sagemaker#ProjectSummary": { "type": "structure", "members": { - "VariantName": { - "target": 
"com.amazonaws.sagemaker#VariantName", + "ProjectName": { + "target": "com.amazonaws.sagemaker#ProjectEntityName", "traits": { - "smithy.api#documentation": "

          The name of the variant.

          ", + "smithy.api#documentation": "

          The name of the project.

          ", "smithy.api#required": {} } }, - "DeployedImages": { - "target": "com.amazonaws.sagemaker#DeployedImages", + "ProjectDescription": { + "target": "com.amazonaws.sagemaker#EntityDescription", "traits": { - "smithy.api#documentation": "

          An array of DeployedImage objects that specify the Amazon EC2 Container Registry paths of the\n inference images deployed on instances of this ProductionVariant.

          " + "smithy.api#documentation": "

          The description of the project.

          " } }, - "CurrentWeight": { - "target": "com.amazonaws.sagemaker#VariantWeight", + "ProjectArn": { + "target": "com.amazonaws.sagemaker#ProjectArn", "traits": { - "smithy.api#documentation": "

          The weight associated with the variant.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the project.

          ", + "smithy.api#required": {} } }, - "DesiredWeight": { - "target": "com.amazonaws.sagemaker#VariantWeight", + "ProjectId": { + "target": "com.amazonaws.sagemaker#ProjectId", "traits": { - "smithy.api#documentation": "

          The requested weight, as specified in the\n UpdateEndpointWeightsAndCapacities request.

          " + "smithy.api#documentation": "

          The ID of the project.

          ", + "smithy.api#required": {} } }, - "CurrentInstanceCount": { - "target": "com.amazonaws.sagemaker#TaskCount", + "CreationTime": { + "target": "com.amazonaws.sagemaker#Timestamp", "traits": { - "smithy.api#documentation": "

          The number of instances associated with the variant.

          " + "smithy.api#documentation": "

          The time that the project was created.

          ", + "smithy.api#required": {} } }, - "DesiredInstanceCount": { - "target": "com.amazonaws.sagemaker#TaskCount", + "ProjectStatus": { + "target": "com.amazonaws.sagemaker#ProjectStatus", "traits": { - "smithy.api#documentation": "

          The number of instances requested in the\n UpdateEndpointWeightsAndCapacities request.

          " + "smithy.api#documentation": "

          The status of the project.

          ", + "smithy.api#required": {} } } }, "traits": { - "smithy.api#documentation": "

          Describes weight and capacities for a production variant associated with an\n endpoint. If you sent a request to the UpdateEndpointWeightsAndCapacities\n API and the endpoint status is Updating, you get different desired and\n current values.

          " + "smithy.api#documentation": "

          Information about a project.

          " } }, - "com.amazonaws.sagemaker#ProductionVariantSummaryList": { + "com.amazonaws.sagemaker#ProjectSummaryList": { "type": "list", "member": { - "target": "com.amazonaws.sagemaker#ProductionVariantSummary" - }, - "traits": { - "smithy.api#length": { - "min": 1 - } + "target": "com.amazonaws.sagemaker#ProjectSummary" } }, "com.amazonaws.sagemaker#PropertyNameHint": { @@ -19701,6 +25320,58 @@ "target": "com.amazonaws.sagemaker#PropertyNameSuggestion" } }, + "com.amazonaws.sagemaker#ProvisionedProductStatusMessage": { + "type": "string", + "traits": { + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ProvisioningParameter": { + "type": "structure", + "members": { + "Key": { + "target": "com.amazonaws.sagemaker#ProvisioningParameterKey", + "traits": { + "smithy.api#documentation": "

          The key that identifies a provisioning parameter.

          " + } + }, + "Value": { + "target": "com.amazonaws.sagemaker#ProvisioningParameterValue", + "traits": { + "smithy.api#documentation": "

          The value of the provisioning parameter.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          A key value pair used when you provision a project as a service catalog product. For\n information, see What is AWS Service\n Catalog.

          " + } + }, + "com.amazonaws.sagemaker#ProvisioningParameterKey": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 1000 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ProvisioningParameterValue": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 4096 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#ProvisioningParameters": { + "type": "list", + "member": { + "target": "com.amazonaws.sagemaker#ProvisioningParameter" + } + }, "com.amazonaws.sagemaker#PublicWorkforceTaskPrice": { "type": "structure", "members": { @@ -19715,6 +25386,49 @@ "smithy.api#documentation": "

          Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed.

          \n

          Use one of the following prices for bounding box tasks. Prices are in US dollars and\n should be based on the complexity of the task; the longer it takes in your initial\n testing, the more you should offer.

          \n
            \n
          • \n

            0.036

            \n
          • \n
          • \n

            0.048

            \n
          • \n
          • \n

            0.060

            \n
          • \n
          • \n

            0.072

            \n
          • \n
          • \n

            0.120

            \n
          • \n
          • \n

            0.240

            \n
          • \n
          • \n

            0.360

            \n
          • \n
          • \n

            0.480

            \n
          • \n
          • \n

            0.600

            \n
          • \n
          • \n

            0.720

            \n
          • \n
          • \n

            0.840

            \n
          • \n
          • \n

            0.960

            \n
          • \n
          • \n

            1.080

            \n
          • \n
          • \n

            1.200

            \n
          • \n
          \n

          Use one of the following prices for image classification, text classification, and\n custom tasks. Prices are in US dollars.

          \n
            \n
          • \n

            0.012

            \n
          • \n
          • \n

            0.024

            \n
          • \n
          • \n

            0.036

            \n
          • \n
          • \n

            0.048

            \n
          • \n
          • \n

            0.060

            \n
          • \n
          • \n

            0.072

            \n
          • \n
          • \n

            0.120

            \n
          • \n
          • \n

            0.240

            \n
          • \n
          • \n

            0.360

            \n
          • \n
          • \n

            0.480

            \n
          • \n
          • \n

            0.600

            \n
          • \n
          • \n

            0.720

            \n
          • \n
          • \n

            0.840

            \n
          • \n
          • \n

            0.960

            \n
          • \n
          • \n

            1.080

            \n
          • \n
          • \n

            1.200

            \n
          • \n
          \n

          Use one of the following prices for semantic segmentation tasks. Prices are in US\n dollars.

          \n
            \n
          • \n

            0.840

            \n
          • \n
          • \n

            0.960

            \n
          • \n
          • \n

            1.080

            \n
          • \n
          • \n

            1.200

            \n
          • \n
          \n

          Use one of the following prices for Textract AnalyzeDocument Important Form Key Amazon\n Augmented AI review tasks. Prices are in US dollars.

          \n
            \n
          • \n

            2.400

            \n
          • \n
          • \n

            2.280

            \n
          • \n
          • \n

            2.160

            \n
          • \n
          • \n

            2.040

            \n
          • \n
          • \n

            1.920

            \n
          • \n
          • \n

            1.800

            \n
          • \n
          • \n

            1.680

            \n
          • \n
          • \n

            1.560

            \n
          • \n
          • \n

            1.440

            \n
          • \n
          • \n

            1.320

            \n
          • \n
          • \n

            1.200

            \n
          • \n
          • \n

            1.080

            \n
          • \n
          • \n

            0.960

            \n
          • \n
          • \n

            0.840

            \n
          • \n
          • \n

            0.720

            \n
          • \n
          • \n

            0.600

            \n
          • \n
          • \n

            0.480

            \n
          • \n
          • \n

            0.360

            \n
          • \n
          • \n

            0.240

            \n
          • \n
          • \n

            0.120

            \n
          • \n
          • \n

            0.072

            \n
          • \n
          • \n

            0.060

            \n
          • \n
          • \n

            0.048

            \n
          • \n
          • \n

            0.036

            \n
          • \n
          • \n

            0.024

            \n
          • \n
          • \n

            0.012

            \n
          • \n
          \n

          Use one of the following prices for Rekognition DetectModerationLabels Amazon\n Augmented AI review tasks. Prices are in US dollars.

          \n
            \n
          • \n

            1.200

            \n
          • \n
          • \n

            1.080

            \n
          • \n
          • \n

            0.960

            \n
          • \n
          • \n

            0.840

            \n
          • \n
          • \n

            0.720

            \n
          • \n
          • \n

            0.600

            \n
          • \n
          • \n

            0.480

            \n
          • \n
          • \n

            0.360

            \n
          • \n
          • \n

            0.240

            \n
          • \n
          • \n

            0.120

            \n
          • \n
          • \n

            0.072

            \n
          • \n
          • \n

            0.060

            \n
          • \n
          • \n

            0.048

            \n
          • \n
          • \n

            0.036

            \n
          • \n
          • \n

            0.024

            \n
          • \n
          • \n

            0.012

            \n
          • \n
          \n

          Use one of the following prices for Amazon Augmented AI custom human review tasks.\n Prices are in US dollars.

          \n
            \n
          • \n

            1.200

            \n
          • \n
          • \n

            1.080

            \n
          • \n
          • \n

            0.960

            \n
          • \n
          • \n

            0.840

            \n
          • \n
          • \n

            0.720

            \n
          • \n
          • \n

            0.600

            \n
          • \n
          • \n

            0.480

            \n
          • \n
          • \n

            0.360

            \n
          • \n
          • \n

            0.240

            \n
          • \n
          • \n

            0.120

            \n
          • \n
          • \n

            0.072

            \n
          • \n
          • \n

            0.060

            \n
          • \n
          • \n

            0.048

            \n
          • \n
          • \n

            0.036

            \n
          • \n
          • \n

            0.024

            \n
          • \n
          • \n

            0.012

            \n
          • \n
          " } }, + "com.amazonaws.sagemaker#PutModelPackageGroupPolicy": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#PutModelPackageGroupPolicyInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#PutModelPackageGroupPolicyOutput" + }, + "traits": { + "smithy.api#documentation": "

          Adds a resource policy to control access to a model group. For information about\n resource policies, see Identity-based\n policies and resource-based policies in the AWS Identity and Access Management User Guide.

          " + } + }, + "com.amazonaws.sagemaker#PutModelPackageGroupPolicyInput": { + "type": "structure", + "members": { + "ModelPackageGroupName": { + "target": "com.amazonaws.sagemaker#EntityName", + "traits": { + "smithy.api#documentation": "

          The name of the model group to add a resource policy to.

          ", + "smithy.api#required": {} + } + }, + "ResourcePolicy": { + "target": "com.amazonaws.sagemaker#PolicyString", + "traits": { + "smithy.api#documentation": "

          The resource policy for the model group.

          ", + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#PutModelPackageGroupPolicyOutput": { + "type": "structure", + "members": { + "ModelPackageGroupArn": { + "target": "com.amazonaws.sagemaker#ModelPackageGroupArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package group.

          ", + "smithy.api#required": {} + } + } + } + }, "com.amazonaws.sagemaker#RealtimeInferenceInstanceTypes": { "type": "list", "member": { @@ -19736,6 +25450,169 @@ ] } }, + "com.amazonaws.sagemaker#RedshiftClusterId": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The Redshift cluster Identifier.

          ", + "smithy.api#length": { + "min": 1, + "max": 63 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#RedshiftDatabase": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The name of the Redshift database used in Redshift query execution.

          ", + "smithy.api#length": { + "min": 1, + "max": 64 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#RedshiftDatasetDefinition": { + "type": "structure", + "members": { + "ClusterId": { + "target": "com.amazonaws.sagemaker#RedshiftClusterId", + "traits": { + "smithy.api#required": {} + } + }, + "Database": { + "target": "com.amazonaws.sagemaker#RedshiftDatabase", + "traits": { + "smithy.api#required": {} + } + }, + "DbUser": { + "target": "com.amazonaws.sagemaker#RedshiftUserName", + "traits": { + "smithy.api#required": {} + } + }, + "QueryString": { + "target": "com.amazonaws.sagemaker#RedshiftQueryString", + "traits": { + "smithy.api#required": {} + } + }, + "ClusterRoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The IAM role attached to your Redshift cluster that Amazon SageMaker uses to generate datasets.

          ", + "smithy.api#required": {} + } + }, + "OutputS3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The location in Amazon S3 where the Redshift query results are stored.

          ", + "smithy.api#required": {} + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data from a\n Redshift execution.

          " + } + }, + "OutputFormat": { + "target": "com.amazonaws.sagemaker#RedshiftResultFormat", + "traits": { + "smithy.api#required": {} + } + }, + "OutputCompression": { + "target": "com.amazonaws.sagemaker#RedshiftResultCompressionType" + } + }, + "traits": { + "smithy.api#documentation": "

          Configuration for Redshift Dataset Definition input.

          " + } + }, + "com.amazonaws.sagemaker#RedshiftQueryString": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The SQL query statements to be executed.

          ", + "smithy.api#length": { + "min": 1, + "max": 4096 + }, + "smithy.api#pattern": "[\\s\\S]+" + } + }, + "com.amazonaws.sagemaker#RedshiftResultCompressionType": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The compression used for Redshift query results.

          ", + "smithy.api#enum": [ + { + "value": "None", + "name": "NONE" + }, + { + "value": "GZIP", + "name": "GZIP" + }, + { + "value": "BZIP2", + "name": "BZIP2" + }, + { + "value": "ZSTD", + "name": "ZSTD" + }, + { + "value": "SNAPPY", + "name": "SNAPPY" + } + ] + } + }, + "com.amazonaws.sagemaker#RedshiftResultFormat": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The data storage format for Redshift query results.

          ", + "smithy.api#enum": [ + { + "value": "PARQUET", + "name": "PARQUET" + }, + { + "value": "CSV", + "name": "CSV" + } + ] + } + }, + "com.amazonaws.sagemaker#RedshiftUserName": { + "type": "string", + "traits": { + "smithy.api#documentation": "

          The database user name used in Redshift query execution.

          ", + "smithy.api#length": { + "min": 1, + "max": 128 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#RegisterModelStepMetadata": { + "type": "structure", + "members": { + "Arn": { + "target": "com.amazonaws.sagemaker#String256", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model package.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Metadata for a register model job step.

          " + } + }, "com.amazonaws.sagemaker#RenderUiTemplate": { "type": "operation", "input": { @@ -19888,7 +25765,7 @@ "min": 0, "max": 256 }, - "smithy.api#pattern": "arn:.*" + "smithy.api#pattern": "arn:aws[a-z-]*:sagemaker:[a-z0-9-]*:[0-9]{12}:.+" } }, "com.amazonaws.sagemaker#ResourceConfig": { @@ -20048,6 +25925,30 @@ { "value": "ExperimentTrialComponent", "name": "EXPERIMENT_TRIAL_COMPONENT" + }, + { + "value": "Endpoint", + "name": "ENDPOINT" + }, + { + "value": "ModelPackage", + "name": "MODEL_PACKAGE" + }, + { + "value": "ModelPackageGroup", + "name": "MODEL_PACKAGE_GROUP" + }, + { + "value": "Pipeline", + "name": "PIPELINE" + }, + { + "value": "PipelineExecution", + "name": "PIPELINE_EXECUTION" + }, + { + "value": "FeatureGroup", + "name": "FEATURE_GROUP" } ] } @@ -20174,7 +26075,7 @@ "traits": { "smithy.api#length": { "min": 0, - "max": 20 + "max": 100 } } }, @@ -20246,6 +26147,27 @@ ] } }, + "com.amazonaws.sagemaker#S3StorageConfig": { + "type": "structure", + "members": { + "S3Uri": { + "target": "com.amazonaws.sagemaker#S3Uri", + "traits": { + "smithy.api#documentation": "

          The S3 URI, or location in Amazon S3, of OfflineStore.

          \n

          S3 URIs have a format similar to the following: s3://example-bucket/prefix/.

          ", + "smithy.api#required": {} + } + }, + "KmsKeyId": { + "target": "com.amazonaws.sagemaker#KmsKeyId", + "traits": { + "smithy.api#documentation": "

          The AWS Key Management Service (KMS) key ID of the key used to encrypt any objects\n written into the OfflineStore S3 location.

          \n

          The IAM role ARN that is passed as a parameter to\n CreateFeatureGroup must have the following permissions to the\n KmsKeyId:

          \n
            \n
          • \n

            \n \"kms:GenerateDataKey\"\n

            \n
          • \n
          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Amazon Simple Storage Service (Amazon S3) location and security configuration for OfflineStore.

          " + } + }, "com.amazonaws.sagemaker#S3Uri": { "type": "string", "traits": { @@ -20260,12 +26182,18 @@ "type": "service", "version": "2017-07-24", "operations": [ + { + "target": "com.amazonaws.sagemaker#AddAssociation" + }, { "target": "com.amazonaws.sagemaker#AddTags" }, { "target": "com.amazonaws.sagemaker#AssociateTrialComponent" }, + { + "target": "com.amazonaws.sagemaker#CreateAction" + }, { "target": "com.amazonaws.sagemaker#CreateAlgorithm" }, @@ -20275,6 +26203,9 @@ { "target": "com.amazonaws.sagemaker#CreateAppImageConfig" }, + { + "target": "com.amazonaws.sagemaker#CreateArtifact" + }, { "target": "com.amazonaws.sagemaker#CreateAutoMLJob" }, @@ -20284,6 +26215,9 @@ { "target": "com.amazonaws.sagemaker#CreateCompilationJob" }, + { + "target": "com.amazonaws.sagemaker#CreateContext" + }, { "target": "com.amazonaws.sagemaker#CreateDomain" }, @@ -20296,6 +26230,9 @@ { "target": "com.amazonaws.sagemaker#CreateExperiment" }, + { + "target": "com.amazonaws.sagemaker#CreateFeatureGroup" + }, { "target": "com.amazonaws.sagemaker#CreateFlowDefinition" }, @@ -20320,6 +26257,9 @@ { "target": "com.amazonaws.sagemaker#CreateModelPackage" }, + { + "target": "com.amazonaws.sagemaker#CreateModelPackageGroup" + }, { "target": "com.amazonaws.sagemaker#CreateMonitoringSchedule" }, @@ -20329,6 +26269,9 @@ { "target": "com.amazonaws.sagemaker#CreateNotebookInstanceLifecycleConfig" }, + { + "target": "com.amazonaws.sagemaker#CreatePipeline" + }, { "target": "com.amazonaws.sagemaker#CreatePresignedDomainUrl" }, @@ -20338,6 +26281,9 @@ { "target": "com.amazonaws.sagemaker#CreateProcessingJob" }, + { + "target": "com.amazonaws.sagemaker#CreateProject" + }, { "target": "com.amazonaws.sagemaker#CreateTrainingJob" }, @@ -20359,6 +26305,9 @@ { "target": "com.amazonaws.sagemaker#CreateWorkteam" }, + { + "target": "com.amazonaws.sagemaker#DeleteAction" + }, { "target": "com.amazonaws.sagemaker#DeleteAlgorithm" }, @@ -20368,9 +26317,18 @@ { "target": 
"com.amazonaws.sagemaker#DeleteAppImageConfig" }, + { + "target": "com.amazonaws.sagemaker#DeleteArtifact" + }, + { + "target": "com.amazonaws.sagemaker#DeleteAssociation" + }, { "target": "com.amazonaws.sagemaker#DeleteCodeRepository" }, + { + "target": "com.amazonaws.sagemaker#DeleteContext" + }, { "target": "com.amazonaws.sagemaker#DeleteDomain" }, @@ -20383,6 +26341,9 @@ { "target": "com.amazonaws.sagemaker#DeleteExperiment" }, + { + "target": "com.amazonaws.sagemaker#DeleteFeatureGroup" + }, { "target": "com.amazonaws.sagemaker#DeleteFlowDefinition" }, @@ -20401,6 +26362,12 @@ { "target": "com.amazonaws.sagemaker#DeleteModelPackage" }, + { + "target": "com.amazonaws.sagemaker#DeleteModelPackageGroup" + }, + { + "target": "com.amazonaws.sagemaker#DeleteModelPackageGroupPolicy" + }, { "target": "com.amazonaws.sagemaker#DeleteMonitoringSchedule" }, @@ -20410,6 +26377,12 @@ { "target": "com.amazonaws.sagemaker#DeleteNotebookInstanceLifecycleConfig" }, + { + "target": "com.amazonaws.sagemaker#DeletePipeline" + }, + { + "target": "com.amazonaws.sagemaker#DeleteProject" + }, { "target": "com.amazonaws.sagemaker#DeleteTags" }, @@ -20428,6 +26401,9 @@ { "target": "com.amazonaws.sagemaker#DeleteWorkteam" }, + { + "target": "com.amazonaws.sagemaker#DescribeAction" + }, { "target": "com.amazonaws.sagemaker#DescribeAlgorithm" }, @@ -20437,6 +26413,9 @@ { "target": "com.amazonaws.sagemaker#DescribeAppImageConfig" }, + { + "target": "com.amazonaws.sagemaker#DescribeArtifact" + }, { "target": "com.amazonaws.sagemaker#DescribeAutoMLJob" }, @@ -20446,6 +26425,9 @@ { "target": "com.amazonaws.sagemaker#DescribeCompilationJob" }, + { + "target": "com.amazonaws.sagemaker#DescribeContext" + }, { "target": "com.amazonaws.sagemaker#DescribeDomain" }, @@ -20458,6 +26440,9 @@ { "target": "com.amazonaws.sagemaker#DescribeExperiment" }, + { + "target": "com.amazonaws.sagemaker#DescribeFeatureGroup" + }, { "target": "com.amazonaws.sagemaker#DescribeFlowDefinition" }, @@ -20482,6 +26467,9 
@@ { "target": "com.amazonaws.sagemaker#DescribeModelPackage" }, + { + "target": "com.amazonaws.sagemaker#DescribeModelPackageGroup" + }, { "target": "com.amazonaws.sagemaker#DescribeMonitoringSchedule" }, @@ -20491,9 +26479,21 @@ { "target": "com.amazonaws.sagemaker#DescribeNotebookInstanceLifecycleConfig" }, + { + "target": "com.amazonaws.sagemaker#DescribePipeline" + }, + { + "target": "com.amazonaws.sagemaker#DescribePipelineDefinitionForExecution" + }, + { + "target": "com.amazonaws.sagemaker#DescribePipelineExecution" + }, { "target": "com.amazonaws.sagemaker#DescribeProcessingJob" }, + { + "target": "com.amazonaws.sagemaker#DescribeProject" + }, { "target": "com.amazonaws.sagemaker#DescribeSubscribedWorkteam" }, @@ -20518,12 +26518,27 @@ { "target": "com.amazonaws.sagemaker#DescribeWorkteam" }, + { + "target": "com.amazonaws.sagemaker#DisableSagemakerServicecatalogPortfolio" + }, { "target": "com.amazonaws.sagemaker#DisassociateTrialComponent" }, + { + "target": "com.amazonaws.sagemaker#EnableSagemakerServicecatalogPortfolio" + }, + { + "target": "com.amazonaws.sagemaker#GetModelPackageGroupPolicy" + }, + { + "target": "com.amazonaws.sagemaker#GetSagemakerServicecatalogPortfolioStatus" + }, { "target": "com.amazonaws.sagemaker#GetSearchSuggestions" }, + { + "target": "com.amazonaws.sagemaker#ListActions" + }, { "target": "com.amazonaws.sagemaker#ListAlgorithms" }, @@ -20533,6 +26548,12 @@ { "target": "com.amazonaws.sagemaker#ListApps" }, + { + "target": "com.amazonaws.sagemaker#ListArtifacts" + }, + { + "target": "com.amazonaws.sagemaker#ListAssociations" + }, { "target": "com.amazonaws.sagemaker#ListAutoMLJobs" }, @@ -20545,6 +26566,9 @@ { "target": "com.amazonaws.sagemaker#ListCompilationJobs" }, + { + "target": "com.amazonaws.sagemaker#ListContexts" + }, { "target": "com.amazonaws.sagemaker#ListDomains" }, @@ -20557,6 +26581,9 @@ { "target": "com.amazonaws.sagemaker#ListExperiments" }, + { + "target": "com.amazonaws.sagemaker#ListFeatureGroups" + }, { 
"target": "com.amazonaws.sagemaker#ListFlowDefinitions" }, @@ -20578,6 +26605,9 @@ { "target": "com.amazonaws.sagemaker#ListLabelingJobsForWorkteam" }, + { + "target": "com.amazonaws.sagemaker#ListModelPackageGroups" + }, { "target": "com.amazonaws.sagemaker#ListModelPackages" }, @@ -20596,9 +26626,24 @@ { "target": "com.amazonaws.sagemaker#ListNotebookInstances" }, + { + "target": "com.amazonaws.sagemaker#ListPipelineExecutions" + }, + { + "target": "com.amazonaws.sagemaker#ListPipelineExecutionSteps" + }, + { + "target": "com.amazonaws.sagemaker#ListPipelineParametersForExecution" + }, + { + "target": "com.amazonaws.sagemaker#ListPipelines" + }, { "target": "com.amazonaws.sagemaker#ListProcessingJobs" }, + { + "target": "com.amazonaws.sagemaker#ListProjects" + }, { "target": "com.amazonaws.sagemaker#ListSubscribedWorkteams" }, @@ -20629,6 +26674,9 @@ { "target": "com.amazonaws.sagemaker#ListWorkteams" }, + { + "target": "com.amazonaws.sagemaker#PutModelPackageGroupPolicy" + }, { "target": "com.amazonaws.sagemaker#RenderUiTemplate" }, @@ -20641,6 +26689,9 @@ { "target": "com.amazonaws.sagemaker#StartNotebookInstance" }, + { + "target": "com.amazonaws.sagemaker#StartPipelineExecution" + }, { "target": "com.amazonaws.sagemaker#StopAutoMLJob" }, @@ -20659,6 +26710,9 @@ { "target": "com.amazonaws.sagemaker#StopNotebookInstance" }, + { + "target": "com.amazonaws.sagemaker#StopPipelineExecution" + }, { "target": "com.amazonaws.sagemaker#StopProcessingJob" }, @@ -20668,12 +26722,21 @@ { "target": "com.amazonaws.sagemaker#StopTransformJob" }, + { + "target": "com.amazonaws.sagemaker#UpdateAction" + }, { "target": "com.amazonaws.sagemaker#UpdateAppImageConfig" }, + { + "target": "com.amazonaws.sagemaker#UpdateArtifact" + }, { "target": "com.amazonaws.sagemaker#UpdateCodeRepository" }, + { + "target": "com.amazonaws.sagemaker#UpdateContext" + }, { "target": "com.amazonaws.sagemaker#UpdateDomain" }, @@ -20689,6 +26752,9 @@ { "target": "com.amazonaws.sagemaker#UpdateImage" }, 
+ { + "target": "com.amazonaws.sagemaker#UpdateModelPackage" + }, { "target": "com.amazonaws.sagemaker#UpdateMonitoringSchedule" }, @@ -20698,6 +26764,12 @@ { "target": "com.amazonaws.sagemaker#UpdateNotebookInstanceLifecycleConfig" }, + { + "target": "com.amazonaws.sagemaker#UpdatePipeline" + }, + { + "target": "com.amazonaws.sagemaker#UpdatePipelineExecution" + }, { "target": "com.amazonaws.sagemaker#UpdateTrial" }, @@ -20732,6 +26804,21 @@ } } }, + "com.amazonaws.sagemaker#SagemakerServicecatalogStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Enabled", + "name": "ENABLED" + }, + { + "value": "Disabled", + "name": "DISABLED" + } + ] + } + }, "com.amazonaws.sagemaker#SamplingPercentage": { "type": "integer", "traits": { @@ -20876,6 +26963,24 @@ "traits": { "smithy.api#documentation": "

          The properties of a trial component.

          " } + }, + "Endpoint": { + "target": "com.amazonaws.sagemaker#Endpoint" + }, + "ModelPackage": { + "target": "com.amazonaws.sagemaker#ModelPackage" + }, + "ModelPackageGroup": { + "target": "com.amazonaws.sagemaker#ModelPackageGroup" + }, + "Pipeline": { + "target": "com.amazonaws.sagemaker#Pipeline" + }, + "PipelineExecution": { + "target": "com.amazonaws.sagemaker#PipelineExecution" + }, + "FeatureGroup": { + "target": "com.amazonaws.sagemaker#FeatureGroup" } }, "traits": { @@ -21022,6 +27127,10 @@ { "value": "MaxWaitTimeExceeded", "name": "MAX_WAIT_TIME_EXCEEDED" + }, + { + "value": "Updating", + "name": "UPDATING" } ] } @@ -21101,6 +27210,70 @@ "com.amazonaws.sagemaker#Seed": { "type": "long" }, + "com.amazonaws.sagemaker#ServiceCatalogEntityId": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 100 + }, + "smithy.api#pattern": "^[a-zA-Z0-9_\\-]*" + } + }, + "com.amazonaws.sagemaker#ServiceCatalogProvisionedProductDetails": { + "type": "structure", + "members": { + "ProvisionedProductId": { + "target": "com.amazonaws.sagemaker#ServiceCatalogEntityId", + "traits": { + "smithy.api#documentation": "

          The ID of the provisioned product.

          " + } + }, + "ProvisionedProductStatusMessage": { + "target": "com.amazonaws.sagemaker#ProvisionedProductStatusMessage", + "traits": { + "smithy.api#documentation": "

          The current status of the product.

          \n
            \n
          • \n

            \n AVAILABLE - Stable state, ready to perform any operation. The most recent operation succeeded and completed.

            \n
          • \n
          • \n

            \n UNDER_CHANGE - Transitive state. Operations performed might not have valid results. Wait for an AVAILABLE status before performing operations.

            \n
          • \n
          • \n

            \n TAINTED - Stable state, ready to perform any operation. The stack has completed the requested operation but is not exactly what was requested. For example, a request to update to a new version failed and the stack rolled back to the current version.

            \n
          • \n
          • \n

            \n ERROR - An unexpected error occurred. The provisioned product exists but the stack is not running. For example, CloudFormation received a parameter value that was not valid and could not launch the stack.

            \n
          • \n
          • \n

            \n PLAN_IN_PROGRESS - Transitive state. The plan operations were performed to provision a new product, but resources have not yet been created. After reviewing the list of resources to be created, execute the plan. Wait for an AVAILABLE status before performing operations.

            \n
          • \n
          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Details of a provisioned service catalog product. For information about service catalog,\n see What is AWS Service\n Catalog.

          " + } + }, + "com.amazonaws.sagemaker#ServiceCatalogProvisioningDetails": { + "type": "structure", + "members": { + "ProductId": { + "target": "com.amazonaws.sagemaker#ServiceCatalogEntityId", + "traits": { + "smithy.api#documentation": "

          The ID of the product to provision.

          ", + "smithy.api#required": {} + } + }, + "ProvisioningArtifactId": { + "target": "com.amazonaws.sagemaker#ServiceCatalogEntityId", + "traits": { + "smithy.api#documentation": "

          The ID of the provisioning artifact.

          ", + "smithy.api#required": {} + } + }, + "PathId": { + "target": "com.amazonaws.sagemaker#ServiceCatalogEntityId", + "traits": { + "smithy.api#documentation": "

          The path identifier of the product. This value is optional if the product has a default path, and required if the product has more than one path.

          " + } + }, + "ProvisioningParameters": { + "target": "com.amazonaws.sagemaker#ProvisioningParameters", + "traits": { + "smithy.api#documentation": "

          A list of key value pairs that you specify when you provision a product.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Details that you specify to provision a service catalog product. For information about\n service catalog, see What is AWS Service\n Catalog.

          " + } + }, "com.amazonaws.sagemaker#SessionExpirationDurationInSeconds": { "type": "integer", "traits": { @@ -21168,6 +27341,59 @@ "smithy.api#pattern": "arn:aws[a-z\\-]*:sns:[a-z0-9\\-]*:[0-9]{12}:[a-zA-Z0-9_.-]+" } }, + "com.amazonaws.sagemaker#SortActionsBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, + "com.amazonaws.sagemaker#SortArtifactsBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, + "com.amazonaws.sagemaker#SortAssociationsBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "SourceArn", + "name": "SOURCE_ARN" + }, + { + "value": "DestinationArn", + "name": "DESTINATION_ARN" + }, + { + "value": "SourceType", + "name": "SOURCE_TYPE" + }, + { + "value": "DestinationType", + "name": "DESTINATION_TYPE" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, "com.amazonaws.sagemaker#SortBy": { "type": "string", "traits": { @@ -21187,6 +27413,21 @@ ] } }, + "com.amazonaws.sagemaker#SortContextsBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, "com.amazonaws.sagemaker#SortExperimentsBy": { "type": "string", "traits": { @@ -21217,6 +27458,36 @@ ] } }, + "com.amazonaws.sagemaker#SortPipelineExecutionsBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "CreationTime", + "name": "CREATION_TIME" + }, + { + "value": "PipelineExecutionArn", + "name": "PIPELINE_EXECUTION_ARN" + } + ] + } + }, + "com.amazonaws.sagemaker#SortPipelinesBy": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Name", + "name": "NAME" + }, + { + "value": "CreationTime", + "name": "CREATION_TIME" + } + ] + } + }, 
"com.amazonaws.sagemaker#SortTrialComponentsBy": { "type": "string", "traits": { @@ -21319,6 +27590,16 @@ } } }, + "com.amazonaws.sagemaker#SourceUri": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + }, + "smithy.api#pattern": ".*" + } + }, "com.amazonaws.sagemaker#SplitType": { "type": "string", "traits": { @@ -21394,6 +27675,75 @@ } } }, + "com.amazonaws.sagemaker#StartPipelineExecution": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#StartPipelineExecutionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#StartPipelineExecutionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceLimitExceeded" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Starts a pipeline execution.

          " + } + }, + "com.amazonaws.sagemaker#StartPipelineExecutionRequest": { + "type": "structure", + "members": { + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The name of the pipeline.

          ", + "smithy.api#required": {} + } + }, + "PipelineExecutionDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineExecutionName", + "traits": { + "smithy.api#documentation": "

          The display name of the pipeline execution.

          " + } + }, + "PipelineParameters": { + "target": "com.amazonaws.sagemaker#ParameterList", + "traits": { + "smithy.api#documentation": "

          Contains a list of pipeline parameters. This list can be empty.

          " + } + }, + "PipelineExecutionDescription": { + "target": "com.amazonaws.sagemaker#PipelineExecutionDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline execution.

          " + } + }, + "ClientRequestToken": { + "target": "com.amazonaws.sagemaker#IdempotencyToken", + "traits": { + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n operation. An idempotent operation completes no more than one time.

          ", + "smithy.api#idempotencyToken": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#StartPipelineExecutionResponse": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " + } + } + } + }, "com.amazonaws.sagemaker#StatusDetails": { "type": "string", "traits": { @@ -21407,6 +27757,47 @@ "com.amazonaws.sagemaker#StatusMessage": { "type": "string" }, + "com.amazonaws.sagemaker#StepName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 256 + }, + "smithy.api#pattern": ".*" + } + }, + "com.amazonaws.sagemaker#StepStatus": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "Starting", + "name": "STARTING" + }, + { + "value": "Executing", + "name": "EXECUTING" + }, + { + "value": "Stopping", + "name": "STOPPING" + }, + { + "value": "Stopped", + "name": "STOPPED" + }, + { + "value": "Failed", + "name": "FAILED" + }, + { + "value": "Succeeded", + "name": "SUCCEEDED" + } + ] + } + }, "com.amazonaws.sagemaker#StopAutoMLJob": { "type": "operation", "input": { @@ -21558,6 +27949,54 @@ } } }, + "com.amazonaws.sagemaker#StopPipelineExecution": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#StopPipelineExecutionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#StopPipelineExecutionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Stops a pipeline execution.

          " + } + }, + "com.amazonaws.sagemaker#StopPipelineExecutionRequest": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          ", + "smithy.api#required": {} + } + }, + "ClientRequestToken": { + "target": "com.amazonaws.sagemaker#IdempotencyToken", + "traits": { + "smithy.api#documentation": "

          A unique, case-sensitive identifier that you provide to ensure the idempotency of the\n operation. An idempotent operation completes no more than one time.

          ", + "smithy.api#idempotencyToken": {}, + "smithy.api#required": {} + } + } + } + }, + "com.amazonaws.sagemaker#StopPipelineExecutionResponse": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          " + } + } + } + }, "com.amazonaws.sagemaker#StopProcessingJob": { "type": "operation", "input": { @@ -21678,6 +28117,15 @@ "smithy.api#pattern": ".+" } }, + "com.amazonaws.sagemaker#String2048": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 2048 + } + } + }, "com.amazonaws.sagemaker#String256": { "type": "string", "traits": { @@ -21687,6 +28135,15 @@ } } }, + "com.amazonaws.sagemaker#String64": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 0, + "max": 64 + } + } + }, "com.amazonaws.sagemaker#StringParameterValue": { "type": "string", "traits": { @@ -21791,6 +28248,16 @@ "smithy.api#mediaType": "application/json" } }, + "com.amazonaws.sagemaker#TableName": { + "type": "string", + "traits": { + "smithy.api#length": { + "min": 1, + "max": 255 + }, + "smithy.api#pattern": "[\\u0020-\\uD7FF\\uE000-\\uFFFD\\uD800\\uDC00-\\uDBFF\\uDFFF\\t]*" + } + }, "com.amazonaws.sagemaker#Tag": { "type": "structure", "members": { @@ -21976,6 +28443,10 @@ { "value": "coreml", "name": "COREML" + }, + { + "value": "jacinto_tda4vm", + "name": "JACINTO_TDA4VM" } ] } @@ -22228,9 +28699,62 @@ } } }, + "com.amazonaws.sagemaker#TerminationWaitInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0, + "max": 3600 + } + } + }, "com.amazonaws.sagemaker#Timestamp": { "type": "timestamp" }, + "com.amazonaws.sagemaker#TrafficRoutingConfig": { + "type": "structure", + "members": { + "Type": { + "target": "com.amazonaws.sagemaker#TrafficRoutingConfigType", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "WaitIntervalInSeconds": { + "target": "com.amazonaws.sagemaker#WaitIntervalInSeconds", + "traits": { + "smithy.api#documentation": "

          ", + "smithy.api#required": {} + } + }, + "CanarySize": { + "target": "com.amazonaws.sagemaker#CapacitySize", + "traits": { + "smithy.api#documentation": "

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Currently, the TrafficRoutingConfig API is not supported.

          " + } + }, + "com.amazonaws.sagemaker#TrafficRoutingConfigType": { + "type": "string", + "traits": { + "smithy.api#enum": [ + { + "value": "ALL_AT_ONCE", + "name": "ALL_AT_ONCE" + }, + { + "value": "CANARY", + "name": "CANARY" + } + ] + } + }, "com.amazonaws.sagemaker#TrainingInputMode": { "type": "string", "traits": { @@ -22621,7 +29145,7 @@ "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { - "smithy.api#documentation": "

          An array of key-value pairs. For more information, see Using\n Cost Allocation Tags in the AWS Billing and Cost Management User\n Guide.

          " + "smithy.api#documentation": "

          An array of key-value pairs. You can use tags to categorize your AWS resources in\n different ways, for example, by purpose, owner, or environment. For more information,\n see Tagging AWS\n Resources.

          " } } }, @@ -22710,7 +29234,7 @@ "min": 1, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#TrainingJobSortByOptions": { @@ -22809,6 +29333,20 @@ "smithy.api#documentation": "

          The numbers of training jobs launched by a hyperparameter tuning job, categorized by\n status.

          " } }, + "com.amazonaws.sagemaker#TrainingJobStepMetadata": { + "type": "structure", + "members": { + "Arn": { + "target": "com.amazonaws.sagemaker#TrainingJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the training job that was run by this step execution.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Metadata for a training job step.

          " + } + }, "com.amazonaws.sagemaker#TrainingJobSummaries": { "type": "list", "member": { @@ -22953,7 +29491,7 @@ "min": 0, "max": 1024 }, - "smithy.api#pattern": "[a-zA-Z_][a-zA-Z0-9_]*" + "smithy.api#pattern": "[a-zA-Z_][a-zA-Z0-9_]{0,1023}" } }, "com.amazonaws.sagemaker#TransformEnvironmentMap": { @@ -23331,7 +29869,7 @@ "min": 1, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#TransformJobStatus": { @@ -23361,6 +29899,20 @@ ] } }, + "com.amazonaws.sagemaker#TransformJobStepMetadata": { + "type": "structure", + "members": { + "Arn": { + "target": "com.amazonaws.sagemaker#TransformJobArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the transform job that was run by this step execution.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Metadata for a transform job step.

          " + } + }, "com.amazonaws.sagemaker#TransformJobSummaries": { "type": "list", "member": { @@ -23552,6 +30104,9 @@ "LastModifiedBy": { "target": "com.amazonaws.sagemaker#UserContext" }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, "Tags": { "target": "com.amazonaws.sagemaker#TagList", "traits": { @@ -23663,6 +30218,9 @@ "smithy.api#documentation": "

          The metrics for the component.

          " } }, + "MetadataProperties": { + "target": "com.amazonaws.sagemaker#MetadataProperties" + }, "SourceDetail": { "target": "com.amazonaws.sagemaker#TrialComponentSourceDetail", "traits": { @@ -23831,7 +30389,7 @@ } }, "com.amazonaws.sagemaker#TrialComponentParameterValue": { - "type": "structure", + "type": "union", "members": { "StringValue": { "target": "com.amazonaws.sagemaker#StringParameterValue", @@ -24234,44 +30792,111 @@ "smithy.api#documentation": "

          The ARN of the worker task template used to render the worker UI and tools for\n labeling job tasks.

          \n

          Use this parameter when you are creating a labeling job for 3D point cloud and video\n frame labeling jobs. Use your labeling job task type to select one of the following ARNs\n and use it with this parameter when you create a labeling job. Replace\n aws-region with the AWS region you are creating your labeling job\n in.

          \n\n

          \n 3D Point Cloud HumanTaskUiArns\n

          \n\n

          Use this HumanTaskUiArn for 3D point cloud object detection and 3D point\n cloud object detection adjustment labeling jobs.

          \n
            \n
          • \n

            \n arn:aws:sagemaker:aws-region:394669845002:human-task-ui/PointCloudObjectDetection\n

            \n
          • \n
          \n\n

          Use this HumanTaskUiArn for 3D point cloud object tracking and 3D point\n cloud object tracking adjustment labeling jobs.

          \n
            \n
          • \n

            \n arn:aws:sagemaker:aws-region:394669845002:human-task-ui/PointCloudObjectTracking\n

            \n
          • \n
          \n\n

          Use this HumanTaskUiArn for 3D point cloud semantic segmentation and 3D\n point cloud semantic segmentation adjustment labeling jobs.

          \n
            \n
          • \n

            \n arn:aws:sagemaker:aws-region:394669845002:human-task-ui/PointCloudSemanticSegmentation\n

            \n
          • \n
          \n\n

          \n Video Frame HumanTaskUiArns\n

          \n\n

          Use this HumanTaskUiArn for video frame object detection and video frame\n object detection adjustment labeling jobs.

          \n
            \n
          • \n

            \n arn:aws:sagemaker:region:394669845002:human-task-ui/VideoObjectDetection\n

            \n
          • \n
          \n\n

          Use this HumanTaskUiArn for video frame object tracking and video frame\n object tracking adjustment labeling jobs.

          \n
            \n
          • \n

            \n arn:aws:sagemaker:aws-region:394669845002:human-task-ui/VideoObjectTracking\n

            \n
          • \n
          " } } - }, + }, + "traits": { + "smithy.api#documentation": "

          Provided configuration information for the worker UI for a labeling job.

          " + } + }, + "com.amazonaws.sagemaker#UiTemplate": { + "type": "structure", + "members": { + "Content": { + "target": "com.amazonaws.sagemaker#TemplateContent", + "traits": { + "smithy.api#documentation": "

          The content of the Liquid template for the worker user interface.

          ", + "smithy.api#required": {} + } + } + }, + "traits": { + "smithy.api#documentation": "

          The Liquid template for the worker user interface.

          " + } + }, + "com.amazonaws.sagemaker#UiTemplateInfo": { + "type": "structure", + "members": { + "Url": { + "target": "com.amazonaws.sagemaker#TemplateUrl", + "traits": { + "smithy.api#documentation": "

          The URL for the user interface template.

          " + } + }, + "ContentSha256": { + "target": "com.amazonaws.sagemaker#TemplateContentSha256", + "traits": { + "smithy.api#documentation": "

          The SHA-256 digest of the contents of the template.

          " + } + } + }, + "traits": { + "smithy.api#documentation": "

          Container for user interface template information.

          " + } + }, + "com.amazonaws.sagemaker#UpdateAction": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#UpdateActionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#UpdateActionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ConflictException" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], "traits": { - "smithy.api#documentation": "

          Provided configuration information for the worker UI for a labeling job.

          " + "smithy.api#documentation": "

          Updates an action.

          " } }, - "com.amazonaws.sagemaker#UiTemplate": { + "com.amazonaws.sagemaker#UpdateActionRequest": { "type": "structure", "members": { - "Content": { - "target": "com.amazonaws.sagemaker#TemplateContent", + "ActionName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", "traits": { - "smithy.api#documentation": "

          The content of the Liquid template for the worker user interface.

          ", + "smithy.api#documentation": "

          The name of the action to update.

          ", "smithy.api#required": {} } + }, + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", + "traits": { + "smithy.api#documentation": "

          The new description for the action.

          " + } + }, + "Status": { + "target": "com.amazonaws.sagemaker#ActionStatus", + "traits": { + "smithy.api#documentation": "

          The new status for the action.

          " + } + }, + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", + "traits": { + "smithy.api#documentation": "

          The new list of properties. Overwrites the current property list.

          " + } + }, + "PropertiesToRemove": { + "target": "com.amazonaws.sagemaker#ListLineageEntityParameterKey", + "traits": { + "smithy.api#documentation": "

          A list of properties to remove.

          " + } } - }, - "traits": { - "smithy.api#documentation": "

          The Liquid template for the worker user interface.

          " } }, - "com.amazonaws.sagemaker#UiTemplateInfo": { + "com.amazonaws.sagemaker#UpdateActionResponse": { "type": "structure", "members": { - "Url": { - "target": "com.amazonaws.sagemaker#TemplateUrl", - "traits": { - "smithy.api#documentation": "

          The URL for the user interface template.

          " - } - }, - "ContentSha256": { - "target": "com.amazonaws.sagemaker#TemplateContentSha256", + "ActionArn": { + "target": "com.amazonaws.sagemaker#ActionArn", "traits": { - "smithy.api#documentation": "

          The SHA-256 digest of the contents of the template.

          " + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the action.

          " } } - }, - "traits": { - "smithy.api#documentation": "

          Container for user interface template information.

          " } }, "com.amazonaws.sagemaker#UpdateAppImageConfig": { @@ -24320,6 +30945,67 @@ } } }, + "com.amazonaws.sagemaker#UpdateArtifact": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#UpdateArtifactRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#UpdateArtifactResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ConflictException" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates an artifact.

          " + } + }, + "com.amazonaws.sagemaker#UpdateArtifactRequest": { + "type": "structure", + "members": { + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact to update.

          ", + "smithy.api#required": {} + } + }, + "ArtifactName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The new name for the artifact.

          " + } + }, + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", + "traits": { + "smithy.api#documentation": "

          The new list of properties. Overwrites the current property list.

          " + } + }, + "PropertiesToRemove": { + "target": "com.amazonaws.sagemaker#ListLineageEntityParameterKey", + "traits": { + "smithy.api#documentation": "

          A list of properties to remove.

          " + } + } + } + }, + "com.amazonaws.sagemaker#UpdateArtifactResponse": { + "type": "structure", + "members": { + "ArtifactArn": { + "target": "com.amazonaws.sagemaker#ArtifactArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the artifact.

          " + } + } + } + }, "com.amazonaws.sagemaker#UpdateCodeRepository": { "type": "operation", "input": { @@ -24362,6 +31048,67 @@ } } }, + "com.amazonaws.sagemaker#UpdateContext": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#UpdateContextRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#UpdateContextResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ConflictException" + }, + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates a context.

          " + } + }, + "com.amazonaws.sagemaker#UpdateContextRequest": { + "type": "structure", + "members": { + "ContextName": { + "target": "com.amazonaws.sagemaker#ExperimentEntityName", + "traits": { + "smithy.api#documentation": "

          The name of the context to update.

          ", + "smithy.api#required": {} + } + }, + "Description": { + "target": "com.amazonaws.sagemaker#ExperimentDescription", + "traits": { + "smithy.api#documentation": "

          The new description for the context.

          " + } + }, + "Properties": { + "target": "com.amazonaws.sagemaker#LineageEntityParameters", + "traits": { + "smithy.api#documentation": "

          The new list of properties. Overwrites the current property list.

          " + } + }, + "PropertiesToRemove": { + "target": "com.amazonaws.sagemaker#ListLineageEntityParameterKey", + "traits": { + "smithy.api#documentation": "

          A list of properties to remove.

          " + } + } + } + }, + "com.amazonaws.sagemaker#UpdateContextResponse": { + "type": "structure", + "members": { + "ContextArn": { + "target": "com.amazonaws.sagemaker#ContextArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the context.

          " + } + } + } + }, "com.amazonaws.sagemaker#UpdateDomain": { "type": "operation", "input": { @@ -24451,7 +31198,7 @@ "RetainAllVariantProperties": { "target": "com.amazonaws.sagemaker#Boolean", "traits": { - "smithy.api#documentation": "

          When updating endpoint resources, enables or disables the retention of variant\n properties, such as the instance count or the variant weight. To retain the variant\n properties of an endpoint when updating it, set RetainAllVariantProperties\n to true. To use the variant properties specified in a new\n EndpointConfig call when updating an endpoint, set\n RetainAllVariantProperties to false.

          " + "smithy.api#documentation": "

          When updating endpoint resources, enables or disables the retention of variant\n properties, such as the instance count or the variant weight. To retain the variant\n properties of an endpoint when updating it, set RetainAllVariantProperties\n to true. To use the variant properties specified in a new\n EndpointConfig call when updating an endpoint, set\n RetainAllVariantProperties to false. The default is\n false.

          " } }, "ExcludeRetainedVariantProperties": { @@ -24459,6 +31206,12 @@ "traits": { "smithy.api#documentation": "

          When you are updating endpoint resources with UpdateEndpointInput$RetainAllVariantProperties, whose value is set to\n true, ExcludeRetainedVariantProperties specifies the list\n of type VariantProperty to override with the values provided by\n EndpointConfig. If you don't specify a value for\n ExcludeRetainedVariantProperties, no variant properties are overridden.\n

          " } + }, + "DeploymentConfig": { + "target": "com.amazonaws.sagemaker#DeploymentConfig", + "traits": { + "smithy.api#documentation": "

          The deployment configuration for the endpoint to be updated.

          " + } } } }, @@ -24644,6 +31397,55 @@ } } }, + "com.amazonaws.sagemaker#UpdateModelPackage": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#UpdateModelPackageInput" + }, + "output": { + "target": "com.amazonaws.sagemaker#UpdateModelPackageOutput" + }, + "traits": { + "smithy.api#documentation": "

          Updates a versioned model.

          " + } + }, + "com.amazonaws.sagemaker#UpdateModelPackageInput": { + "type": "structure", + "members": { + "ModelPackageArn": { + "target": "com.amazonaws.sagemaker#ModelPackageArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          ", + "smithy.api#required": {} + } + }, + "ModelApprovalStatus": { + "target": "com.amazonaws.sagemaker#ModelApprovalStatus", + "traits": { + "smithy.api#documentation": "

          The approval status of the model.

          ", + "smithy.api#required": {} + } + }, + "ApprovalDescription": { + "target": "com.amazonaws.sagemaker#ApprovalDescription", + "traits": { + "smithy.api#documentation": "

          A description for the approval status of the model.

          " + } + } + } + }, + "com.amazonaws.sagemaker#UpdateModelPackageOutput": { + "type": "structure", + "members": { + "ModelPackageArn": { + "target": "com.amazonaws.sagemaker#ModelPackageArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the model.

          ", + "smithy.api#required": {} + } + } + } + }, "com.amazonaws.sagemaker#UpdateMonitoringSchedule": { "type": "operation", "input": { @@ -24845,6 +31647,122 @@ "type": "structure", "members": {} }, + "com.amazonaws.sagemaker#UpdatePipeline": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#UpdatePipelineRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#UpdatePipelineResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates a pipeline.

          " + } + }, + "com.amazonaws.sagemaker#UpdatePipelineExecution": { + "type": "operation", + "input": { + "target": "com.amazonaws.sagemaker#UpdatePipelineExecutionRequest" + }, + "output": { + "target": "com.amazonaws.sagemaker#UpdatePipelineExecutionResponse" + }, + "errors": [ + { + "target": "com.amazonaws.sagemaker#ResourceNotFound" + } + ], + "traits": { + "smithy.api#documentation": "

          Updates a pipeline execution.

          " + } + }, + "com.amazonaws.sagemaker#UpdatePipelineExecutionRequest": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the pipeline execution.

          ", + "smithy.api#required": {} + } + }, + "PipelineExecutionDescription": { + "target": "com.amazonaws.sagemaker#PipelineExecutionDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline execution.

          " + } + }, + "PipelineExecutionDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineExecutionName", + "traits": { + "smithy.api#documentation": "

          The display name of the pipeline execution.

          " + } + } + } + }, + "com.amazonaws.sagemaker#UpdatePipelineExecutionResponse": { + "type": "structure", + "members": { + "PipelineExecutionArn": { + "target": "com.amazonaws.sagemaker#PipelineExecutionArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the updated pipeline execution.

          " + } + } + } + }, + "com.amazonaws.sagemaker#UpdatePipelineRequest": { + "type": "structure", + "members": { + "PipelineName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The name of the pipeline to update.

          ", + "smithy.api#required": {} + } + }, + "PipelineDisplayName": { + "target": "com.amazonaws.sagemaker#PipelineName", + "traits": { + "smithy.api#documentation": "

          The display name of the pipeline.

          " + } + }, + "PipelineDefinition": { + "target": "com.amazonaws.sagemaker#PipelineDefinition", + "traits": { + "smithy.api#documentation": "

          The JSON pipeline definition.

          " + } + }, + "PipelineDescription": { + "target": "com.amazonaws.sagemaker#PipelineDescription", + "traits": { + "smithy.api#documentation": "

          The description of the pipeline.

          " + } + }, + "RoleArn": { + "target": "com.amazonaws.sagemaker#RoleArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the IAM role that the pipeline uses to execute.

          " + } + } + } + }, + "com.amazonaws.sagemaker#UpdatePipelineResponse": { + "type": "structure", + "members": { + "PipelineArn": { + "target": "com.amazonaws.sagemaker#PipelineArn", + "traits": { + "smithy.api#documentation": "

          The Amazon Resource Name (ARN) of the updated pipeline.

          " + } + } + } + }, "com.amazonaws.sagemaker#UpdateTrial": { "type": "operation", "input": { @@ -25260,7 +32178,7 @@ "min": 0, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#UserProfileSortKey": { @@ -25364,7 +32282,7 @@ "min": 0, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#VariantProperty": { @@ -25484,6 +32402,16 @@ } } }, + "com.amazonaws.sagemaker#WaitIntervalInSeconds": { + "type": "integer", + "traits": { + "smithy.api#box": {}, + "smithy.api#range": { + "min": 0, + "max": 3600 + } + } + }, "com.amazonaws.sagemaker#Workforce": { "type": "structure", "members": { @@ -25559,7 +32487,7 @@ "min": 1, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9]([a-zA-Z0-9\\-])*$" + "smithy.api#pattern": "^[a-zA-Z0-9]([a-zA-Z0-9\\-]){0,62}$" } }, "com.amazonaws.sagemaker#Workforces": { @@ -25657,7 +32585,7 @@ "min": 1, "max": 63 }, - "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9])*" + "smithy.api#pattern": "^[a-zA-Z0-9](-*[a-zA-Z0-9]){0,62}" } }, "com.amazonaws.sagemaker#Workteams": {