diff --git a/.changes/3.89.1 b/.changes/3.89.1
new file mode 100644
index 0000000000..d8271213bf
--- /dev/null
+++ b/.changes/3.89.1
@@ -0,0 +1,17 @@
+[
+    {
+        "type": "enhancement",
+        "category": "S3",
+        "description": "Documentation updates for s3"
+    },
+    {
+        "type": "api-change",
+        "category": "SageMaker",
+        "description": "SageMaker notebook instances now support enabling or disabling root access for notebook users. SageMaker Neo now supports rk3399 and rk3288 as compilation target devices."
+    },
+    {
+        "type": "api-change",
+        "category": "CodeBuild",
+        "description": "CodeBuild also now supports Git Submodules. CodeBuild now supports opting out of Encryption for S3 Build Logs. By default these logs are encrypted."
+    }
+]
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2705ce3eb4..1bb0baed31 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
 # CHANGELOG
 
+## next release
+
+* `Aws\CodeBuild` - CodeBuild also now supports Git Submodules. CodeBuild now supports opting out of Encryption for S3 Build Logs. By default these logs are encrypted.
+* `Aws\S3` - Documentation updates for s3
+* `Aws\SageMaker` - SageMaker notebook instances now support enabling or disabling root access for notebook users. SageMaker Neo now supports rk3399 and rk3288 as compilation target devices.
+
 ## 3.89.0 - 2019-03-07
 
 * `Aws\AppMesh` - This release includes a new version of the AWS App Mesh APIs. You can read more about the new APIs here: https://docs.aws.amazon.com/app-mesh/latest/APIReference/Welcome.html.
diff --git a/src/data/codebuild/2016-10-06/api-2.json b/src/data/codebuild/2016-10-06/api-2.json
index 5f3673a4ad..0a1f55be6b 100644
--- a/src/data/codebuild/2016-10-06/api-2.json
+++ b/src/data/codebuild/2016-10-06/api-2.json
@@ -631,6 +631,13 @@
       "type":"integer",
       "min":0
     },
+    "GitSubmodulesConfig":{
+      "type":"structure",
+      "required":["fetchSubmodules"],
+      "members":{
+        "fetchSubmodules":{"shape":"WrapperBoolean"}
+      }
+    },
     "ImagePullCredentialsType":{
       "type":"string",
       "enum":[
@@ -684,7 +691,7 @@
       "type":"string",
       "max":127,
       "min":1,
-      "pattern":"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=@+\\\\-]*)$"
+      "pattern":"^([\\p{L}\\p{Z}\\p{N}_.:/=@+\\-]*)$"
     },
     "LanguageType":{
       "type":"string",
@@ -952,6 +959,7 @@
       "type":{"shape":"SourceType"},
       "location":{"shape":"String"},
       "gitCloneDepth":{"shape":"GitCloneDepth"},
+      "gitSubmodulesConfig":{"shape":"GitSubmodulesConfig"},
       "buildspec":{"shape":"String"},
       "auth":{"shape":"SourceAuth"},
       "reportBuildStatus":{"shape":"WrapperBoolean"},
@@ -1008,7 +1016,8 @@
       "required":["status"],
       "members":{
         "status":{"shape":"LogsConfigStatusType"},
-        "location":{"shape":"String"}
+        "location":{"shape":"String"},
+        "encryptionDisabled":{"shape":"WrapperBoolean"}
       }
     },
     "SecurityGroupIds":{
@@ -1087,6 +1096,7 @@
       "sourceLocationOverride":{"shape":"String"},
       "sourceAuthOverride":{"shape":"SourceAuth"},
       "gitCloneDepthOverride":{"shape":"GitCloneDepth"},
+      "gitSubmodulesConfigOverride":{"shape":"GitSubmodulesConfig"},
       "buildspecOverride":{"shape":"String"},
       "insecureSslOverride":{"shape":"WrapperBoolean"},
       "reportBuildStatusOverride":{"shape":"WrapperBoolean"},
@@ -1208,7 +1218,7 @@
       "type":"string",
       "max":255,
       "min":1,
-      "pattern":"^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=@+\\\\-]*)$"
+      "pattern":"^([\\p{L}\\p{Z}\\p{N}_.:/=@+\\-]*)$"
     },
     "VpcConfig":{
       "type":"structure",
diff --git a/src/data/codebuild/2016-10-06/api-2.json.php b/src/data/codebuild/2016-10-06/api-2.json.php
index cf982124d2..34dbf5df36 100644
--- a/src/data/codebuild/2016-10-06/api-2.json.php
+++ b/src/data/codebuild/2016-10-06/api-2.json.php
@@ -1,3 +1,3 @@
 <?php
 // This file was auto-generated from sdk-root/src/data/codebuild/2016-10-06/api-2.json
-return [ 'version' => '2.0', 'metadata' => [ 'apiVersion' => '2016-10-06', 'endpointPrefix' => 'codebuild', 'jsonVersion' => '1.1', 'protocol' => 'json', 'serviceFullName' => 'AWS CodeBuild', 'serviceId' => 'CodeBuild', 'signatureVersion' => 'v4', 'targetPrefix' => 'CodeBuild_20161006', 'uid' => 'codebuild-2016-10-06', ], 'operations' => [ 'BatchDeleteBuilds' => [ 'name' => 'BatchDeleteBuilds', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'BatchDeleteBuildsInput', ], 'output' => [ 'shape' => 'BatchDeleteBuildsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'BatchGetBuilds' => [ 'name' => 'BatchGetBuilds', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'BatchGetBuildsInput', ], 'output' => [ 'shape' => 'BatchGetBuildsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'BatchGetProjects' => [ 'name' => 'BatchGetProjects', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'BatchGetProjectsInput', ], 'output' => [ 'shape' => 'BatchGetProjectsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'CreateProject' => [ 'name' => 'CreateProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateProjectInput', ], 'output' => [ 'shape' => 'CreateProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceAlreadyExistsException', ], [ 'shape' => 'AccountLimitExceededException', ], ], ], 'CreateWebhook' => [ 'name' => 'CreateWebhook', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateWebhookInput', ], 'output' => [ 'shape' => 'CreateWebhookOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'OAuthProviderException', ], [ 'shape' => 'ResourceAlreadyExistsException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'DeleteProject' => [ 'name' => 'DeleteProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteProjectInput', ], 'output' => [ 'shape' => 'DeleteProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'DeleteSourceCredentials' => [ 'name' => 'DeleteSourceCredentials', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteSourceCredentialsInput', ], 'output' => [ 'shape' => 'DeleteSourceCredentialsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'DeleteWebhook' => [ 'name' => 'DeleteWebhook', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteWebhookInput', ], 'output' => [ 'shape' => 'DeleteWebhookOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], [ 'shape' => 'OAuthProviderException', ], ], ], 'ImportSourceCredentials' => [ 'name' => 'ImportSourceCredentials', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ImportSourceCredentialsInput', ], 'output' => [ 'shape' => 'ImportSourceCredentialsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'AccountLimitExceededException', ], ], ], 'InvalidateProjectCache' => [ 'name' => 'InvalidateProjectCache', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'InvalidateProjectCacheInput', ], 'output' => [ 'shape' => 'InvalidateProjectCacheOutput', ], 'errors' => [ [
'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'ListBuilds' => [ 'name' => 'ListBuilds', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListBuildsInput', ], 'output' => [ 'shape' => 'ListBuildsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'ListBuildsForProject' => [ 'name' => 'ListBuildsForProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListBuildsForProjectInput', ], 'output' => [ 'shape' => 'ListBuildsForProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'ListCuratedEnvironmentImages' => [ 'name' => 'ListCuratedEnvironmentImages', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListCuratedEnvironmentImagesInput', ], 'output' => [ 'shape' => 'ListCuratedEnvironmentImagesOutput', ], ], 'ListProjects' => [ 'name' => 'ListProjects', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListProjectsInput', ], 'output' => [ 'shape' => 'ListProjectsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'ListSourceCredentials' => [ 'name' => 'ListSourceCredentials', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListSourceCredentialsInput', ], 'output' => [ 'shape' => 'ListSourceCredentialsOutput', ], ], 'StartBuild' => [ 'name' => 'StartBuild', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StartBuildInput', ], 'output' => [ 'shape' => 'StartBuildOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], [ 'shape' => 'AccountLimitExceededException', ], ], ], 'StopBuild' => [ 'name' => 'StopBuild', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopBuildInput', ], 'output' => [ 'shape' => 'StopBuildOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'UpdateProject' => [ 'name' => 'UpdateProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateProjectInput', ], 'output' => [ 'shape' => 'UpdateProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'UpdateWebhook' => [ 'name' => 'UpdateWebhook', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateWebhookInput', ], 'output' => [ 'shape' => 'UpdateWebhookOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], [ 'shape' => 'OAuthProviderException', ], ], ], ], 'shapes' => [ 'AccountLimitExceededException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'ArtifactNamespace' => [ 'type' => 'string', 'enum' => [ 'NONE', 'BUILD_ID', ], ], 'ArtifactPackaging' => [ 'type' => 'string', 'enum' => [ 'NONE', 'ZIP', ], ], 'ArtifactsType' => [ 'type' => 'string', 'enum' => [ 'CODEPIPELINE', 'S3', 'NO_ARTIFACTS', ], ], 'AuthType' => [ 'type' => 'string', 'enum' => [ 'OAUTH', 'BASIC_AUTH', 'PERSONAL_ACCESS_TOKEN', ], ], 'BatchDeleteBuildsInput' => [ 'type' => 'structure', 'required' => [ 'ids', ], 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], ], ], 'BatchDeleteBuildsOutput' => [ 'type' => 'structure', 'members' => [ 'buildsDeleted' => [ 'shape' => 'BuildIds', ], 'buildsNotDeleted' => [ 'shape' => 'BuildsNotDeleted', ], ], ], 
'BatchGetBuildsInput' => [ 'type' => 'structure', 'required' => [ 'ids', ], 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], ], ], 'BatchGetBuildsOutput' => [ 'type' => 'structure', 'members' => [ 'builds' => [ 'shape' => 'Builds', ], 'buildsNotFound' => [ 'shape' => 'BuildIds', ], ], ], 'BatchGetProjectsInput' => [ 'type' => 'structure', 'required' => [ 'names', ], 'members' => [ 'names' => [ 'shape' => 'ProjectNames', ], ], ], 'BatchGetProjectsOutput' => [ 'type' => 'structure', 'members' => [ 'projects' => [ 'shape' => 'Projects', ], 'projectsNotFound' => [ 'shape' => 'ProjectNames', ], ], ], 'Boolean' => [ 'type' => 'boolean', ], 'Build' => [ 'type' => 'structure', 'members' => [ 'id' => [ 'shape' => 'NonEmptyString', ], 'arn' => [ 'shape' => 'NonEmptyString', ], 'startTime' => [ 'shape' => 'Timestamp', ], 'endTime' => [ 'shape' => 'Timestamp', ], 'currentPhase' => [ 'shape' => 'String', ], 'buildStatus' => [ 'shape' => 'StatusType', ], 'sourceVersion' => [ 'shape' => 'NonEmptyString', ], 'resolvedSourceVersion' => [ 'shape' => 'NonEmptyString', ], 'projectName' => [ 'shape' => 'NonEmptyString', ], 'phases' => [ 'shape' => 'BuildPhases', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'secondarySourceVersions' => [ 'shape' => 'ProjectSecondarySourceVersions', ], 'artifacts' => [ 'shape' => 'BuildArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'BuildArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'logs' => [ 'shape' => 'LogsLocation', ], 'timeoutInMinutes' => [ 'shape' => 'WrapperInt', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'WrapperInt', ], 'buildComplete' => [ 'shape' => 'Boolean', ], 'initiator' => [ 'shape' => 'String', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'networkInterface' => [ 'shape' => 'NetworkInterface', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], ], ], 'BuildArtifacts' => [ 'type' => 'structure', 'members' => [ 'location' => [ 'shape' => 'String', ], 'sha256sum' => [ 'shape' => 'String', ], 'md5sum' => [ 'shape' => 'String', ], 'overrideArtifactName' => [ 'shape' => 'WrapperBoolean', ], 'encryptionDisabled' => [ 'shape' => 'WrapperBoolean', ], 'artifactIdentifier' => [ 'shape' => 'String', ], ], ], 'BuildArtifactsList' => [ 'type' => 'list', 'member' => [ 'shape' => 'BuildArtifacts', ], 'max' => 12, 'min' => 0, ], 'BuildIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 100, 'min' => 1, ], 'BuildNotDeleted' => [ 'type' => 'structure', 'members' => [ 'id' => [ 'shape' => 'NonEmptyString', ], 'statusCode' => [ 'shape' => 'String', ], ], ], 'BuildPhase' => [ 'type' => 'structure', 'members' => [ 'phaseType' => [ 'shape' => 'BuildPhaseType', ], 'phaseStatus' => [ 'shape' => 'StatusType', ], 'startTime' => [ 'shape' => 'Timestamp', ], 'endTime' => [ 'shape' => 'Timestamp', ], 'durationInSeconds' => [ 'shape' => 'WrapperLong', ], 'contexts' => [ 'shape' => 'PhaseContexts', ], ], ], 'BuildPhaseType' => [ 'type' => 'string', 'enum' => [ 'SUBMITTED', 'QUEUED', 'PROVISIONING', 'DOWNLOAD_SOURCE', 'INSTALL', 'PRE_BUILD', 'BUILD', 'POST_BUILD', 'UPLOAD_ARTIFACTS', 'FINALIZING', 'COMPLETED', ], ], 'BuildPhases' => [ 'type' => 'list', 'member' => [ 'shape' => 'BuildPhase', ], ], 'Builds' => [ 'type' => 'list', 'member' => [ 'shape' => 'Build', ], ], 'BuildsNotDeleted' => [ 'type' => 'list', 'member' => [ 'shape' => 'BuildNotDeleted', ], ], 'CacheMode' => 
[ 'type' => 'string', 'enum' => [ 'LOCAL_DOCKER_LAYER_CACHE', 'LOCAL_SOURCE_CACHE', 'LOCAL_CUSTOM_CACHE', ], ], 'CacheType' => [ 'type' => 'string', 'enum' => [ 'NO_CACHE', 'S3', 'LOCAL', ], ], 'CloudWatchLogsConfig' => [ 'type' => 'structure', 'required' => [ 'status', ], 'members' => [ 'status' => [ 'shape' => 'LogsConfigStatusType', ], 'groupName' => [ 'shape' => 'String', ], 'streamName' => [ 'shape' => 'String', ], ], ], 'ComputeType' => [ 'type' => 'string', 'enum' => [ 'BUILD_GENERAL1_SMALL', 'BUILD_GENERAL1_MEDIUM', 'BUILD_GENERAL1_LARGE', ], ], 'CreateProjectInput' => [ 'type' => 'structure', 'required' => [ 'name', 'source', 'artifacts', 'environment', 'serviceRole', ], 'members' => [ 'name' => [ 'shape' => 'ProjectName', ], 'description' => [ 'shape' => 'ProjectDescription', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'artifacts' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'ProjectArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'timeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], 'tags' => [ 'shape' => 'TagList', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'badgeEnabled' => [ 'shape' => 'WrapperBoolean', ], 'logsConfig' => [ 'shape' => 'LogsConfig', ], ], ], 'CreateProjectOutput' => [ 'type' => 'structure', 'members' => [ 'project' => [ 'shape' => 'Project', ], ], ], 'CreateWebhookInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'ProjectName', ], 'branchFilter' => [ 'shape' => 'String', ], 'filterGroups' => [ 'shape' => 'FilterGroups', ], ], ], 'CreateWebhookOutput' => [ 'type' => 'structure', 'members' => [ 'webhook' => [ 'shape' => 'Webhook', ], ], ], 'CredentialProviderType' => [ 'type' => 'string', 'enum' => [ 'SECRETS_MANAGER', ], ], 'DeleteProjectInput' => [ 'type' => 'structure', 'required' => [ 'name', ], 'members' => [ 'name' => [ 'shape' => 'NonEmptyString', ], ], ], 'DeleteProjectOutput' => [ 'type' => 'structure', 'members' => [], ], 'DeleteSourceCredentialsInput' => [ 'type' => 'structure', 'required' => [ 'arn', ], 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], ], ], 'DeleteSourceCredentialsOutput' => [ 'type' => 'structure', 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], ], ], 'DeleteWebhookInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'ProjectName', ], ], ], 'DeleteWebhookOutput' => [ 'type' => 'structure', 'members' => [], ], 'EnvironmentImage' => [ 'type' => 'structure', 'members' => [ 'name' => [ 'shape' => 'String', ], 'description' => [ 'shape' => 'String', ], 'versions' => [ 'shape' => 'ImageVersions', ], ], ], 'EnvironmentImages' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentImage', ], ], 'EnvironmentLanguage' => [ 'type' => 'structure', 'members' => [ 'language' => [ 'shape' => 'LanguageType', ], 'images' => [ 'shape' => 'EnvironmentImages', ], ], ], 'EnvironmentLanguages' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentLanguage', ], ], 'EnvironmentPlatform' => [ 'type' => 'structure', 'members' => [ 'platform' => [ 'shape' => 'PlatformType', ], 'languages' => [ 'shape' => 'EnvironmentLanguages', ], ], ], 'EnvironmentPlatforms' => [ 'type' => 'list', 'member' => [ 
'shape' => 'EnvironmentPlatform', ], ], 'EnvironmentType' => [ 'type' => 'string', 'enum' => [ 'WINDOWS_CONTAINER', 'LINUX_CONTAINER', ], ], 'EnvironmentVariable' => [ 'type' => 'structure', 'required' => [ 'name', 'value', ], 'members' => [ 'name' => [ 'shape' => 'NonEmptyString', ], 'value' => [ 'shape' => 'String', ], 'type' => [ 'shape' => 'EnvironmentVariableType', ], ], ], 'EnvironmentVariableType' => [ 'type' => 'string', 'enum' => [ 'PLAINTEXT', 'PARAMETER_STORE', ], ], 'EnvironmentVariables' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentVariable', ], ], 'FilterGroup' => [ 'type' => 'list', 'member' => [ 'shape' => 'WebhookFilter', ], ], 'FilterGroups' => [ 'type' => 'list', 'member' => [ 'shape' => 'FilterGroup', ], ], 'GitCloneDepth' => [ 'type' => 'integer', 'min' => 0, ], 'ImagePullCredentialsType' => [ 'type' => 'string', 'enum' => [ 'CODEBUILD', 'SERVICE_ROLE', ], ], 'ImageVersions' => [ 'type' => 'list', 'member' => [ 'shape' => 'String', ], ], 'ImportSourceCredentialsInput' => [ 'type' => 'structure', 'required' => [ 'token', 'serverType', 'authType', ], 'members' => [ 'username' => [ 'shape' => 'NonEmptyString', ], 'token' => [ 'shape' => 'SensitiveNonEmptyString', ], 'serverType' => [ 'shape' => 'ServerType', ], 'authType' => [ 'shape' => 'AuthType', ], ], ], 'ImportSourceCredentialsOutput' => [ 'type' => 'structure', 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], ], ], 'InvalidInputException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'InvalidateProjectCacheInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'NonEmptyString', ], ], ], 'InvalidateProjectCacheOutput' => [ 'type' => 'structure', 'members' => [], ], 'KeyInput' => [ 'type' => 'string', 'max' => 127, 'min' => 1, 'pattern' => '^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=@+\\\\-]*)$', ], 'LanguageType' => [ 'type' => 'string', 'enum' => [ 'JAVA', 'PYTHON', 'NODE_JS', 'RUBY', 'GOLANG', 'DOCKER', 'ANDROID', 'DOTNET', 'BASE', 'PHP', ], ], 'ListBuildsForProjectInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'NonEmptyString', ], 'sortOrder' => [ 'shape' => 'SortOrderType', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListBuildsForProjectOutput' => [ 'type' => 'structure', 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListBuildsInput' => [ 'type' => 'structure', 'members' => [ 'sortOrder' => [ 'shape' => 'SortOrderType', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListBuildsOutput' => [ 'type' => 'structure', 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListCuratedEnvironmentImagesInput' => [ 'type' => 'structure', 'members' => [], ], 'ListCuratedEnvironmentImagesOutput' => [ 'type' => 'structure', 'members' => [ 'platforms' => [ 'shape' => 'EnvironmentPlatforms', ], ], ], 'ListProjectsInput' => [ 'type' => 'structure', 'members' => [ 'sortBy' => [ 'shape' => 'ProjectSortByType', ], 'sortOrder' => [ 'shape' => 'SortOrderType', ], 'nextToken' => [ 'shape' => 'NonEmptyString', ], ], ], 'ListProjectsOutput' => [ 'type' => 'structure', 'members' => [ 'nextToken' => [ 'shape' => 'String', ], 'projects' => [ 'shape' => 'ProjectNames', ], ], ], 'ListSourceCredentialsInput' => [ 'type' => 'structure', 'members' => [], ], 'ListSourceCredentialsOutput' => [ 'type' => 'structure', 'members' => [ 'sourceCredentialsInfos' => [ 'shape' => 
'SourceCredentialsInfos', ], ], ], 'LogsConfig' => [ 'type' => 'structure', 'members' => [ 'cloudWatchLogs' => [ 'shape' => 'CloudWatchLogsConfig', ], 's3Logs' => [ 'shape' => 'S3LogsConfig', ], ], ], 'LogsConfigStatusType' => [ 'type' => 'string', 'enum' => [ 'ENABLED', 'DISABLED', ], ], 'LogsLocation' => [ 'type' => 'structure', 'members' => [ 'groupName' => [ 'shape' => 'String', ], 'streamName' => [ 'shape' => 'String', ], 'deepLink' => [ 'shape' => 'String', ], 's3DeepLink' => [ 'shape' => 'String', ], 'cloudWatchLogs' => [ 'shape' => 'CloudWatchLogsConfig', ], 's3Logs' => [ 'shape' => 'S3LogsConfig', ], ], ], 'NetworkInterface' => [ 'type' => 'structure', 'members' => [ 'subnetId' => [ 'shape' => 'NonEmptyString', ], 'networkInterfaceId' => [ 'shape' => 'NonEmptyString', ], ], ], 'NonEmptyString' => [ 'type' => 'string', 'min' => 1, ], 'OAuthProviderException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'PhaseContext' => [ 'type' => 'structure', 'members' => [ 'statusCode' => [ 'shape' => 'String', ], 'message' => [ 'shape' => 'String', ], ], ], 'PhaseContexts' => [ 'type' => 'list', 'member' => [ 'shape' => 'PhaseContext', ], ], 'PlatformType' => [ 'type' => 'string', 'enum' => [ 'DEBIAN', 'AMAZON_LINUX', 'UBUNTU', 'WINDOWS_SERVER', ], ], 'Project' => [ 'type' => 'structure', 'members' => [ 'name' => [ 'shape' => 'ProjectName', ], 'arn' => [ 'shape' => 'String', ], 'description' => [ 'shape' => 'ProjectDescription', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'artifacts' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'ProjectArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'timeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], 'tags' => [ 'shape' => 'TagList', ], 'created' => [ 'shape' => 'Timestamp', ], 'lastModified' => [ 'shape' => 'Timestamp', ], 'webhook' => [ 'shape' => 'Webhook', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'badge' => [ 'shape' => 'ProjectBadge', ], 'logsConfig' => [ 'shape' => 'LogsConfig', ], ], ], 'ProjectArtifacts' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'ArtifactsType', ], 'location' => [ 'shape' => 'String', ], 'path' => [ 'shape' => 'String', ], 'namespaceType' => [ 'shape' => 'ArtifactNamespace', ], 'name' => [ 'shape' => 'String', ], 'packaging' => [ 'shape' => 'ArtifactPackaging', ], 'overrideArtifactName' => [ 'shape' => 'WrapperBoolean', ], 'encryptionDisabled' => [ 'shape' => 'WrapperBoolean', ], 'artifactIdentifier' => [ 'shape' => 'String', ], ], ], 'ProjectArtifactsList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProjectArtifacts', ], 'max' => 12, 'min' => 0, ], 'ProjectBadge' => [ 'type' => 'structure', 'members' => [ 'badgeEnabled' => [ 'shape' => 'Boolean', ], 'badgeRequestUrl' => [ 'shape' => 'String', ], ], ], 'ProjectCache' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'CacheType', ], 'location' => [ 'shape' => 'String', ], 'modes' => [ 'shape' => 'ProjectCacheModes', ], ], ], 'ProjectCacheModes' => [ 'type' => 'list', 'member' => [ 'shape' => 'CacheMode', ], ], 'ProjectDescription' => [ 'type' => 'string', 'max' => 255, 'min' => 0, ], 'ProjectEnvironment' => [ 'type' => 'structure', 'required' => [ 
'type', 'image', 'computeType', ], 'members' => [ 'type' => [ 'shape' => 'EnvironmentType', ], 'image' => [ 'shape' => 'NonEmptyString', ], 'computeType' => [ 'shape' => 'ComputeType', ], 'environmentVariables' => [ 'shape' => 'EnvironmentVariables', ], 'privilegedMode' => [ 'shape' => 'WrapperBoolean', ], 'certificate' => [ 'shape' => 'String', ], 'registryCredential' => [ 'shape' => 'RegistryCredential', ], 'imagePullCredentialsType' => [ 'shape' => 'ImagePullCredentialsType', ], ], ], 'ProjectName' => [ 'type' => 'string', 'max' => 255, 'min' => 2, 'pattern' => '[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}', ], 'ProjectNames' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 100, 'min' => 1, ], 'ProjectSecondarySourceVersions' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProjectSourceVersion', ], 'max' => 12, 'min' => 0, ], 'ProjectSortByType' => [ 'type' => 'string', 'enum' => [ 'NAME', 'CREATED_TIME', 'LAST_MODIFIED_TIME', ], ], 'ProjectSource' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'SourceType', ], 'location' => [ 'shape' => 'String', ], 'gitCloneDepth' => [ 'shape' => 'GitCloneDepth', ], 'buildspec' => [ 'shape' => 'String', ], 'auth' => [ 'shape' => 'SourceAuth', ], 'reportBuildStatus' => [ 'shape' => 'WrapperBoolean', ], 'insecureSsl' => [ 'shape' => 'WrapperBoolean', ], 'sourceIdentifier' => [ 'shape' => 'String', ], ], ], 'ProjectSourceVersion' => [ 'type' => 'structure', 'required' => [ 'sourceIdentifier', 'sourceVersion', ], 'members' => [ 'sourceIdentifier' => [ 'shape' => 'String', ], 'sourceVersion' => [ 'shape' => 'String', ], ], ], 'ProjectSources' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProjectSource', ], 'max' => 12, 'min' => 0, ], 'Projects' => [ 'type' => 'list', 'member' => [ 'shape' => 'Project', ], ], 'RegistryCredential' => [ 'type' => 'structure', 'required' => [ 'credential', 'credentialProvider', ], 'members' => [ 'credential' => [ 'shape' => 'NonEmptyString', ], 'credentialProvider' => [ 'shape' => 'CredentialProviderType', ], ], ], 'ResourceAlreadyExistsException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'ResourceNotFoundException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'S3LogsConfig' => [ 'type' => 'structure', 'required' => [ 'status', ], 'members' => [ 'status' => [ 'shape' => 'LogsConfigStatusType', ], 'location' => [ 'shape' => 'String', ], ], ], 'SecurityGroupIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 5, ], 'SensitiveNonEmptyString' => [ 'type' => 'string', 'min' => 1, 'sensitive' => true, ], 'ServerType' => [ 'type' => 'string', 'enum' => [ 'GITHUB', 'BITBUCKET', 'GITHUB_ENTERPRISE', ], ], 'SortOrderType' => [ 'type' => 'string', 'enum' => [ 'ASCENDING', 'DESCENDING', ], ], 'SourceAuth' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'SourceAuthType', ], 'resource' => [ 'shape' => 'String', ], ], ], 'SourceAuthType' => [ 'type' => 'string', 'enum' => [ 'OAUTH', ], ], 'SourceCredentialsInfo' => [ 'type' => 'structure', 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], 'serverType' => [ 'shape' => 'ServerType', ], 'authType' => [ 'shape' => 'AuthType', ], ], ], 'SourceCredentialsInfos' => [ 'type' => 'list', 'member' => [ 'shape' => 'SourceCredentialsInfo', ], ], 'SourceType' => [ 'type' => 'string', 'enum' => [ 'CODECOMMIT', 'CODEPIPELINE', 'GITHUB', 'S3', 'BITBUCKET', 'GITHUB_ENTERPRISE', 'NO_SOURCE', ], ], 
'StartBuildInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'NonEmptyString', ], 'secondarySourcesOverride' => [ 'shape' => 'ProjectSources', ], 'secondarySourcesVersionOverride' => [ 'shape' => 'ProjectSecondarySourceVersions', ], 'sourceVersion' => [ 'shape' => 'String', ], 'artifactsOverride' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifactsOverride' => [ 'shape' => 'ProjectArtifactsList', ], 'environmentVariablesOverride' => [ 'shape' => 'EnvironmentVariables', ], 'sourceTypeOverride' => [ 'shape' => 'SourceType', ], 'sourceLocationOverride' => [ 'shape' => 'String', ], 'sourceAuthOverride' => [ 'shape' => 'SourceAuth', ], 'gitCloneDepthOverride' => [ 'shape' => 'GitCloneDepth', ], 'buildspecOverride' => [ 'shape' => 'String', ], 'insecureSslOverride' => [ 'shape' => 'WrapperBoolean', ], 'reportBuildStatusOverride' => [ 'shape' => 'WrapperBoolean', ], 'environmentTypeOverride' => [ 'shape' => 'EnvironmentType', ], 'imageOverride' => [ 'shape' => 'NonEmptyString', ], 'computeTypeOverride' => [ 'shape' => 'ComputeType', ], 'certificateOverride' => [ 'shape' => 'String', ], 'cacheOverride' => [ 'shape' => 'ProjectCache', ], 'serviceRoleOverride' => [ 'shape' => 'NonEmptyString', ], 'privilegedModeOverride' => [ 'shape' => 'WrapperBoolean', ], 'timeoutInMinutesOverride' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutesOverride' => [ 'shape' => 'TimeOut', ], 'idempotencyToken' => [ 'shape' => 'String', ], 'logsConfigOverride' => [ 'shape' => 'LogsConfig', ], 'registryCredentialOverride' => [ 'shape' => 'RegistryCredential', ], 'imagePullCredentialsTypeOverride' => [ 'shape' => 'ImagePullCredentialsType', ], ], ], 'StartBuildOutput' => [ 'type' => 'structure', 'members' => [ 'build' => [ 'shape' => 'Build', ], ], ], 'StatusType' => [ 'type' => 'string', 'enum' => [ 'SUCCEEDED', 'FAILED', 'FAULT', 'TIMED_OUT', 'IN_PROGRESS', 'STOPPED', ], ], 'StopBuildInput' => [ 'type' => 'structure', 'required' => [ 'id', ], 'members' => [ 'id' => [ 'shape' => 'NonEmptyString', ], ], ], 'StopBuildOutput' => [ 'type' => 'structure', 'members' => [ 'build' => [ 'shape' => 'Build', ], ], ], 'String' => [ 'type' => 'string', ], 'Subnets' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 16, ], 'Tag' => [ 'type' => 'structure', 'members' => [ 'key' => [ 'shape' => 'KeyInput', ], 'value' => [ 'shape' => 'ValueInput', ], ], ], 'TagList' => [ 'type' => 'list', 'member' => [ 'shape' => 'Tag', ], 'max' => 50, 'min' => 0, ], 'TimeOut' => [ 'type' => 'integer', 'max' => 480, 'min' => 5, ], 'Timestamp' => [ 'type' => 'timestamp', ], 'UpdateProjectInput' => [ 'type' => 'structure', 'required' => [ 'name', ], 'members' => [ 'name' => [ 'shape' => 'NonEmptyString', ], 'description' => [ 'shape' => 'ProjectDescription', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'artifacts' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'ProjectArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'timeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], 'tags' => [ 'shape' => 'TagList', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'badgeEnabled' => [ 'shape' => 'WrapperBoolean', ], 'logsConfig' => [ 'shape' => 'LogsConfig', ], ], ], 
'UpdateProjectOutput' => [ 'type' => 'structure', 'members' => [ 'project' => [ 'shape' => 'Project', ], ], ], 'UpdateWebhookInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'ProjectName', ], 'branchFilter' => [ 'shape' => 'String', ], 'rotateSecret' => [ 'shape' => 'Boolean', ], 'filterGroups' => [ 'shape' => 'FilterGroups', ], ], ], 'UpdateWebhookOutput' => [ 'type' => 'structure', 'members' => [ 'webhook' => [ 'shape' => 'Webhook', ], ], ], 'ValueInput' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 'pattern' => '^([\\\\p{L}\\\\p{Z}\\\\p{N}_.:/=@+\\\\-]*)$', ], 'VpcConfig' => [ 'type' => 'structure', 'members' => [ 'vpcId' => [ 'shape' => 'NonEmptyString', ], 'subnets' => [ 'shape' => 'Subnets', ], 'securityGroupIds' => [ 'shape' => 'SecurityGroupIds', ], ], ], 'Webhook' => [ 'type' => 'structure', 'members' => [ 'url' => [ 'shape' => 'NonEmptyString', ], 'payloadUrl' => [ 'shape' => 'NonEmptyString', ], 'secret' => [ 'shape' => 'NonEmptyString', ], 'branchFilter' => [ 'shape' => 'String', ], 'filterGroups' => [ 'shape' => 'FilterGroups', ], 'lastModifiedSecret' => [ 'shape' => 'Timestamp', ], ], ], 'WebhookFilter' => [ 'type' => 'structure', 'required' => [ 'type', 'pattern', ], 'members' => [ 'type' => [ 'shape' => 'WebhookFilterType', ], 'pattern' => [ 'shape' => 'String', ], 'excludeMatchedPattern' => [ 'shape' => 'WrapperBoolean', ], ], ], 'WebhookFilterType' => [ 'type' => 'string', 'enum' => [ 'EVENT', 'BASE_REF', 'HEAD_REF', 'ACTOR_ACCOUNT_ID', 'FILE_PATH', ], ], 'WrapperBoolean' => [ 'type' => 'boolean', ], 'WrapperInt' => [ 'type' => 'integer', ], 'WrapperLong' => [ 'type' => 'long', ], ],]; +return [ 'version' => '2.0', 'metadata' => [ 'apiVersion' => '2016-10-06', 'endpointPrefix' => 'codebuild', 'jsonVersion' => '1.1', 'protocol' => 'json', 'serviceFullName' => 'AWS CodeBuild', 'serviceId' => 'CodeBuild', 'signatureVersion' => 'v4', 'targetPrefix' => 'CodeBuild_20161006', 'uid' => 'codebuild-2016-10-06', ], 'operations' => [ 'BatchDeleteBuilds' => [ 'name' => 'BatchDeleteBuilds', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'BatchDeleteBuildsInput', ], 'output' => [ 'shape' => 'BatchDeleteBuildsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'BatchGetBuilds' => [ 'name' => 'BatchGetBuilds', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'BatchGetBuildsInput', ], 'output' => [ 'shape' => 'BatchGetBuildsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'BatchGetProjects' => [ 'name' => 'BatchGetProjects', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'BatchGetProjectsInput', ], 'output' => [ 'shape' => 'BatchGetProjectsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'CreateProject' => [ 'name' => 'CreateProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateProjectInput', ], 'output' => [ 'shape' => 'CreateProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceAlreadyExistsException', ], [ 'shape' => 'AccountLimitExceededException', ], ], ], 'CreateWebhook' => [ 'name' => 'CreateWebhook', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateWebhookInput', ], 'output' => [ 'shape' => 'CreateWebhookOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'OAuthProviderException', ], [ 'shape' => 
'ResourceAlreadyExistsException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'DeleteProject' => [ 'name' => 'DeleteProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteProjectInput', ], 'output' => [ 'shape' => 'DeleteProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'DeleteSourceCredentials' => [ 'name' => 'DeleteSourceCredentials', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteSourceCredentialsInput', ], 'output' => [ 'shape' => 'DeleteSourceCredentialsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'DeleteWebhook' => [ 'name' => 'DeleteWebhook', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteWebhookInput', ], 'output' => [ 'shape' => 'DeleteWebhookOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], [ 'shape' => 'OAuthProviderException', ], ], ], 'ImportSourceCredentials' => [ 'name' => 'ImportSourceCredentials', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ImportSourceCredentialsInput', ], 'output' => [ 'shape' => 'ImportSourceCredentialsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'AccountLimitExceededException', ], ], ], 'InvalidateProjectCache' => [ 'name' => 'InvalidateProjectCache', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'InvalidateProjectCacheInput', ], 'output' => [ 'shape' => 'InvalidateProjectCacheOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'ListBuilds' => [ 'name' => 'ListBuilds', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListBuildsInput', ], 'output' => [ 'shape' => 'ListBuildsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'ListBuildsForProject' => [ 'name' => 'ListBuildsForProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListBuildsForProjectInput', ], 'output' => [ 'shape' => 'ListBuildsForProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'ListCuratedEnvironmentImages' => [ 'name' => 'ListCuratedEnvironmentImages', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListCuratedEnvironmentImagesInput', ], 'output' => [ 'shape' => 'ListCuratedEnvironmentImagesOutput', ], ], 'ListProjects' => [ 'name' => 'ListProjects', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListProjectsInput', ], 'output' => [ 'shape' => 'ListProjectsOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], ], ], 'ListSourceCredentials' => [ 'name' => 'ListSourceCredentials', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListSourceCredentialsInput', ], 'output' => [ 'shape' => 'ListSourceCredentialsOutput', ], ], 'StartBuild' => [ 'name' => 'StartBuild', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StartBuildInput', ], 'output' => [ 'shape' => 'StartBuildOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], [ 'shape' => 'AccountLimitExceededException', ], ], ], 'StopBuild' => [ 'name' => 'StopBuild', 'http' => [ 'method' => 'POST', 'requestUri' => '/', 
], 'input' => [ 'shape' => 'StopBuildInput', ], 'output' => [ 'shape' => 'StopBuildOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'UpdateProject' => [ 'name' => 'UpdateProject', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateProjectInput', ], 'output' => [ 'shape' => 'UpdateProjectOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], ], ], 'UpdateWebhook' => [ 'name' => 'UpdateWebhook', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateWebhookInput', ], 'output' => [ 'shape' => 'UpdateWebhookOutput', ], 'errors' => [ [ 'shape' => 'InvalidInputException', ], [ 'shape' => 'ResourceNotFoundException', ], [ 'shape' => 'OAuthProviderException', ], ], ], ], 'shapes' => [ 'AccountLimitExceededException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'ArtifactNamespace' => [ 'type' => 'string', 'enum' => [ 'NONE', 'BUILD_ID', ], ], 'ArtifactPackaging' => [ 'type' => 'string', 'enum' => [ 'NONE', 'ZIP', ], ], 'ArtifactsType' => [ 'type' => 'string', 'enum' => [ 'CODEPIPELINE', 'S3', 'NO_ARTIFACTS', ], ], 'AuthType' => [ 'type' => 'string', 'enum' => [ 'OAUTH', 'BASIC_AUTH', 'PERSONAL_ACCESS_TOKEN', ], ], 'BatchDeleteBuildsInput' => [ 'type' => 'structure', 'required' => [ 'ids', ], 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], ], ], 'BatchDeleteBuildsOutput' => [ 'type' => 'structure', 'members' => [ 'buildsDeleted' => [ 'shape' => 'BuildIds', ], 'buildsNotDeleted' => [ 'shape' => 'BuildsNotDeleted', ], ], ], 'BatchGetBuildsInput' => [ 'type' => 'structure', 'required' => [ 'ids', ], 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], ], ], 'BatchGetBuildsOutput' => [ 'type' => 'structure', 'members' => [ 'builds' => [ 'shape' => 'Builds', ], 'buildsNotFound' => [ 'shape' => 'BuildIds', ], ], ], 'BatchGetProjectsInput' => [ 'type' => 'structure', 'required' => [ 'names', ], 'members' => [ 'names' => [ 'shape' => 'ProjectNames', ], ], ], 'BatchGetProjectsOutput' => [ 'type' => 'structure', 'members' => [ 'projects' => [ 'shape' => 'Projects', ], 'projectsNotFound' => [ 'shape' => 'ProjectNames', ], ], ], 'Boolean' => [ 'type' => 'boolean', ], 'Build' => [ 'type' => 'structure', 'members' => [ 'id' => [ 'shape' => 'NonEmptyString', ], 'arn' => [ 'shape' => 'NonEmptyString', ], 'startTime' => [ 'shape' => 'Timestamp', ], 'endTime' => [ 'shape' => 'Timestamp', ], 'currentPhase' => [ 'shape' => 'String', ], 'buildStatus' => [ 'shape' => 'StatusType', ], 'sourceVersion' => [ 'shape' => 'NonEmptyString', ], 'resolvedSourceVersion' => [ 'shape' => 'NonEmptyString', ], 'projectName' => [ 'shape' => 'NonEmptyString', ], 'phases' => [ 'shape' => 'BuildPhases', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'secondarySourceVersions' => [ 'shape' => 'ProjectSecondarySourceVersions', ], 'artifacts' => [ 'shape' => 'BuildArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'BuildArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'logs' => [ 'shape' => 'LogsLocation', ], 'timeoutInMinutes' => [ 'shape' => 'WrapperInt', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'WrapperInt', ], 'buildComplete' => [ 'shape' => 'Boolean', ], 'initiator' => [ 'shape' => 'String', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 
'networkInterface' => [ 'shape' => 'NetworkInterface', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], ], ], 'BuildArtifacts' => [ 'type' => 'structure', 'members' => [ 'location' => [ 'shape' => 'String', ], 'sha256sum' => [ 'shape' => 'String', ], 'md5sum' => [ 'shape' => 'String', ], 'overrideArtifactName' => [ 'shape' => 'WrapperBoolean', ], 'encryptionDisabled' => [ 'shape' => 'WrapperBoolean', ], 'artifactIdentifier' => [ 'shape' => 'String', ], ], ], 'BuildArtifactsList' => [ 'type' => 'list', 'member' => [ 'shape' => 'BuildArtifacts', ], 'max' => 12, 'min' => 0, ], 'BuildIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 100, 'min' => 1, ], 'BuildNotDeleted' => [ 'type' => 'structure', 'members' => [ 'id' => [ 'shape' => 'NonEmptyString', ], 'statusCode' => [ 'shape' => 'String', ], ], ], 'BuildPhase' => [ 'type' => 'structure', 'members' => [ 'phaseType' => [ 'shape' => 'BuildPhaseType', ], 'phaseStatus' => [ 'shape' => 'StatusType', ], 'startTime' => [ 'shape' => 'Timestamp', ], 'endTime' => [ 'shape' => 'Timestamp', ], 'durationInSeconds' => [ 'shape' => 'WrapperLong', ], 'contexts' => [ 'shape' => 'PhaseContexts', ], ], ], 'BuildPhaseType' => [ 'type' => 'string', 'enum' => [ 'SUBMITTED', 'QUEUED', 'PROVISIONING', 'DOWNLOAD_SOURCE', 'INSTALL', 'PRE_BUILD', 'BUILD', 'POST_BUILD', 'UPLOAD_ARTIFACTS', 'FINALIZING', 'COMPLETED', ], ], 'BuildPhases' => [ 'type' => 'list', 'member' => [ 'shape' => 'BuildPhase', ], ], 'Builds' => [ 'type' => 'list', 'member' => [ 'shape' => 'Build', ], ], 'BuildsNotDeleted' => [ 'type' => 'list', 'member' => [ 'shape' => 'BuildNotDeleted', ], ], 'CacheMode' => [ 'type' => 'string', 'enum' => [ 'LOCAL_DOCKER_LAYER_CACHE', 'LOCAL_SOURCE_CACHE', 'LOCAL_CUSTOM_CACHE', ], ], 'CacheType' => [ 'type' => 'string', 'enum' => [ 'NO_CACHE', 'S3', 'LOCAL', ], ], 'CloudWatchLogsConfig' => [ 'type' => 'structure', 'required' => [ 'status', ], 'members' => [ 'status' => [ 'shape' => 'LogsConfigStatusType', ], 'groupName' => [ 'shape' => 'String', ], 'streamName' => [ 'shape' => 'String', ], ], ], 'ComputeType' => [ 'type' => 'string', 'enum' => [ 'BUILD_GENERAL1_SMALL', 'BUILD_GENERAL1_MEDIUM', 'BUILD_GENERAL1_LARGE', ], ], 'CreateProjectInput' => [ 'type' => 'structure', 'required' => [ 'name', 'source', 'artifacts', 'environment', 'serviceRole', ], 'members' => [ 'name' => [ 'shape' => 'ProjectName', ], 'description' => [ 'shape' => 'ProjectDescription', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'artifacts' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'ProjectArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'timeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], 'tags' => [ 'shape' => 'TagList', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'badgeEnabled' => [ 'shape' => 'WrapperBoolean', ], 'logsConfig' => [ 'shape' => 'LogsConfig', ], ], ], 'CreateProjectOutput' => [ 'type' => 'structure', 'members' => [ 'project' => [ 'shape' => 'Project', ], ], ], 'CreateWebhookInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'ProjectName', ], 'branchFilter' => [ 'shape' => 'String', ], 'filterGroups' => [ 'shape' => 'FilterGroups', ], ], ], 'CreateWebhookOutput' => 
[ 'type' => 'structure', 'members' => [ 'webhook' => [ 'shape' => 'Webhook', ], ], ], 'CredentialProviderType' => [ 'type' => 'string', 'enum' => [ 'SECRETS_MANAGER', ], ], 'DeleteProjectInput' => [ 'type' => 'structure', 'required' => [ 'name', ], 'members' => [ 'name' => [ 'shape' => 'NonEmptyString', ], ], ], 'DeleteProjectOutput' => [ 'type' => 'structure', 'members' => [], ], 'DeleteSourceCredentialsInput' => [ 'type' => 'structure', 'required' => [ 'arn', ], 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], ], ], 'DeleteSourceCredentialsOutput' => [ 'type' => 'structure', 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], ], ], 'DeleteWebhookInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'ProjectName', ], ], ], 'DeleteWebhookOutput' => [ 'type' => 'structure', 'members' => [], ], 'EnvironmentImage' => [ 'type' => 'structure', 'members' => [ 'name' => [ 'shape' => 'String', ], 'description' => [ 'shape' => 'String', ], 'versions' => [ 'shape' => 'ImageVersions', ], ], ], 'EnvironmentImages' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentImage', ], ], 'EnvironmentLanguage' => [ 'type' => 'structure', 'members' => [ 'language' => [ 'shape' => 'LanguageType', ], 'images' => [ 'shape' => 'EnvironmentImages', ], ], ], 'EnvironmentLanguages' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentLanguage', ], ], 'EnvironmentPlatform' => [ 'type' => 'structure', 'members' => [ 'platform' => [ 'shape' => 'PlatformType', ], 'languages' => [ 'shape' => 'EnvironmentLanguages', ], ], ], 'EnvironmentPlatforms' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentPlatform', ], ], 'EnvironmentType' => [ 'type' => 'string', 'enum' => [ 'WINDOWS_CONTAINER', 'LINUX_CONTAINER', ], ], 'EnvironmentVariable' => [ 'type' => 'structure', 'required' => [ 'name', 'value', ], 'members' => [ 'name' => [ 'shape' => 'NonEmptyString', ], 'value' => [ 'shape' => 'String', ], 'type' => [ 'shape' => 'EnvironmentVariableType', ], ], ], 'EnvironmentVariableType' => [ 'type' => 'string', 'enum' => [ 'PLAINTEXT', 'PARAMETER_STORE', ], ], 'EnvironmentVariables' => [ 'type' => 'list', 'member' => [ 'shape' => 'EnvironmentVariable', ], ], 'FilterGroup' => [ 'type' => 'list', 'member' => [ 'shape' => 'WebhookFilter', ], ], 'FilterGroups' => [ 'type' => 'list', 'member' => [ 'shape' => 'FilterGroup', ], ], 'GitCloneDepth' => [ 'type' => 'integer', 'min' => 0, ], 'GitSubmodulesConfig' => [ 'type' => 'structure', 'required' => [ 'fetchSubmodules', ], 'members' => [ 'fetchSubmodules' => [ 'shape' => 'WrapperBoolean', ], ], ], 'ImagePullCredentialsType' => [ 'type' => 'string', 'enum' => [ 'CODEBUILD', 'SERVICE_ROLE', ], ], 'ImageVersions' => [ 'type' => 'list', 'member' => [ 'shape' => 'String', ], ], 'ImportSourceCredentialsInput' => [ 'type' => 'structure', 'required' => [ 'token', 'serverType', 'authType', ], 'members' => [ 'username' => [ 'shape' => 'NonEmptyString', ], 'token' => [ 'shape' => 'SensitiveNonEmptyString', ], 'serverType' => [ 'shape' => 'ServerType', ], 'authType' => [ 'shape' => 'AuthType', ], ], ], 'ImportSourceCredentialsOutput' => [ 'type' => 'structure', 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], ], ], 'InvalidInputException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'InvalidateProjectCacheInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'NonEmptyString', ], ], ], 'InvalidateProjectCacheOutput' 
=> [ 'type' => 'structure', 'members' => [], ], 'KeyInput' => [ 'type' => 'string', 'max' => 127, 'min' => 1, 'pattern' => '^([\\p{L}\\p{Z}\\p{N}_.:/=@+\\-]*)$', ], 'LanguageType' => [ 'type' => 'string', 'enum' => [ 'JAVA', 'PYTHON', 'NODE_JS', 'RUBY', 'GOLANG', 'DOCKER', 'ANDROID', 'DOTNET', 'BASE', 'PHP', ], ], 'ListBuildsForProjectInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'NonEmptyString', ], 'sortOrder' => [ 'shape' => 'SortOrderType', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListBuildsForProjectOutput' => [ 'type' => 'structure', 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListBuildsInput' => [ 'type' => 'structure', 'members' => [ 'sortOrder' => [ 'shape' => 'SortOrderType', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListBuildsOutput' => [ 'type' => 'structure', 'members' => [ 'ids' => [ 'shape' => 'BuildIds', ], 'nextToken' => [ 'shape' => 'String', ], ], ], 'ListCuratedEnvironmentImagesInput' => [ 'type' => 'structure', 'members' => [], ], 'ListCuratedEnvironmentImagesOutput' => [ 'type' => 'structure', 'members' => [ 'platforms' => [ 'shape' => 'EnvironmentPlatforms', ], ], ], 'ListProjectsInput' => [ 'type' => 'structure', 'members' => [ 'sortBy' => [ 'shape' => 'ProjectSortByType', ], 'sortOrder' => [ 'shape' => 'SortOrderType', ], 'nextToken' => [ 'shape' => 'NonEmptyString', ], ], ], 'ListProjectsOutput' => [ 'type' => 'structure', 'members' => [ 'nextToken' => [ 'shape' => 'String', ], 'projects' => [ 'shape' => 'ProjectNames', ], ], ], 'ListSourceCredentialsInput' => [ 'type' => 'structure', 'members' => [], ], 'ListSourceCredentialsOutput' => [ 'type' => 'structure', 'members' => [ 'sourceCredentialsInfos' => [ 'shape' => 'SourceCredentialsInfos', ], ], ], 'LogsConfig' => [ 'type' => 'structure', 'members' => [ 'cloudWatchLogs' => [ 'shape' => 'CloudWatchLogsConfig', ], 's3Logs' => [ 'shape' => 'S3LogsConfig', ], ], ], 'LogsConfigStatusType' => [ 'type' => 'string', 'enum' => [ 'ENABLED', 'DISABLED', ], ], 'LogsLocation' => [ 'type' => 'structure', 'members' => [ 'groupName' => [ 'shape' => 'String', ], 'streamName' => [ 'shape' => 'String', ], 'deepLink' => [ 'shape' => 'String', ], 's3DeepLink' => [ 'shape' => 'String', ], 'cloudWatchLogs' => [ 'shape' => 'CloudWatchLogsConfig', ], 's3Logs' => [ 'shape' => 'S3LogsConfig', ], ], ], 'NetworkInterface' => [ 'type' => 'structure', 'members' => [ 'subnetId' => [ 'shape' => 'NonEmptyString', ], 'networkInterfaceId' => [ 'shape' => 'NonEmptyString', ], ], ], 'NonEmptyString' => [ 'type' => 'string', 'min' => 1, ], 'OAuthProviderException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'PhaseContext' => [ 'type' => 'structure', 'members' => [ 'statusCode' => [ 'shape' => 'String', ], 'message' => [ 'shape' => 'String', ], ], ], 'PhaseContexts' => [ 'type' => 'list', 'member' => [ 'shape' => 'PhaseContext', ], ], 'PlatformType' => [ 'type' => 'string', 'enum' => [ 'DEBIAN', 'AMAZON_LINUX', 'UBUNTU', 'WINDOWS_SERVER', ], ], 'Project' => [ 'type' => 'structure', 'members' => [ 'name' => [ 'shape' => 'ProjectName', ], 'arn' => [ 'shape' => 'String', ], 'description' => [ 'shape' => 'ProjectDescription', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'artifacts' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'ProjectArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 
'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'timeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], 'tags' => [ 'shape' => 'TagList', ], 'created' => [ 'shape' => 'Timestamp', ], 'lastModified' => [ 'shape' => 'Timestamp', ], 'webhook' => [ 'shape' => 'Webhook', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'badge' => [ 'shape' => 'ProjectBadge', ], 'logsConfig' => [ 'shape' => 'LogsConfig', ], ], ], 'ProjectArtifacts' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'ArtifactsType', ], 'location' => [ 'shape' => 'String', ], 'path' => [ 'shape' => 'String', ], 'namespaceType' => [ 'shape' => 'ArtifactNamespace', ], 'name' => [ 'shape' => 'String', ], 'packaging' => [ 'shape' => 'ArtifactPackaging', ], 'overrideArtifactName' => [ 'shape' => 'WrapperBoolean', ], 'encryptionDisabled' => [ 'shape' => 'WrapperBoolean', ], 'artifactIdentifier' => [ 'shape' => 'String', ], ], ], 'ProjectArtifactsList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProjectArtifacts', ], 'max' => 12, 'min' => 0, ], 'ProjectBadge' => [ 'type' => 'structure', 'members' => [ 'badgeEnabled' => [ 'shape' => 'Boolean', ], 'badgeRequestUrl' => [ 'shape' => 'String', ], ], ], 'ProjectCache' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'CacheType', ], 'location' => [ 'shape' => 'String', ], 'modes' => [ 'shape' => 'ProjectCacheModes', ], ], ], 'ProjectCacheModes' => [ 'type' => 'list', 'member' => [ 'shape' => 'CacheMode', ], ], 'ProjectDescription' => [ 'type' => 'string', 'max' => 255, 'min' => 0, ], 'ProjectEnvironment' => [ 'type' => 'structure', 'required' => [ 'type', 'image', 'computeType', ], 'members' => [ 'type' => [ 'shape' => 'EnvironmentType', ], 'image' => [ 'shape' => 'NonEmptyString', ], 'computeType' => [ 'shape' => 'ComputeType', ], 'environmentVariables' => [ 'shape' => 'EnvironmentVariables', ], 'privilegedMode' => [ 'shape' => 'WrapperBoolean', ], 'certificate' => [ 'shape' => 'String', ], 'registryCredential' => [ 'shape' => 'RegistryCredential', ], 'imagePullCredentialsType' => [ 'shape' => 'ImagePullCredentialsType', ], ], ], 'ProjectName' => [ 'type' => 'string', 'max' => 255, 'min' => 2, 'pattern' => '[A-Za-z0-9][A-Za-z0-9\\-_]{1,254}', ], 'ProjectNames' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 100, 'min' => 1, ], 'ProjectSecondarySourceVersions' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProjectSourceVersion', ], 'max' => 12, 'min' => 0, ], 'ProjectSortByType' => [ 'type' => 'string', 'enum' => [ 'NAME', 'CREATED_TIME', 'LAST_MODIFIED_TIME', ], ], 'ProjectSource' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'SourceType', ], 'location' => [ 'shape' => 'String', ], 'gitCloneDepth' => [ 'shape' => 'GitCloneDepth', ], 'gitSubmodulesConfig' => [ 'shape' => 'GitSubmodulesConfig', ], 'buildspec' => [ 'shape' => 'String', ], 'auth' => [ 'shape' => 'SourceAuth', ], 'reportBuildStatus' => [ 'shape' => 'WrapperBoolean', ], 'insecureSsl' => [ 'shape' => 'WrapperBoolean', ], 'sourceIdentifier' => [ 'shape' => 'String', ], ], ], 'ProjectSourceVersion' => [ 'type' => 'structure', 'required' => [ 'sourceIdentifier', 'sourceVersion', ], 'members' => [ 'sourceIdentifier' => [ 'shape' => 'String', ], 'sourceVersion' => [ 'shape' => 'String', ], ], ], 'ProjectSources' => [ 
'type' => 'list', 'member' => [ 'shape' => 'ProjectSource', ], 'max' => 12, 'min' => 0, ], 'Projects' => [ 'type' => 'list', 'member' => [ 'shape' => 'Project', ], ], 'RegistryCredential' => [ 'type' => 'structure', 'required' => [ 'credential', 'credentialProvider', ], 'members' => [ 'credential' => [ 'shape' => 'NonEmptyString', ], 'credentialProvider' => [ 'shape' => 'CredentialProviderType', ], ], ], 'ResourceAlreadyExistsException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'ResourceNotFoundException' => [ 'type' => 'structure', 'members' => [], 'exception' => true, ], 'S3LogsConfig' => [ 'type' => 'structure', 'required' => [ 'status', ], 'members' => [ 'status' => [ 'shape' => 'LogsConfigStatusType', ], 'location' => [ 'shape' => 'String', ], 'encryptionDisabled' => [ 'shape' => 'WrapperBoolean', ], ], ], 'SecurityGroupIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 5, ], 'SensitiveNonEmptyString' => [ 'type' => 'string', 'min' => 1, 'sensitive' => true, ], 'ServerType' => [ 'type' => 'string', 'enum' => [ 'GITHUB', 'BITBUCKET', 'GITHUB_ENTERPRISE', ], ], 'SortOrderType' => [ 'type' => 'string', 'enum' => [ 'ASCENDING', 'DESCENDING', ], ], 'SourceAuth' => [ 'type' => 'structure', 'required' => [ 'type', ], 'members' => [ 'type' => [ 'shape' => 'SourceAuthType', ], 'resource' => [ 'shape' => 'String', ], ], ], 'SourceAuthType' => [ 'type' => 'string', 'enum' => [ 'OAUTH', ], ], 'SourceCredentialsInfo' => [ 'type' => 'structure', 'members' => [ 'arn' => [ 'shape' => 'NonEmptyString', ], 'serverType' => [ 'shape' => 'ServerType', ], 'authType' => [ 'shape' => 'AuthType', ], ], ], 'SourceCredentialsInfos' => [ 'type' => 'list', 'member' => [ 'shape' => 'SourceCredentialsInfo', ], ], 'SourceType' => [ 'type' => 'string', 'enum' => [ 'CODECOMMIT', 'CODEPIPELINE', 'GITHUB', 'S3', 'BITBUCKET', 'GITHUB_ENTERPRISE', 'NO_SOURCE', ], ], 'StartBuildInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'NonEmptyString', ], 'secondarySourcesOverride' => [ 'shape' => 'ProjectSources', ], 'secondarySourcesVersionOverride' => [ 'shape' => 'ProjectSecondarySourceVersions', ], 'sourceVersion' => [ 'shape' => 'String', ], 'artifactsOverride' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifactsOverride' => [ 'shape' => 'ProjectArtifactsList', ], 'environmentVariablesOverride' => [ 'shape' => 'EnvironmentVariables', ], 'sourceTypeOverride' => [ 'shape' => 'SourceType', ], 'sourceLocationOverride' => [ 'shape' => 'String', ], 'sourceAuthOverride' => [ 'shape' => 'SourceAuth', ], 'gitCloneDepthOverride' => [ 'shape' => 'GitCloneDepth', ], 'gitSubmodulesConfigOverride' => [ 'shape' => 'GitSubmodulesConfig', ], 'buildspecOverride' => [ 'shape' => 'String', ], 'insecureSslOverride' => [ 'shape' => 'WrapperBoolean', ], 'reportBuildStatusOverride' => [ 'shape' => 'WrapperBoolean', ], 'environmentTypeOverride' => [ 'shape' => 'EnvironmentType', ], 'imageOverride' => [ 'shape' => 'NonEmptyString', ], 'computeTypeOverride' => [ 'shape' => 'ComputeType', ], 'certificateOverride' => [ 'shape' => 'String', ], 'cacheOverride' => [ 'shape' => 'ProjectCache', ], 'serviceRoleOverride' => [ 'shape' => 'NonEmptyString', ], 'privilegedModeOverride' => [ 'shape' => 'WrapperBoolean', ], 'timeoutInMinutesOverride' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutesOverride' => [ 'shape' => 'TimeOut', ], 'idempotencyToken' => [ 'shape' => 'String', ], 'logsConfigOverride' => [ 'shape' => 'LogsConfig', 
], 'registryCredentialOverride' => [ 'shape' => 'RegistryCredential', ], 'imagePullCredentialsTypeOverride' => [ 'shape' => 'ImagePullCredentialsType', ], ], ], 'StartBuildOutput' => [ 'type' => 'structure', 'members' => [ 'build' => [ 'shape' => 'Build', ], ], ], 'StatusType' => [ 'type' => 'string', 'enum' => [ 'SUCCEEDED', 'FAILED', 'FAULT', 'TIMED_OUT', 'IN_PROGRESS', 'STOPPED', ], ], 'StopBuildInput' => [ 'type' => 'structure', 'required' => [ 'id', ], 'members' => [ 'id' => [ 'shape' => 'NonEmptyString', ], ], ], 'StopBuildOutput' => [ 'type' => 'structure', 'members' => [ 'build' => [ 'shape' => 'Build', ], ], ], 'String' => [ 'type' => 'string', ], 'Subnets' => [ 'type' => 'list', 'member' => [ 'shape' => 'NonEmptyString', ], 'max' => 16, ], 'Tag' => [ 'type' => 'structure', 'members' => [ 'key' => [ 'shape' => 'KeyInput', ], 'value' => [ 'shape' => 'ValueInput', ], ], ], 'TagList' => [ 'type' => 'list', 'member' => [ 'shape' => 'Tag', ], 'max' => 50, 'min' => 0, ], 'TimeOut' => [ 'type' => 'integer', 'max' => 480, 'min' => 5, ], 'Timestamp' => [ 'type' => 'timestamp', ], 'UpdateProjectInput' => [ 'type' => 'structure', 'required' => [ 'name', ], 'members' => [ 'name' => [ 'shape' => 'NonEmptyString', ], 'description' => [ 'shape' => 'ProjectDescription', ], 'source' => [ 'shape' => 'ProjectSource', ], 'secondarySources' => [ 'shape' => 'ProjectSources', ], 'artifacts' => [ 'shape' => 'ProjectArtifacts', ], 'secondaryArtifacts' => [ 'shape' => 'ProjectArtifactsList', ], 'cache' => [ 'shape' => 'ProjectCache', ], 'environment' => [ 'shape' => 'ProjectEnvironment', ], 'serviceRole' => [ 'shape' => 'NonEmptyString', ], 'timeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'queuedTimeoutInMinutes' => [ 'shape' => 'TimeOut', ], 'encryptionKey' => [ 'shape' => 'NonEmptyString', ], 'tags' => [ 'shape' => 'TagList', ], 'vpcConfig' => [ 'shape' => 'VpcConfig', ], 'badgeEnabled' => [ 'shape' => 'WrapperBoolean', ], 'logsConfig' => [ 'shape' => 'LogsConfig', ], ], ], 'UpdateProjectOutput' => [ 'type' => 'structure', 'members' => [ 'project' => [ 'shape' => 'Project', ], ], ], 'UpdateWebhookInput' => [ 'type' => 'structure', 'required' => [ 'projectName', ], 'members' => [ 'projectName' => [ 'shape' => 'ProjectName', ], 'branchFilter' => [ 'shape' => 'String', ], 'rotateSecret' => [ 'shape' => 'Boolean', ], 'filterGroups' => [ 'shape' => 'FilterGroups', ], ], ], 'UpdateWebhookOutput' => [ 'type' => 'structure', 'members' => [ 'webhook' => [ 'shape' => 'Webhook', ], ], ], 'ValueInput' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 'pattern' => '^([\\p{L}\\p{Z}\\p{N}_.:/=@+\\-]*)$', ], 'VpcConfig' => [ 'type' => 'structure', 'members' => [ 'vpcId' => [ 'shape' => 'NonEmptyString', ], 'subnets' => [ 'shape' => 'Subnets', ], 'securityGroupIds' => [ 'shape' => 'SecurityGroupIds', ], ], ], 'Webhook' => [ 'type' => 'structure', 'members' => [ 'url' => [ 'shape' => 'NonEmptyString', ], 'payloadUrl' => [ 'shape' => 'NonEmptyString', ], 'secret' => [ 'shape' => 'NonEmptyString', ], 'branchFilter' => [ 'shape' => 'String', ], 'filterGroups' => [ 'shape' => 'FilterGroups', ], 'lastModifiedSecret' => [ 'shape' => 'Timestamp', ], ], ], 'WebhookFilter' => [ 'type' => 'structure', 'required' => [ 'type', 'pattern', ], 'members' => [ 'type' => [ 'shape' => 'WebhookFilterType', ], 'pattern' => [ 'shape' => 'String', ], 'excludeMatchedPattern' => [ 'shape' => 'WrapperBoolean', ], ], ], 'WebhookFilterType' => [ 'type' => 'string', 'enum' => [ 'EVENT', 'BASE_REF', 'HEAD_REF', 'ACTOR_ACCOUNT_ID', 'FILE_PATH', ], ], 
'WrapperBoolean' => [ 'type' => 'boolean', ], 'WrapperInt' => [ 'type' => 'integer', ], 'WrapperLong' => [ 'type' => 'long', ], ],]; diff --git a/src/data/codebuild/2016-10-06/docs-2.json b/src/data/codebuild/2016-10-06/docs-2.json index e783cb149e..847e116b7d 100644 --- a/src/data/codebuild/2016-10-06/docs-2.json +++ b/src/data/codebuild/2016-10-06/docs-2.json @@ -320,10 +320,17 @@ "GitCloneDepth": { "base": null, "refs": { - "ProjectSource$gitCloneDepth": "

Information about the git clone depth for the build project.",
+      "ProjectSource$gitCloneDepth": "Information about the Git clone depth for the build project.",
       "StartBuildInput$gitCloneDepthOverride": "The user-defined depth of history, with a minimum value of 0, that overrides, for this build only, any previous depth of history defined in the build project."
     }
   },
+  "GitSubmodulesConfig": {
+    "base": "Information about the Git submodules configuration for an AWS CodeBuild build project.",
+    "refs": {
+      "ProjectSource$gitSubmodulesConfig": "Information about the Git submodules configuration for the build project.",
+      "StartBuildInput$gitSubmodulesConfigOverride": "Information about the Git submodules configuration for this build of an AWS CodeBuild build project."
+    }
+  },
   "ImagePullCredentialsType": {
     "base": null,
     "refs": {
@@ -461,11 +468,11 @@
       "Build$resolvedSourceVersion": "

An identifier for the version of this build's source code.

", "Build$projectName": "

The name of the AWS CodeBuild project.

", "Build$serviceRole": "

The name of a service role used for this build.

", - "Build$encryptionKey": "

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. This is expressed either as the Amazon Resource Name (ARN) of the CMK or, if specified, the CMK's alias (using the format alias/alias-name).",
+      "Build$encryptionKey": "The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key. You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK's alias (using the format alias/alias-name).",
       "BuildIds$member": null,
       "BuildNotDeleted$id": "The ID of the build that could not be successfully deleted.",
       "CreateProjectInput$serviceRole": "The ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.",
-      "CreateProjectInput$encryptionKey": "The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK's alias (using the format alias/alias-name).",
+      "CreateProjectInput$encryptionKey": "The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key. You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK's alias (using the format alias/alias-name).",
       "DeleteProjectInput$name": "

The name of the build project.

", "DeleteSourceCredentialsInput$arn": "

The Amazon Resource Name (ARN) of the token.

", "DeleteSourceCredentialsOutput$arn": "

The Amazon Resource Name (ARN) of the token.

", @@ -478,7 +485,7 @@ "NetworkInterface$subnetId": "

The ID of the subnet.

", "NetworkInterface$networkInterfaceId": "

The ID of the network interface.

", "Project$serviceRole": "

The ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

", - "Project$encryptionKey": "

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. This is expressed either as the Amazon Resource Name (ARN) of the CMK or, if specified, the CMK's alias (using the format alias/alias-name).",
+      "Project$encryptionKey": "The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key. You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK's alias (using the format alias/alias-name).",
       "ProjectEnvironment$image": "

The image tag or image digest that identifies the Docker image to use for this build project. Use the following formats:

", "ProjectNames$member": null, "RegistryCredential$credential": "

The Amazon Resource Name (ARN) or name of credentials created using AWS Secrets Manager.

The credential can use the name of the credentials only if they exist in your current region.

", @@ -491,7 +498,7 @@ "Subnets$member": null, "UpdateProjectInput$name": "

The name of the build project.

You cannot change a build project's name.

", "UpdateProjectInput$serviceRole": "

The replacement ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

", - "UpdateProjectInput$encryptionKey": "

The replacement AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. You can specify either the Amazon Resource Name (ARN)of the CMK or, if available, the CMK's alias (using the format alias/alias-name).",
+      "UpdateProjectInput$encryptionKey": "The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts. You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key. You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK's alias (using the format alias/alias-name).",
       "VpcConfig$vpcId": "

The ID of the Amazon VPC.

", "Webhook$url": "

The URL to the webhook.

", "Webhook$payloadUrl": "

The AWS CodeBuild endpoint where webhook events are sent.

", @@ -567,7 +574,7 @@ "ProjectCacheModes": { "base": null, "refs": { - "ProjectCache$modes": "

If you use a LOCAL cache, the local cache mode. You can use one or more local cache modes at the same time."
+      "ProjectCache$modes": "If you use a LOCAL cache, the local cache mode. You can use one or more local cache modes at the same time."
     }
   },
   "ProjectDescription": {
@@ -919,11 +926,13 @@
       "BuildArtifacts$overrideArtifactName": "

If this flag is set, a name specified in the build spec file overrides the artifact name. The name specified in a build spec file is calculated at build time and uses the Shell Command Language. For example, you can append a date and time to your artifact name so that it is always unique.

", "BuildArtifacts$encryptionDisabled": "

Information that tells you if encryption for build artifacts is disabled.

", "CreateProjectInput$badgeEnabled": "

Set this to true to generate a publicly accessible URL for your project's build badge.

", + "GitSubmodulesConfig$fetchSubmodules": "

Set to true to fetch Git submodules for your AWS CodeBuild build project.

", "ProjectArtifacts$overrideArtifactName": "

If this flag is set, a name specified in the build spec file overrides the artifact name. The name specified in a build spec file is calculated at build time and uses the Shell Command Language. For example, you can append a date and time to your artifact name so that it is always unique.

", "ProjectArtifacts$encryptionDisabled": "

Set to true if you do not want your output artifacts encrypted. This option is valid only if your artifacts type is Amazon Simple Storage Service (Amazon S3). If this is set with another artifacts type, an invalidInputException is thrown.

", "ProjectEnvironment$privilegedMode": "

Enables running the Docker daemon inside a Docker container. Set to true only if the build project is used to build Docker images, and the specified build environment image is not provided by AWS CodeBuild with Docker support. Otherwise, all associated builds that attempt to interact with the Docker daemon fail. You must also start the Docker daemon so that builds can interact with it. One way to do this is to initialize the Docker daemon during the install phase of your build spec by running the following build commands. (Do not run these commands if the specified build environment image is provided by AWS CodeBuild with Docker support.)

If the operating system's base image is Ubuntu Linux:

- nohup /usr/local/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --storage-driver=overlay& - timeout 15 sh -c \"until docker info; do echo .; sleep 1; done\"

If the operating system's base image is Alpine Linux, add the -t argument to timeout:

- nohup /usr/local/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --storage-driver=overlay& - timeout 15 -t sh -c \"until docker info; do echo .; sleep 1; done\"

", "ProjectSource$reportBuildStatus": "

Set to true to report the status of a build's start and finish to your source provider. This option is valid only when your source provider is GitHub, GitHub Enterprise, or Bitbucket. If this is set and you use a different source provider, an invalidInputException is thrown.

", "ProjectSource$insecureSsl": "

Enable this flag to ignore SSL warnings while connecting to the project source code.

", + "S3LogsConfig$encryptionDisabled": "

Set to true if you do not want your S3 build log output encrypted. By default S3 build logs are encrypted.

", "StartBuildInput$insecureSslOverride": "

Enable this flag to override the insecure SSL setting that is specified in the build project. The insecure SSL setting determines whether to ignore SSL warnings while connecting to the project source code. This override applies only if the build's source is GitHub Enterprise.

", "StartBuildInput$reportBuildStatusOverride": "

Set to true to report to your source provider the status of a build's start and completion. If you use this option with a source provider other than GitHub, GitHub Enterprise, or Bitbucket, an invalidInputException is thrown.

", "StartBuildInput$privilegedModeOverride": "

Enable this flag to override privileged mode in the build project.

", diff --git a/src/data/codebuild/2016-10-06/docs-2.json.php b/src/data/codebuild/2016-10-06/docs-2.json.php index ae7812b854..3f2f01cc5a 100644 --- a/src/data/codebuild/2016-10-06/docs-2.json.php +++ b/src/data/codebuild/2016-10-06/docs-2.json.php @@ -1,3 +1,3 @@ '2.0', 'service' => 'AWS CodeBuild

AWS CodeBuild is a fully managed build service in the cloud. AWS CodeBuild compiles your source code, runs unit tests, and produces artifacts that are ready to deploy. AWS CodeBuild eliminates the need to provision, manage, and scale your own build servers. It provides prepackaged build environments for the most popular programming languages and build tools, such as Apache Maven, Gradle, and more. You can also fully customize build environments in AWS CodeBuild to use your own build tools. AWS CodeBuild scales automatically to meet peak build requests. You pay only for the build time you consume. For more information about AWS CodeBuild, see the AWS CodeBuild User Guide.

AWS CodeBuild supports these operations:

', 'operations' => [ 'BatchDeleteBuilds' => '

Deletes one or more builds.

', 'BatchGetBuilds' => '

Gets information about builds.

', 'BatchGetProjects' => '

Gets information about build projects.

', 'CreateProject' => '

Creates a build project.

', 'CreateWebhook' => '

For an existing AWS CodeBuild build project that has its source code stored in a GitHub or Bitbucket repository, enables AWS CodeBuild to start rebuilding the source code every time a code change is pushed to the repository.

If you enable webhooks for an AWS CodeBuild project, and the project is used as a build step in AWS CodePipeline, then two identical builds are created for each commit. One build is triggered through webhooks, and one through AWS CodePipeline. Because billing is on a per-build basis, you are billed for both builds. Therefore, if you are using AWS CodePipeline, we recommend that you disable webhooks in AWS CodeBuild. In the AWS CodeBuild console, clear the Webhook box. For more information, see step 5 in Change a Build Project\'s Settings.

', 'DeleteProject' => '

Deletes a build project.

', 'DeleteSourceCredentials' => '

Deletes a set of GitHub, GitHub Enterprise, or Bitbucket source credentials.

', 'DeleteWebhook' => '

For an existing AWS CodeBuild build project that has its source code stored in a GitHub or Bitbucket repository, stops AWS CodeBuild from rebuilding the source code every time a code change is pushed to the repository.

', 'ImportSourceCredentials' => '

Imports the source repository credentials for an AWS CodeBuild project that has its source code stored in a GitHub, GitHub Enterprise, or Bitbucket repository.

', 'InvalidateProjectCache' => '

Resets the cache for a project.

', 'ListBuilds' => '

Gets a list of build IDs, with each build ID representing a single build.

', 'ListBuildsForProject' => '

Gets a list of build IDs for the specified build project, with each build ID representing a single build.

', 'ListCuratedEnvironmentImages' => '

Gets information about Docker images that are managed by AWS CodeBuild.

', 'ListProjects' => '

Gets a list of build project names, with each build project name representing a single build project.

', 'ListSourceCredentials' => '

Returns a list of SourceCredentialsInfo objects.

', 'StartBuild' => '

Starts running a build.

', 'StopBuild' => '

Attempts to stop running a build.

', 'UpdateProject' => '

Changes the settings of a build project.

', 'UpdateWebhook' => '

Updates the webhook associated with an AWS CodeBuild build project.

If you use Bitbucket for your repository, rotateSecret is ignored.

', ], 'shapes' => [ 'AccountLimitExceededException' => [ 'base' => '
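The operations described above map onto the PHP client one-to-one; a rough sketch of listing every project by following nextToken, with a placeholder region, might look like this.

    <?php
    require 'vendor/autoload.php';

    use Aws\CodeBuild\CodeBuildClient;

    $client = new CodeBuildClient(['region' => 'us-east-1', 'version' => '2016-10-06']);

    // ListProjects returns at most 100 names per call plus a nextToken.
    $names = [];
    $token = null;
    do {
        $page  = $client->listProjects($token ? ['nextToken' => $token] : []);
        $names = array_merge($names, $page['projects']);
        $token = isset($page['nextToken']) ? $page['nextToken'] : null;
    } while ($token !== null);

    // BatchGetProjects fills in the detail for the names collected.
    $detail = $client->batchGetProjects(['names' => array_slice($names, 0, 100)]);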

An AWS service limit was exceeded for the calling AWS account.

', 'refs' => [], ], 'ArtifactNamespace' => [ 'base' => NULL, 'refs' => [ 'ProjectArtifacts$namespaceType' => '

Along with path and name, the pattern that AWS CodeBuild uses to determine the name and location to store the output artifact:

For example, if path is set to MyArtifacts, namespaceType is set to BUILD_ID, and name is set to MyArtifact.zip, the output artifact is stored in MyArtifacts/build-ID/MyArtifact.zip.

', ], ], 'ArtifactPackaging' => [ 'base' => NULL, 'refs' => [ 'ProjectArtifacts$packaging' => '

The type of build output artifact to create:

', ], ], 'ArtifactsType' => [ 'base' => NULL, 'refs' => [ 'ProjectArtifacts$type' => '

The type of build output artifact. Valid values include:

', ], ], 'AuthType' => [ 'base' => NULL, 'refs' => [ 'ImportSourceCredentialsInput$authType' => '

The type of authentication used to connect to a GitHub, GitHub Enterprise, or Bitbucket repository. An OAUTH connection is not supported by the API and must be created using the AWS CodeBuild console.

', 'SourceCredentialsInfo$authType' => '

The type of authentication used by the credentials. Valid options are OAUTH, BASIC_AUTH, or PERSONAL_ACCESS_TOKEN.

', ], ], 'BatchDeleteBuildsInput' => [ 'base' => NULL, 'refs' => [], ], 'BatchDeleteBuildsOutput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetBuildsInput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetBuildsOutput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetProjectsInput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetProjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'Boolean' => [ 'base' => NULL, 'refs' => [ 'Build$buildComplete' => '

Whether the build is complete. True if complete; otherwise, false.

', 'ProjectBadge$badgeEnabled' => '

Set this to true to generate a publicly accessible URL for your project\'s build badge.

', 'UpdateWebhookInput$rotateSecret' => '

A boolean value that specifies whether the associated GitHub repository\'s secret token should be updated. If you use Bitbucket for your repository, rotateSecret is ignored.

', ], ], 'Build' => [ 'base' => '

Information about a build.

', 'refs' => [ 'Builds$member' => NULL, 'StartBuildOutput$build' => '

Information about the build to be run.

', 'StopBuildOutput$build' => '

Information about the build.

', ], ], 'BuildArtifacts' => [ 'base' => '

Information about build output artifacts.

', 'refs' => [ 'Build$artifacts' => '

Information about the output artifacts for the build.

', 'BuildArtifactsList$member' => NULL, ], ], 'BuildArtifactsList' => [ 'base' => NULL, 'refs' => [ 'Build$secondaryArtifacts' => '

An array of ProjectArtifacts objects.

', ], ], 'BuildIds' => [ 'base' => NULL, 'refs' => [ 'BatchDeleteBuildsInput$ids' => '

The IDs of the builds to delete.

', 'BatchDeleteBuildsOutput$buildsDeleted' => '

The IDs of the builds that were successfully deleted.

', 'BatchGetBuildsInput$ids' => '

The IDs of the builds.

', 'BatchGetBuildsOutput$buildsNotFound' => '

The IDs of builds for which information could not be found.

', 'ListBuildsForProjectOutput$ids' => '

A list of build IDs for the specified build project, with each build ID representing a single build.

', 'ListBuildsOutput$ids' => '

A list of build IDs, with each build ID representing a single build.

', ], ], 'BuildNotDeleted' => [ 'base' => '

Information about a build that could not be successfully deleted.

', 'refs' => [ 'BuildsNotDeleted$member' => NULL, ], ], 'BuildPhase' => [ 'base' => '

Information about a stage for a build.

', 'refs' => [ 'BuildPhases$member' => NULL, ], ], 'BuildPhaseType' => [ 'base' => NULL, 'refs' => [ 'BuildPhase$phaseType' => '

The name of the build phase. Valid values include:

', ], ], 'BuildPhases' => [ 'base' => NULL, 'refs' => [ 'Build$phases' => '

Information about all previous build phases that are complete and information about any current build phase that is not yet complete.

', ], ], 'Builds' => [ 'base' => NULL, 'refs' => [ 'BatchGetBuildsOutput$builds' => '

Information about the requested builds.

', ], ], 'BuildsNotDeleted' => [ 'base' => NULL, 'refs' => [ 'BatchDeleteBuildsOutput$buildsNotDeleted' => '

Information about any builds that could not be successfully deleted.

', ], ], 'CacheMode' => [ 'base' => NULL, 'refs' => [ 'ProjectCacheModes$member' => NULL, ], ], 'CacheType' => [ 'base' => NULL, 'refs' => [ 'ProjectCache$type' => '

The type of cache used by the build project. Valid values include:

', ], ], 'CloudWatchLogsConfig' => [ 'base' => '

Information about Amazon CloudWatch Logs for a build project.

', 'refs' => [ 'LogsConfig$cloudWatchLogs' => '

Information about Amazon CloudWatch Logs for a build project. Amazon CloudWatch Logs are enabled by default.

', 'LogsLocation$cloudWatchLogs' => '

Information about Amazon CloudWatch Logs for a build project.

', ], ], 'ComputeType' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$computeType' => '

Information about the compute resources the build project uses. Available values include:

', 'StartBuildInput$computeTypeOverride' => '

The name of a compute type for this build that overrides the one specified in the build project.

', ], ], 'CreateProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateWebhookInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateWebhookOutput' => [ 'base' => NULL, 'refs' => [], ], 'CredentialProviderType' => [ 'base' => NULL, 'refs' => [ 'RegistryCredential$credentialProvider' => '

The service that created the credentials to access a private Docker registry. The valid value, SECRETS_MANAGER, is for AWS Secrets Manager.

', ], ], 'DeleteProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteSourceCredentialsInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteSourceCredentialsOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWebhookInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWebhookOutput' => [ 'base' => NULL, 'refs' => [], ], 'EnvironmentImage' => [ 'base' => '

Information about a Docker image that is managed by AWS CodeBuild.

', 'refs' => [ 'EnvironmentImages$member' => NULL, ], ], 'EnvironmentImages' => [ 'base' => NULL, 'refs' => [ 'EnvironmentLanguage$images' => '

The list of Docker images that are related by the specified programming language.

', ], ], 'EnvironmentLanguage' => [ 'base' => '

A set of Docker images that are related by programming language and are managed by AWS CodeBuild.

', 'refs' => [ 'EnvironmentLanguages$member' => NULL, ], ], 'EnvironmentLanguages' => [ 'base' => NULL, 'refs' => [ 'EnvironmentPlatform$languages' => '

The list of programming languages that are available for the specified platform.

', ], ], 'EnvironmentPlatform' => [ 'base' => '

A set of Docker images that are related by platform and are managed by AWS CodeBuild.

', 'refs' => [ 'EnvironmentPlatforms$member' => NULL, ], ], 'EnvironmentPlatforms' => [ 'base' => NULL, 'refs' => [ 'ListCuratedEnvironmentImagesOutput$platforms' => '

Information about supported platforms for Docker images that are managed by AWS CodeBuild.

', ], ], 'EnvironmentType' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$type' => '

The type of build environment to use for related builds.

', 'StartBuildInput$environmentTypeOverride' => '

A container type for this build that overrides the one specified in the build project.

', ], ], 'EnvironmentVariable' => [ 'base' => '

Information about an environment variable for a build project or a build.

', 'refs' => [ 'EnvironmentVariables$member' => NULL, ], ], 'EnvironmentVariableType' => [ 'base' => NULL, 'refs' => [ 'EnvironmentVariable$type' => '

The type of environment variable. Valid values include:

', ], ], 'EnvironmentVariables' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$environmentVariables' => '

A set of environment variables to make available to builds for this build project.

', 'StartBuildInput$environmentVariablesOverride' => '

A set of environment variables that overrides, for this build only, the latest ones already defined in the build project.

', ], ], 'FilterGroup' => [ 'base' => NULL, 'refs' => [ 'FilterGroups$member' => NULL, ], ], 'FilterGroups' => [ 'base' => NULL, 'refs' => [ 'CreateWebhookInput$filterGroups' => '

An array of arrays of WebhookFilter objects used to determine which webhooks are triggered. At least one WebhookFilter in the array must specify EVENT as its type.

For a build to be triggered, at least one filter group in the filterGroups array must pass. For a filter group to pass, each of its filters must pass.

', 'UpdateWebhookInput$filterGroups' => '

An array of arrays of WebhookFilter objects used to determine if a webhook event can trigger a build. A filter group must contain at least one EVENT WebhookFilter.

', 'Webhook$filterGroups' => '

An array of arrays of WebhookFilter objects used to determine which webhooks are triggered. At least one WebhookFilter in the array must specify EVENT as its type.

For a build to be triggered, at least one filter group in the filterGroups array must pass. For a filter group to pass, each of its filters must pass.

', ], ], 'GitCloneDepth' => [ 'base' => NULL, 'refs' => [ 'ProjectSource$gitCloneDepth' => '
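Per the filter-group semantics just described (every filter in a group must match, and each group needs an EVENT filter), a hypothetical createWebhook call could look like this; the project name and branch pattern are placeholders.

    <?php
    require 'vendor/autoload.php';

    use Aws\CodeBuild\CodeBuildClient;

    $client = new CodeBuildClient(['region' => 'us-east-1', 'version' => '2016-10-06']);

    $client->createWebhook([
        'projectName'  => 'my-project',   // source must live in GitHub or Bitbucket
        'filterGroups' => [
            [ // one group: both filters must pass for a build to trigger
                ['type' => 'EVENT',    'pattern' => 'PUSH'],
                ['type' => 'HEAD_REF', 'pattern' => '^refs/heads/release-.*'],
            ],
        ],
    ]);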

Information about the git clone depth for the build project.

', 'StartBuildInput$gitCloneDepthOverride' => '

The user-defined depth of history, with a minimum value of 0, that overrides, for this build only, any previous depth of history defined in the build project.

', ], ], 'ImagePullCredentialsType' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$imagePullCredentialsType' => '

The type of credentials AWS CodeBuild uses to pull images in your build. There are two valid values:

When you use a cross-account or private registry image, you must use SERVICE_ROLE credentials. When you use an AWS CodeBuild curated image, you must use CODEBUILD credentials.

', 'StartBuildInput$imagePullCredentialsTypeOverride' => '

The type of credentials AWS CodeBuild uses to pull images in your build. There are two valid values:

When using a cross-account or private registry image, you must use SERVICE_ROLE credentials. When using an AWS CodeBuild curated image, you must use CODEBUILD credentials.

', ], ], 'ImageVersions' => [ 'base' => NULL, 'refs' => [ 'EnvironmentImage$versions' => '

A list of environment image versions.

', ], ], 'ImportSourceCredentialsInput' => [ 'base' => NULL, 'refs' => [], ], 'ImportSourceCredentialsOutput' => [ 'base' => NULL, 'refs' => [], ], 'InvalidInputException' => [ 'base' => '

The input value that was provided is not valid.

', 'refs' => [], ], 'InvalidateProjectCacheInput' => [ 'base' => NULL, 'refs' => [], ], 'InvalidateProjectCacheOutput' => [ 'base' => NULL, 'refs' => [], ], 'KeyInput' => [ 'base' => NULL, 'refs' => [ 'Tag$key' => '

The tag\'s key.

', ], ], 'LanguageType' => [ 'base' => NULL, 'refs' => [ 'EnvironmentLanguage$language' => '

The programming language for the Docker images.

', ], ], 'ListBuildsForProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'ListBuildsForProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBuildsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListBuildsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListCuratedEnvironmentImagesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListCuratedEnvironmentImagesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListProjectsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListProjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListSourceCredentialsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListSourceCredentialsOutput' => [ 'base' => NULL, 'refs' => [], ], 'LogsConfig' => [ 'base' => '

Information about logs for a build project. These can be logs in Amazon CloudWatch Logs, built in a specified S3 bucket, or both.

', 'refs' => [ 'CreateProjectInput$logsConfig' => '

Information about logs for the build project. These can be logs in Amazon CloudWatch Logs, logs uploaded to a specified S3 bucket, or both.

', 'Project$logsConfig' => '

Information about logs for the build project. A project can create logs in Amazon CloudWatch Logs, an S3 bucket, or both.

', 'StartBuildInput$logsConfigOverride' => '

Log settings for this build that override the log settings defined in the build project.

', 'UpdateProjectInput$logsConfig' => '

Information about logs for the build project. A project can create logs in Amazon CloudWatch Logs, logs in an S3 bucket, or both.

', ], ], 'LogsConfigStatusType' => [ 'base' => NULL, 'refs' => [ 'CloudWatchLogsConfig$status' => '

The current status of the logs in Amazon CloudWatch Logs for a build project. Valid values are:

', 'S3LogsConfig$status' => '

The current status of the S3 build logs. Valid values are:

', ], ], 'LogsLocation' => [ 'base' => '

Information about build logs in Amazon CloudWatch Logs.

', 'refs' => [ 'Build$logs' => '

Information about the build\'s logs in Amazon CloudWatch Logs.

', ], ], 'NetworkInterface' => [ 'base' => '

Describes a network interface.

', 'refs' => [ 'Build$networkInterface' => '

Describes a network interface.

', ], ], 'NonEmptyString' => [ 'base' => NULL, 'refs' => [ 'Build$id' => '

The unique ID for the build.

', 'Build$arn' => '

The Amazon Resource Name (ARN) of the build.

', 'Build$sourceVersion' => '

Any version identifier for the version of the source code to be built.

', 'Build$resolvedSourceVersion' => '

An identifier for the version of this build\'s source code.

', 'Build$projectName' => '

The name of the AWS CodeBuild project.

', 'Build$serviceRole' => '

The name of a service role used for this build.

', 'Build$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

This is expressed either as the Amazon Resource Name (ARN) of the CMK or, if specified, the CMK\'s alias (using the format alias/alias-name ).

', 'BuildIds$member' => NULL, 'BuildNotDeleted$id' => '

The ID of the build that could not be successfully deleted.

', 'CreateProjectInput$serviceRole' => '

The ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

', 'CreateProjectInput$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK\'s alias (using the format alias/alias-name ).

', 'DeleteProjectInput$name' => '

The name of the build project.

', 'DeleteSourceCredentialsInput$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'DeleteSourceCredentialsOutput$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'EnvironmentVariable$name' => '

The name or key of the environment variable.

', 'ImportSourceCredentialsInput$username' => '

The Bitbucket username when the authType is BASIC_AUTH. This parameter is not valid for other types of source providers or connections.

', 'ImportSourceCredentialsOutput$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'InvalidateProjectCacheInput$projectName' => '

The name of the AWS CodeBuild build project that the cache is reset for.

', 'ListBuildsForProjectInput$projectName' => '

The name of the AWS CodeBuild project.

', 'ListProjectsInput$nextToken' => '

During a previous call, if there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call. To get all of the items in the list, keep calling this operation with each subsequent next token that is returned, until no more next tokens are returned.

', 'NetworkInterface$subnetId' => '

The ID of the subnet.

', 'NetworkInterface$networkInterfaceId' => '

The ID of the network interface.

', 'Project$serviceRole' => '

The ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

', 'Project$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

This is expressed either as the Amazon Resource Name (ARN) of the CMK or, if specified, the CMK\'s alias (using the format alias/alias-name ).

', 'ProjectEnvironment$image' => '

The image tag or image digest that identifies the Docker image to use for this build project. Use the following formats:

', 'ProjectNames$member' => NULL, 'RegistryCredential$credential' => '

The Amazon Resource Name (ARN) or name of credentials created using AWS Secrets Manager.

The credential can use the name of the credentials only if they exist in your current region.

', 'SecurityGroupIds$member' => NULL, 'SourceCredentialsInfo$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'StartBuildInput$projectName' => '

The name of the AWS CodeBuild build project to start running a build.

', 'StartBuildInput$imageOverride' => '

The name of an image for this build that overrides the one specified in the build project.

', 'StartBuildInput$serviceRoleOverride' => '

The name of a service role for this build that overrides the one specified in the build project.

', 'StopBuildInput$id' => '

The ID of the build.

', 'Subnets$member' => NULL, 'UpdateProjectInput$name' => '

The name of the build project.

You cannot change a build project\'s name.

', 'UpdateProjectInput$serviceRole' => '

The replacement ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

', 'UpdateProjectInput$encryptionKey' => '

The replacement AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

You can specify either the Amazon Resource Name (ARN)of the CMK or, if available, the CMK\'s alias (using the format alias/alias-name ).

', 'VpcConfig$vpcId' => '

The ID of the Amazon VPC.

', 'Webhook$url' => '

The URL to the webhook.

', 'Webhook$payloadUrl' => '

The AWS CodeBuild endpoint where webhook events are sent.

', 'Webhook$secret' => '

The secret token of the associated repository.

A Bitbucket webhook does not support secret.

', ], ], 'OAuthProviderException' => [ 'base' => '

There was a problem with the underlying OAuth provider.

', 'refs' => [], ], 'PhaseContext' => [ 'base' => '

Additional information about a build phase that has an error. You can use this information for troubleshooting.

', 'refs' => [ 'PhaseContexts$member' => NULL, ], ], 'PhaseContexts' => [ 'base' => NULL, 'refs' => [ 'BuildPhase$contexts' => '

Additional information about a build phase, especially to help troubleshoot a failed build.

', ], ], 'PlatformType' => [ 'base' => NULL, 'refs' => [ 'EnvironmentPlatform$platform' => '

The platform\'s name.

', ], ], 'Project' => [ 'base' => '

Information about a build project.

', 'refs' => [ 'CreateProjectOutput$project' => '

Information about the build project that was created.

', 'Projects$member' => NULL, 'UpdateProjectOutput$project' => '

Information about the build project that was changed.

', ], ], 'ProjectArtifacts' => [ 'base' => '

Information about the build output artifacts for the build project.

', 'refs' => [ 'CreateProjectInput$artifacts' => '

Information about the build output artifacts for the build project.

', 'Project$artifacts' => '

Information about the build output artifacts for the build project.

', 'ProjectArtifactsList$member' => NULL, 'StartBuildInput$artifactsOverride' => '

Build output artifact settings that override, for this build only, the latest ones already defined in the build project.

', 'UpdateProjectInput$artifacts' => '

Information to be changed about the build output artifacts for the build project.

', ], ], 'ProjectArtifactsList' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$secondaryArtifacts' => '

An array of ProjectArtifacts objects.

', 'Project$secondaryArtifacts' => '

An array of ProjectArtifacts objects.

', 'StartBuildInput$secondaryArtifactsOverride' => '

An array of ProjectArtifacts objects.

', 'UpdateProjectInput$secondaryArtifacts' => '

An array of ProjectSource objects.

', ], ], 'ProjectBadge' => [ 'base' => '

Information about the build badge for the build project.

', 'refs' => [ 'Project$badge' => '

Information about the build badge for the build project.

', ], ], 'ProjectCache' => [ 'base' => '

Information about the cache for the build project.

', 'refs' => [ 'Build$cache' => '

Information about the cache for the build.

', 'CreateProjectInput$cache' => '

Stores recently used information so that it can be quickly accessed at a later time.

', 'Project$cache' => '

Information about the cache for the build project.

', 'StartBuildInput$cacheOverride' => '

A ProjectCache object specified for this build that overrides the one defined in the build project.

', 'UpdateProjectInput$cache' => '

Stores recently used information so that it can be quickly accessed at a later time.

', ], ], 'ProjectCacheModes' => [ 'base' => NULL, 'refs' => [ 'ProjectCache$modes' => '

If you use a LOCAL cache, the local cache mode. You can use one or more local cache modes at the same time.

', ], ], 'ProjectDescription' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$description' => '

A description that makes the build project easy to identify.

', 'Project$description' => '

A description that makes the build project easy to identify.

', 'UpdateProjectInput$description' => '

A new or replacement description of the build project.

', ], ], 'ProjectEnvironment' => [ 'base' => '

Information about the build environment of the build project.

', 'refs' => [ 'Build$environment' => '

Information about the build environment for this build.

', 'CreateProjectInput$environment' => '

Information about the build environment for the build project.

', 'Project$environment' => '

Information about the build environment for this build project.

', 'UpdateProjectInput$environment' => '

Information to be changed about the build environment for the build project.

', ], ], 'ProjectName' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$name' => '

The name of the build project.

', 'CreateWebhookInput$projectName' => '

The name of the AWS CodeBuild project.

', 'DeleteWebhookInput$projectName' => '

The name of the AWS CodeBuild project.

', 'Project$name' => '

The name of the build project.

', 'UpdateWebhookInput$projectName' => '

The name of the AWS CodeBuild project.

', ], ], 'ProjectNames' => [ 'base' => NULL, 'refs' => [ 'BatchGetProjectsInput$names' => '

The names of the build projects.

', 'BatchGetProjectsOutput$projectsNotFound' => '

The names of build projects for which information could not be found.

', 'ListProjectsOutput$projects' => '

The list of build project names, with each build project name representing a single build project.

', ], ], 'ProjectSecondarySourceVersions' => [ 'base' => NULL, 'refs' => [ 'Build$secondarySourceVersions' => '

An array of ProjectSourceVersion objects. Each ProjectSourceVersion must be one of:

', 'StartBuildInput$secondarySourcesVersionOverride' => '

An array of ProjectSourceVersion objects that specify one or more versions of the project\'s secondary sources to be used for this build only.

', ], ], 'ProjectSortByType' => [ 'base' => NULL, 'refs' => [ 'ListProjectsInput$sortBy' => '

The criterion to be used to list build project names. Valid values include:

Use sortOrder to specify in what order to list the build project names based on the preceding criteria.

', ], ], 'ProjectSource' => [ 'base' => '

Information about the build input source code for the build project.

', 'refs' => [ 'Build$source' => '

Information about the source code to be built.

', 'CreateProjectInput$source' => '

Information about the build input source code for the build project.

', 'Project$source' => '

Information about the build input source code for this build project.

', 'ProjectSources$member' => NULL, 'UpdateProjectInput$source' => '

Information to be changed about the build input source code for the build project.

', ], ], 'ProjectSourceVersion' => [ 'base' => '

A source identifier and its corresponding version.

', 'refs' => [ 'ProjectSecondarySourceVersions$member' => NULL, ], ], 'ProjectSources' => [ 'base' => NULL, 'refs' => [ 'Build$secondarySources' => '

An array of ProjectSource objects.

', 'CreateProjectInput$secondarySources' => '

An array of ProjectSource objects.

', 'Project$secondarySources' => '

An array of ProjectSource objects.

', 'StartBuildInput$secondarySourcesOverride' => '

An array of ProjectSource objects.

', 'UpdateProjectInput$secondarySources' => '

An array of ProjectSource objects.

', ], ], 'Projects' => [ 'base' => NULL, 'refs' => [ 'BatchGetProjectsOutput$projects' => '

Information about the requested build projects.

', ], ], 'RegistryCredential' => [ 'base' => '

Information about credentials that provide access to a private Docker registry. When this is set:

For more information, see Private Registry with AWS Secrets Manager Sample for AWS CodeBuild.

', 'refs' => [ 'ProjectEnvironment$registryCredential' => '

The credentials for access to a private registry.

', 'StartBuildInput$registryCredentialOverride' => '

The credentials for access to a private registry.

', ], ], 'ResourceAlreadyExistsException' => [ 'base' => '

The specified AWS resource cannot be created, because an AWS resource with the same settings already exists.

', 'refs' => [], ], 'ResourceNotFoundException' => [ 'base' => '

The specified AWS resource cannot be found.

', 'refs' => [], ], 'S3LogsConfig' => [ 'base' => '

Information about S3 logs for a build project.

', 'refs' => [ 'LogsConfig$s3Logs' => '

Information about logs built to an S3 bucket for a build project. S3 logs are not enabled by default.

', 'LogsLocation$s3Logs' => '

Information about S3 logs for a build project.

', ], ], 'SecurityGroupIds' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$securityGroupIds' => '

A list of one or more security groups IDs in your Amazon VPC.

', ], ], 'SensitiveNonEmptyString' => [ 'base' => NULL, 'refs' => [ 'ImportSourceCredentialsInput$token' => '

For GitHub or GitHub Enterprise, this is the personal access token. For Bitbucket, this is the app password.

', ], ], 'ServerType' => [ 'base' => NULL, 'refs' => [ 'ImportSourceCredentialsInput$serverType' => '

The source provider used for this project.

', 'SourceCredentialsInfo$serverType' => '

The type of source provider. The valid options are GITHUB, GITHUB_ENTERPRISE, or BITBUCKET.

', ], ], 'SortOrderType' => [ 'base' => NULL, 'refs' => [ 'ListBuildsForProjectInput$sortOrder' => '

The order to list build IDs. Valid values include:

', 'ListBuildsInput$sortOrder' => '

The order to list build IDs. Valid values include:

', 'ListProjectsInput$sortOrder' => '

The order in which to list build projects. Valid values include:

Use sortBy to specify the criterion to be used to list build project names.

', ], ], 'SourceAuth' => [ 'base' => '

Information about the authorization settings for AWS CodeBuild to access the source code to be built.

This information is for the AWS CodeBuild console\'s use only. Your code should not get or set this information directly.

', 'refs' => [ 'ProjectSource$auth' => '

Information about the authorization settings for AWS CodeBuild to access the source code to be built.

This information is for the AWS CodeBuild console\'s use only. Your code should not get or set this information directly.

', 'StartBuildInput$sourceAuthOverride' => '

An authorization type for this build that overrides the one defined in the build project. This override applies only if the build project\'s source is BitBucket or GitHub.

', ], ], 'SourceAuthType' => [ 'base' => NULL, 'refs' => [ 'SourceAuth$type' => '

This data type is deprecated and is no longer accurate or used.

The authorization type to use. The only valid value is OAUTH, which represents the OAuth authorization type.

', ], ], 'SourceCredentialsInfo' => [ 'base' => '

Information about the credentials for a GitHub, GitHub Enterprise, or Bitbucket repository.

', 'refs' => [ 'SourceCredentialsInfos$member' => NULL, ], ], 'SourceCredentialsInfos' => [ 'base' => NULL, 'refs' => [ 'ListSourceCredentialsOutput$sourceCredentialsInfos' => '

A list of SourceCredentialsInfo objects. Each SourceCredentialsInfo object includes the authentication type, token ARN, and type of source provider for one set of credentials.

', ], ], 'SourceType' => [ 'base' => NULL, 'refs' => [ 'ProjectSource$type' => '

The type of repository that contains the source code to be built. Valid values include:

', 'StartBuildInput$sourceTypeOverride' => '

A source input type, for this build, that overrides the source input defined in the build project.

', ], ], 'StartBuildInput' => [ 'base' => NULL, 'refs' => [], ], 'StartBuildOutput' => [ 'base' => NULL, 'refs' => [], ], 'StatusType' => [ 'base' => NULL, 'refs' => [ 'Build$buildStatus' => '

The current status of the build. Valid values include:

', 'BuildPhase$phaseStatus' => '

The current status of the build phase. Valid values include:

', ], ], 'StopBuildInput' => [ 'base' => NULL, 'refs' => [], ], 'StopBuildOutput' => [ 'base' => NULL, 'refs' => [], ], 'String' => [ 'base' => NULL, 'refs' => [ 'Build$currentPhase' => '

The current build phase.

', 'Build$initiator' => '

The entity that started the build. Valid values include:

', 'BuildArtifacts$location' => '

Information about the location of the build artifacts.

', 'BuildArtifacts$sha256sum' => '

The SHA-256 hash of the build artifact.

You can use this hash along with a checksum tool to confirm file integrity and authenticity.

This value is available only if the build project\'s packaging value is set to ZIP.

', 'BuildArtifacts$md5sum' => '

The MD5 hash of the build artifact.

You can use this hash along with a checksum tool to confirm file integrity and authenticity.

This value is available only if the build project\'s packaging value is set to ZIP.

', 'BuildArtifacts$artifactIdentifier' => '

An identifier for this artifact definition.

', 'BuildNotDeleted$statusCode' => '

Additional information about the build that could not be successfully deleted.

', 'CloudWatchLogsConfig$groupName' => '

The group name of the logs in Amazon CloudWatch Logs. For more information, see Working with Log Groups and Log Streams.

', 'CloudWatchLogsConfig$streamName' => '

The prefix of the stream name of the Amazon CloudWatch Logs. For more information, see Working with Log Groups and Log Streams.

', 'CreateWebhookInput$branchFilter' => '

A regular expression used to determine which repository branches are built when a webhook is triggered. If the name of a branch matches the regular expression, then it is built. If branchFilter is empty, then all branches are built.

It is recommended that you use filterGroups instead of branchFilter.

', 'EnvironmentImage$name' => '

The name of the Docker image.

', 'EnvironmentImage$description' => '

The description of the Docker image.

', 'EnvironmentVariable$value' => '

The value of the environment variable.

We strongly discourage the use of environment variables to store sensitive values, especially AWS secret key IDs and secret access keys. Environment variables can be displayed in plain text using the AWS CodeBuild console and the AWS Command Line Interface (AWS CLI).

', 'ImageVersions$member' => NULL, 'ListBuildsForProjectInput$nextToken' => '

During a previous call, if there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call. To get all of the items in the list, keep calling this operation with each subsequent next token that is returned, until no more next tokens are returned.

', 'ListBuildsForProjectOutput$nextToken' => '

If there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call.

', 'ListBuildsInput$nextToken' => '

During a previous call, if there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call. To get all of the items in the list, keep calling this operation with each subsequent next token that is returned, until no more next tokens are returned.

', 'ListBuildsOutput$nextToken' => '

If there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call.

', 'ListProjectsOutput$nextToken' => '

If there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call.

', 'LogsLocation$groupName' => '

The name of the Amazon CloudWatch Logs group for the build logs.

', 'LogsLocation$streamName' => '

The name of the Amazon CloudWatch Logs stream for the build logs.

', 'LogsLocation$deepLink' => '

The URL to an individual build log in Amazon CloudWatch Logs.

', 'LogsLocation$s3DeepLink' => '

The URL to a build log in an S3 bucket.

', 'PhaseContext$statusCode' => '

The status code for the context of the build phase.

', 'PhaseContext$message' => '

An explanation of the build phase\'s context. This might include a command ID and an exit code.

', 'Project$arn' => '

The Amazon Resource Name (ARN) of the build project.

', 'ProjectArtifacts$location' => '

Information about the build output artifact location:

', 'ProjectArtifacts$path' => '

Along with namespaceType and name, the pattern that AWS CodeBuild uses to name and store the output artifact:

For example, if path is set to MyArtifacts, namespaceType is set to NONE, and name is set to MyArtifact.zip, the output artifact is stored in the output bucket at MyArtifacts/MyArtifact.zip.

', 'ProjectArtifacts$name' => '

Along with path and namespaceType, the pattern that AWS CodeBuild uses to name and store the output artifact:

For example:

', 'ProjectArtifacts$artifactIdentifier' => '

An identifier for this artifact definition.

', 'ProjectBadge$badgeRequestUrl' => '

The publicly accessible URL through which you can access the build badge for your project.

', 'ProjectCache$location' => '

Information about the cache location:

', 'ProjectEnvironment$certificate' => '

The certificate to use with this build project.

', 'ProjectSource$location' => '

Information about the location of the source code to be built. Valid values include:

', 'ProjectSource$buildspec' => '

The build spec declaration to use for the builds in this build project.

If this value is not specified, a build spec must be included along with the source code to be built.

', 'ProjectSource$sourceIdentifier' => '

An identifier for this project source.

', 'ProjectSourceVersion$sourceIdentifier' => '

An identifier for a source in the build project.

', 'ProjectSourceVersion$sourceVersion' => '

The source version for the corresponding source identifier. If specified, must be one of:

', 'S3LogsConfig$location' => '

The ARN of an S3 bucket and the path prefix for S3 logs. If your Amazon S3 bucket name is my-bucket, and your path prefix is build-log, then acceptable formats are my-bucket/build-log or arn:aws:s3:::my-bucket/build-log.

', 'SourceAuth$resource' => '

The resource value that applies to the specified authorization type.

', 'StartBuildInput$sourceVersion' => '

A version of the build input to be built, for this build only. If not specified, the latest version is used. If specified, must be one of:

', 'StartBuildInput$sourceLocationOverride' => '

A location that overrides, for this build, the source location for the one defined in the build project.

', 'StartBuildInput$buildspecOverride' => '

A build spec declaration that overrides, for this build only, the latest one already defined in the build project.

', 'StartBuildInput$certificateOverride' => '

The name of a certificate for this build that overrides the one specified in the build project.

', 'StartBuildInput$idempotencyToken' => '

A unique, case sensitive identifier you provide to ensure the idempotency of the StartBuild request. The token is included in the StartBuild request and is valid for 12 hours. If you repeat the StartBuild request with the same token, but change a parameter, AWS CodeBuild returns a parameter mismatch error.

', 'UpdateWebhookInput$branchFilter' => '

A regular expression used to determine which repository branches are built when a webhook is triggered. If the name of a branch matches the regular expression, then it is built. If branchFilter is empty, then all branches are built.

It is recommended that you use filterGroups instead of branchFilter.

', 'Webhook$branchFilter' => '

A regular expression used to determine which repository branches are built when a webhook is triggered. If the name of a branch matches the regular expression, then it is built. If branchFilter is empty, then all branches are built.

It is recommended that you use filterGroups instead of branchFilter.

', 'WebhookFilter$pattern' => '

For a WebHookFilter that uses EVENT type, a comma-separated string that specifies one or more events. For example, the webhook filter PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED allows all push, pull request created, and pull request updated events to trigger a build.

For a WebHookFilter that uses any of the other filter types, a regular expression pattern. For example, a WebHookFilter that uses HEAD_REF for its type and the pattern ^refs/heads/ triggers a build when the head reference is a branch with a reference name refs/heads/branch-name.

', ], ], 'Subnets' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$subnets' => '

A list of one or more subnet IDs in your Amazon VPC.

', ], ], 'Tag' => [ 'base' => '

A tag, consisting of a key and a value.

This tag is available for use by AWS services that support tags in AWS CodeBuild.

', 'refs' => [ 'TagList$member' => NULL, ], ], 'TagList' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$tags' => '

A set of tags for this build project.

These tags are available for use by AWS services that support AWS CodeBuild build project tags.

', 'Project$tags' => '

The tags for this build project.

These tags are available for use by AWS services that support AWS CodeBuild build project tags.

', 'UpdateProjectInput$tags' => '

The replacement set of tags for this build project.

These tags are available for use by AWS services that support AWS CodeBuild build project tags.

', ], ], 'TimeOut' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$timeoutInMinutes' => '

How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before it times out any build that has not been marked as completed. The default is 60 minutes.

', 'CreateProjectInput$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', 'Project$timeoutInMinutes' => '

How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed. The default is 60 minutes.

', 'Project$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', 'StartBuildInput$timeoutInMinutesOverride' => '

The number of build timeout minutes, from 5 to 480 (8 hours), that overrides, for this build only, the latest setting already defined in the build project.

', 'StartBuildInput$queuedTimeoutInMinutesOverride' => '

The number of minutes a build is allowed to be queued before it times out.

', 'UpdateProjectInput$timeoutInMinutes' => '

The replacement value in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed.

', 'UpdateProjectInput$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', ], ], 'Timestamp' => [ 'base' => NULL, 'refs' => [ 'Build$startTime' => '

When the build process started, expressed in Unix time format.

', 'Build$endTime' => '

When the build process ended, expressed in Unix time format.

', 'BuildPhase$startTime' => '

When the build phase started, expressed in Unix time format.

', 'BuildPhase$endTime' => '

When the build phase ended, expressed in Unix time format.

', 'Project$created' => '

When the build project was created, expressed in Unix time format.

', 'Project$lastModified' => '

When the build project\'s settings were last modified, expressed in Unix time format.

', 'Webhook$lastModifiedSecret' => '

A timestamp that indicates the last time a repository\'s secret token was modified.

', ], ], 'UpdateProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWebhookInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWebhookOutput' => [ 'base' => NULL, 'refs' => [], ], 'ValueInput' => [ 'base' => NULL, 'refs' => [ 'Tag$value' => '

The tag\'s value.

', ], ], 'VpcConfig' => [ 'base' => '

Information about the VPC configuration that AWS CodeBuild accesses.

', 'refs' => [ 'Build$vpcConfig' => '

If your AWS CodeBuild project accesses resources in an Amazon VPC, you provide this parameter that identifies the VPC ID and the list of security group IDs and subnet IDs. The security groups and subnets must belong to the same VPC. You must provide at least one security group and one subnet ID.

', 'CreateProjectInput$vpcConfig' => '

VpcConfig enables AWS CodeBuild to access resources in an Amazon VPC.

', 'Project$vpcConfig' => '

Information about the VPC configuration that AWS CodeBuild accesses.

', 'UpdateProjectInput$vpcConfig' => '

VpcConfig enables AWS CodeBuild to access resources in an Amazon VPC.

', ], ], 'Webhook' => [ 'base' => '

Information about a webhook that connects repository events to a build project in AWS CodeBuild.

', 'refs' => [ 'CreateWebhookOutput$webhook' => '

Information about a webhook that connects repository events to a build project in AWS CodeBuild.

', 'Project$webhook' => '

Information about a webhook that connects repository events to a build project in AWS CodeBuild.

', 'UpdateWebhookOutput$webhook' => '

Information about a repository\'s webhook that is associated with a project in AWS CodeBuild.

', ], ], 'WebhookFilter' => [ 'base' => '

A filter used to determine which webhooks trigger a build.

', 'refs' => [ 'FilterGroup$member' => NULL, ], ], 'WebhookFilterType' => [ 'base' => NULL, 'refs' => [ 'WebhookFilter$type' => '

The type of webhook filter. There are five webhook filter types: EVENT, ACTOR_ACCOUNT_ID, HEAD_REF, BASE_REF, and FILE_PATH.

EVENT

A webhook event triggers a build when the provided pattern matches one of four event types: PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED, and PULL_REQUEST_REOPENED. The EVENT patterns are specified as a comma-separated string. For example, PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED filters all push, pull request created, and pull request updated events.

The PULL_REQUEST_REOPENED event type works with GitHub and GitHub Enterprise only.

ACTOR_ACCOUNT_ID

A webhook event triggers a build when a GitHub, GitHub Enterprise, or Bitbucket account ID matches the regular expression pattern.

HEAD_REF

A webhook event triggers a build when the head reference matches the regular expression pattern. For example, refs/heads/branch-name and refs/tags/tag-name.

Works with GitHub and GitHub Enterprise push, GitHub and GitHub Enterprise pull request, Bitbucket push, and Bitbucket pull request events.

BASE_REF

A webhook event triggers a build when the base reference matches the regular expression pattern. For example, refs/heads/branch-name.

Works with pull request events only.

FILE_PATH

A webhook triggers a build when the path of a changed file matches the regular expression pattern.

Works with GitHub and GitHub Enterprise push events only.

', ], ], 'WrapperBoolean' => [ 'base' => NULL, 'refs' => [ 'BuildArtifacts$overrideArtifactName' => '

If this flag is set, a name specified in the build spec file overrides the artifact name. The name specified in a build spec file is calculated at build time and uses the Shell Command Language. For example, you can append a date and time to your artifact name so that it is always unique.

', 'BuildArtifacts$encryptionDisabled' => '

Information that tells you if encryption for build artifacts is disabled.

', 'CreateProjectInput$badgeEnabled' => '

Set this to true to generate a publicly accessible URL for your project\'s build badge.

', 'ProjectArtifacts$overrideArtifactName' => '

If this flag is set, a name specified in the build spec file overrides the artifact name. The name specified in a build spec file is calculated at build time and uses the Shell Command Language. For example, you can append a date and time to your artifact name so that it is always unique.

', 'ProjectArtifacts$encryptionDisabled' => '

Set to true if you do not want your output artifacts encrypted. This option is valid only if your artifacts type is Amazon Simple Storage Service (Amazon S3). If this is set with another artifacts type, an invalidInputException is thrown.

', 'ProjectEnvironment$privilegedMode' => '

Enables running the Docker daemon inside a Docker container. Set to true only if the build project is used to build Docker images, and the specified build environment image is not provided by AWS CodeBuild with Docker support. Otherwise, all associated builds that attempt to interact with the Docker daemon fail. You must also start the Docker daemon so that builds can interact with it. One way to do this is to initialize the Docker daemon during the install phase of your build spec by running the following build commands. (Do not run these commands if the specified build environment image is provided by AWS CodeBuild with Docker support.)

If the operating system\'s base image is Ubuntu Linux:

- nohup /usr/local/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --storage-driver=overlay&
- timeout 15 sh -c "until docker info; do echo .; sleep 1; done"

If the operating system\'s base image is Alpine Linux, add the -t argument to timeout:

- nohup /usr/local/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --storage-driver=overlay&
- timeout -t 15 sh -c "until docker info; do echo .; sleep 1; done"

', 'ProjectSource$reportBuildStatus' => '

Set to true to report the status of a build\'s start and finish to your source provider. This option is valid only when your source provider is GitHub, GitHub Enterprise, or Bitbucket. If this is set and you use a different source provider, an invalidInputException is thrown.

', 'ProjectSource$insecureSsl' => '

Enable this flag to ignore SSL warnings while connecting to the project source code.

', 'StartBuildInput$insecureSslOverride' => '

Enable this flag to override the insecure SSL setting that is specified in the build project. The insecure SSL setting determines whether to ignore SSL warnings while connecting to the project source code. This override applies only if the build\'s source is GitHub Enterprise.

', 'StartBuildInput$reportBuildStatusOverride' => '

Set to true to report to your source provider the status of a build\'s start and completion. If you use this option with a source provider other than GitHub, GitHub Enterprise, or Bitbucket, an invalidInputException is thrown.

', 'StartBuildInput$privilegedModeOverride' => '

Enable this flag to override privileged mode in the build project.

', 'UpdateProjectInput$badgeEnabled' => '

Set this to true to generate a publicly accessible URL for your project\'s build badge.

', 'WebhookFilter$excludeMatchedPattern' => '

Used to indicate that the pattern determines which webhook events do not trigger a build. If true, then a webhook event that does not match the pattern triggers a build. If false, then a webhook event that matches the pattern triggers a build.

', ], ], 'WrapperInt' => [ 'base' => NULL, 'refs' => [ 'Build$timeoutInMinutes' => '

How long, in minutes, for AWS CodeBuild to wait before timing out this build if it does not get marked as completed.

', 'Build$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', ], ], 'WrapperLong' => [ 'base' => NULL, 'refs' => [ 'BuildPhase$durationInSeconds' => '

How long, in seconds, between the starting and ending times of the build\'s phase.

', ], ], ],]; +return [ 'version' => '2.0', 'service' => 'AWS CodeBuild

AWS CodeBuild is a fully managed build service in the cloud. AWS CodeBuild compiles your source code, runs unit tests, and produces artifacts that are ready to deploy. AWS CodeBuild eliminates the need to provision, manage, and scale your own build servers. It provides prepackaged build environments for the most popular programming languages and build tools, such as Apache Maven, Gradle, and more. You can also fully customize build environments in AWS CodeBuild to use your own build tools. AWS CodeBuild scales automatically to meet peak build requests. You pay only for the build time you consume. For more information about AWS CodeBuild, see the AWS CodeBuild User Guide.

AWS CodeBuild supports these operations:

', 'operations' => [ 'BatchDeleteBuilds' => '

Deletes one or more builds.

', 'BatchGetBuilds' => '

Gets information about builds.

', 'BatchGetProjects' => '

Gets information about build projects.

', 'CreateProject' => '

Creates a build project.
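
A minimal sketch of calling this operation through the AWS SDK for PHP client that this file feeds, assembled from the source, environment, and artifacts shapes documented below; the project name, repository URL, role ARN, and image tag are placeholders, not values taken from this file:

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

// All names, URLs, and ARNs below are placeholders.
$client->createProject([
    'name'        => 'my-project',
    'serviceRole' => 'arn:aws:iam::123456789012:role/my-codebuild-role',
    'source'      => [
        'type'          => 'GITHUB',
        'location'      => 'https://github.com/example/repo.git',
        'gitCloneDepth' => 1,
        // Added in this API revision: fetch the repository's Git submodules.
        'gitSubmodulesConfig' => ['fetchSubmodules' => true],
    ],
    'artifacts'   => ['type' => 'NO_ARTIFACTS'],
    'environment' => [
        'type'        => 'LINUX_CONTAINER',
        'image'       => 'aws/codebuild/standard:1.0',
        'computeType' => 'BUILD_GENERAL1_SMALL',
    ],
]);
```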

', 'CreateWebhook' => '

For an existing AWS CodeBuild build project that has its source code stored in a GitHub or Bitbucket repository, enables AWS CodeBuild to start rebuilding the source code every time a code change is pushed to the repository.

If you enable webhooks for an AWS CodeBuild project, and the project is used as a build step in AWS CodePipeline, then two identical builds are created for each commit. One build is triggered through webhooks, and one through AWS CodePipeline. Because billing is on a per-build basis, you are billed for both builds. Therefore, if you are using AWS CodePipeline, we recommend that you disable webhooks in AWS CodeBuild. In the AWS CodeBuild console, clear the Webhook box. For more information, see step 5 in Change a Build Project\'s Settings.
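
A hedged sketch of the call, using the filter-group model documented under FilterGroups and WebhookFilterType below; the project name and patterns are placeholders:

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

// One filter group: a build is triggered only if every filter in it passes.
$result = $client->createWebhook([
    'projectName'  => 'my-project', // placeholder
    'filterGroups' => [
        [
            ['type' => 'EVENT',    'pattern' => 'PUSH, PULL_REQUEST_CREATED'],
            ['type' => 'HEAD_REF', 'pattern' => '^refs/heads/master$'],
        ],
    ],
]);

echo $result['webhook']['payloadUrl'], PHP_EOL; // where repository events are sent
```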

', 'DeleteProject' => '

Deletes a build project.

', 'DeleteSourceCredentials' => '

Deletes a set of GitHub, GitHub Enterprise, or Bitbucket source credentials.

', 'DeleteWebhook' => '

For an existing AWS CodeBuild build project that has its source code stored in a GitHub or Bitbucket repository, stops AWS CodeBuild from rebuilding the source code every time a code change is pushed to the repository.

', 'ImportSourceCredentials' => '

Imports the source repository credentials for an AWS CodeBuild project that has its source code stored in a GitHub, GitHub Enterprise, or Bitbucket repository.

', 'InvalidateProjectCache' => '

Resets the cache for a project.

', 'ListBuilds' => '

Gets a list of build IDs, with each build ID representing a single build.

', 'ListBuildsForProject' => '

Gets a list of build IDs for the specified build project, with each build ID representing a single build.

', 'ListCuratedEnvironmentImages' => '

Gets information about Docker images that are managed by AWS CodeBuild.

', 'ListProjects' => '

Gets a list of build project names, with each build project name representing a single build project.

', 'ListSourceCredentials' => '

Returns a list of SourceCredentialsInfo objects.

', 'StartBuild' => '

Starts running a build.

', 'StopBuild' => '

Attempts to stop running a build.

', 'UpdateProject' => '

Changes the settings of a build project.

', 'UpdateWebhook' => '

Updates the webhook associated with an AWS CodeBuild build project.

If you use Bitbucket for your repository, rotateSecret is ignored.

', ], 'shapes' => [ 'AccountLimitExceededException' => [ 'base' => '

An AWS service limit was exceeded for the calling AWS account.

', 'refs' => [], ], 'ArtifactNamespace' => [ 'base' => NULL, 'refs' => [ 'ProjectArtifacts$namespaceType' => '

Along with path and name, the pattern that AWS CodeBuild uses to determine the name and location to store the output artifact:

For example, if path is set to MyArtifacts, namespaceType is set to BUILD_ID, and name is set to MyArtifact.zip, the output artifact is stored in MyArtifacts/build-ID/MyArtifact.zip.

', ], ], 'ArtifactPackaging' => [ 'base' => NULL, 'refs' => [ 'ProjectArtifacts$packaging' => '

The type of build output artifact to create:

', ], ], 'ArtifactsType' => [ 'base' => NULL, 'refs' => [ 'ProjectArtifacts$type' => '

The type of build output artifact. Valid values include:

', ], ], 'AuthType' => [ 'base' => NULL, 'refs' => [ 'ImportSourceCredentialsInput$authType' => '

The type of authentication used to connect to a GitHub, GitHub Enterprise, or Bitbucket repository. An OAUTH connection is not supported by the API and must be created using the AWS CodeBuild console.

', 'SourceCredentialsInfo$authType' => '

The type of authentication used by the credentials. Valid options are OAUTH, BASIC_AUTH, or PERSONAL_ACCESS_TOKEN.

', ], ], 'BatchDeleteBuildsInput' => [ 'base' => NULL, 'refs' => [], ], 'BatchDeleteBuildsOutput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetBuildsInput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetBuildsOutput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetProjectsInput' => [ 'base' => NULL, 'refs' => [], ], 'BatchGetProjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'Boolean' => [ 'base' => NULL, 'refs' => [ 'Build$buildComplete' => '

Whether the build is complete. True if complete; otherwise, false.

', 'ProjectBadge$badgeEnabled' => '

Set this to true to generate a publicly accessible URL for your project\'s build badge.

', 'UpdateWebhookInput$rotateSecret' => '

A boolean value that specifies whether the associated GitHub repository\'s secret token should be updated. If you use Bitbucket for your repository, rotateSecret is ignored.

', ], ], 'Build' => [ 'base' => '

Information about a build.

', 'refs' => [ 'Builds$member' => NULL, 'StartBuildOutput$build' => '

Information about the build to be run.

', 'StopBuildOutput$build' => '

Information about the build.

', ], ], 'BuildArtifacts' => [ 'base' => '

Information about build output artifacts.

', 'refs' => [ 'Build$artifacts' => '

Information about the output artifacts for the build.

', 'BuildArtifactsList$member' => NULL, ], ], 'BuildArtifactsList' => [ 'base' => NULL, 'refs' => [ 'Build$secondaryArtifacts' => '

An array of ProjectArtifacts objects.

', ], ], 'BuildIds' => [ 'base' => NULL, 'refs' => [ 'BatchDeleteBuildsInput$ids' => '

The IDs of the builds to delete.

', 'BatchDeleteBuildsOutput$buildsDeleted' => '

The IDs of the builds that were successfully deleted.

', 'BatchGetBuildsInput$ids' => '

The IDs of the builds.

', 'BatchGetBuildsOutput$buildsNotFound' => '

The IDs of builds for which information could not be found.

', 'ListBuildsForProjectOutput$ids' => '

A list of build IDs for the specified build project, with each build ID representing a single build.

', 'ListBuildsOutput$ids' => '

A list of build IDs, with each build ID representing a single build.
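
For illustration, a small sketch that feeds the IDs returned by ListBuildsForProject into BatchGetBuilds (the project name is a placeholder):

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

// Newest build IDs first, then hydrate them into full Build structures.
$ids = $client->listBuildsForProject([
    'projectName' => 'my-project', // placeholder
    'sortOrder'   => 'DESCENDING',
])['ids'];

if ($ids) { // BatchGetBuilds requires at least one ID
    foreach ($client->batchGetBuilds(['ids' => $ids])['builds'] as $build) {
        echo $build['id'], ' => ', $build['buildStatus'], PHP_EOL;
    }
}
```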

', ], ], 'BuildNotDeleted' => [ 'base' => '

Information about a build that could not be successfully deleted.

', 'refs' => [ 'BuildsNotDeleted$member' => NULL, ], ], 'BuildPhase' => [ 'base' => '

Information about a stage for a build.

', 'refs' => [ 'BuildPhases$member' => NULL, ], ], 'BuildPhaseType' => [ 'base' => NULL, 'refs' => [ 'BuildPhase$phaseType' => '

The name of the build phase. Valid values include:

', ], ], 'BuildPhases' => [ 'base' => NULL, 'refs' => [ 'Build$phases' => '

Information about all previous build phases that are complete and information about any current build phase that is not yet complete.

', ], ], 'Builds' => [ 'base' => NULL, 'refs' => [ 'BatchGetBuildsOutput$builds' => '

Information about the requested builds.

', ], ], 'BuildsNotDeleted' => [ 'base' => NULL, 'refs' => [ 'BatchDeleteBuildsOutput$buildsNotDeleted' => '

Information about any builds that could not be successfully deleted.

', ], ], 'CacheMode' => [ 'base' => NULL, 'refs' => [ 'ProjectCacheModes$member' => NULL, ], ], 'CacheType' => [ 'base' => NULL, 'refs' => [ 'ProjectCache$type' => '

The type of cache used by the build project. Valid values include:

', ], ], 'CloudWatchLogsConfig' => [ 'base' => '

Information about Amazon CloudWatch Logs for a build project.

', 'refs' => [ 'LogsConfig$cloudWatchLogs' => '

Information about Amazon CloudWatch Logs for a build project. Amazon CloudWatch Logs are enabled by default.

', 'LogsLocation$cloudWatchLogs' => '

Information about Amazon CloudWatch Logs for a build project.

', ], ], 'ComputeType' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$computeType' => '

Information about the compute resources the build project uses. Available values include:

', 'StartBuildInput$computeTypeOverride' => '

The name of a compute type for this build that overrides the one specified in the build project.

', ], ], 'CreateProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateWebhookInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateWebhookOutput' => [ 'base' => NULL, 'refs' => [], ], 'CredentialProviderType' => [ 'base' => NULL, 'refs' => [ 'RegistryCredential$credentialProvider' => '

The service that created the credentials to access a private Docker registry. The valid value, SECRETS_MANAGER, is for AWS Secrets Manager.

', ], ], 'DeleteProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteSourceCredentialsInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteSourceCredentialsOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWebhookInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWebhookOutput' => [ 'base' => NULL, 'refs' => [], ], 'EnvironmentImage' => [ 'base' => '

Information about a Docker image that is managed by AWS CodeBuild.

', 'refs' => [ 'EnvironmentImages$member' => NULL, ], ], 'EnvironmentImages' => [ 'base' => NULL, 'refs' => [ 'EnvironmentLanguage$images' => '

The list of Docker images that are related by the specified programming language.

', ], ], 'EnvironmentLanguage' => [ 'base' => '

A set of Docker images that are related by programming language and are managed by AWS CodeBuild.

', 'refs' => [ 'EnvironmentLanguages$member' => NULL, ], ], 'EnvironmentLanguages' => [ 'base' => NULL, 'refs' => [ 'EnvironmentPlatform$languages' => '

The list of programming languages that are available for the specified platform.

', ], ], 'EnvironmentPlatform' => [ 'base' => '

A set of Docker images that are related by platform and are managed by AWS CodeBuild.

', 'refs' => [ 'EnvironmentPlatforms$member' => NULL, ], ], 'EnvironmentPlatforms' => [ 'base' => NULL, 'refs' => [ 'ListCuratedEnvironmentImagesOutput$platforms' => '

Information about supported platforms for Docker images that are managed by AWS CodeBuild.

', ], ], 'EnvironmentType' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$type' => '

The type of build environment to use for related builds.

', 'StartBuildInput$environmentTypeOverride' => '

A container type for this build that overrides the one specified in the build project.

', ], ], 'EnvironmentVariable' => [ 'base' => '

Information about an environment variable for a build project or a build.

', 'refs' => [ 'EnvironmentVariables$member' => NULL, ], ], 'EnvironmentVariableType' => [ 'base' => NULL, 'refs' => [ 'EnvironmentVariable$type' => '

The type of environment variable. Valid values include:

', ], ], 'EnvironmentVariables' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$environmentVariables' => '

A set of environment variables to make available to builds for this build project.

', 'StartBuildInput$environmentVariablesOverride' => '

A set of environment variables that overrides, for this build only, the latest ones already defined in the build project.

', ], ], 'FilterGroup' => [ 'base' => NULL, 'refs' => [ 'FilterGroups$member' => NULL, ], ], 'FilterGroups' => [ 'base' => NULL, 'refs' => [ 'CreateWebhookInput$filterGroups' => '

An array of arrays of WebhookFilter objects used to determine which webhooks are triggered. At least one WebhookFilter in the array must specify EVENT as its type.

For a build to be triggered, at least one filter group in the filterGroups array must pass. For a filter group to pass, each of its filters must pass.

', 'UpdateWebhookInput$filterGroups' => '

An array of arrays of WebhookFilter objects used to determine if a webhook event can trigger a build. A filter group must contain at least one EVENT WebhookFilter.

', 'Webhook$filterGroups' => '

An array of arrays of WebhookFilter objects used to determine which webhooks are triggered. At least one WebhookFilter in the array must specify EVENT as its type.

For a build to be triggered, at least one filter group in the filterGroups array must pass. For a filter group to pass, each of its filters must pass.

', ], ], 'GitCloneDepth' => [ 'base' => NULL, 'refs' => [ 'ProjectSource$gitCloneDepth' => '

Information about the Git clone depth for the build project.

', 'StartBuildInput$gitCloneDepthOverride' => '

The user-defined depth of history, with a minimum value of 0, that overrides, for this build only, any previous depth of history defined in the build project.

', ], ], 'GitSubmodulesConfig' => [ 'base' => '

Information about the Git submodules configuration for an AWS CodeBuild build project.

', 'refs' => [ 'ProjectSource$gitSubmodulesConfig' => '

Information about the Git submodules configuration for the build project.

', 'StartBuildInput$gitSubmodulesConfigOverride' => '

Information about the Git submodules configuration for this build of an AWS CodeBuild build project.
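
A minimal sketch of a per-build override, assuming the project itself does not fetch submodules; the project name, branch, and token are placeholders:

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

$result = $client->startBuild([
    'projectName'   => 'my-project', // placeholder
    'sourceVersion' => 'master',
    // For this build only, fetch Git submodules along with the source.
    'gitSubmodulesConfigOverride' => ['fetchSubmodules' => true],
    // Retrying with the same token within 12 hours returns the same build.
    'idempotencyToken' => 'deploy-001',
]);

echo $result['build']['id'], PHP_EOL;
```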

', ], ], 'ImagePullCredentialsType' => [ 'base' => NULL, 'refs' => [ 'ProjectEnvironment$imagePullCredentialsType' => '

The type of credentials AWS CodeBuild uses to pull images in your build. There are two valid values:

When you use a cross-account or private registry image, you must use SERVICE_ROLE credentials. When you use an AWS CodeBuild curated image, you must use CODEBUILD credentials.

', 'StartBuildInput$imagePullCredentialsTypeOverride' => '

The type of credentials AWS CodeBuild uses to pull images in your build. There are two valid values:

When using a cross-account or private registry image, you must use SERVICE_ROLE credentials. When using an AWS CodeBuild curated image, you must use CODEBUILD credentials.

', ], ], 'ImageVersions' => [ 'base' => NULL, 'refs' => [ 'EnvironmentImage$versions' => '

A list of environment image versions.

', ], ], 'ImportSourceCredentialsInput' => [ 'base' => NULL, 'refs' => [], ], 'ImportSourceCredentialsOutput' => [ 'base' => NULL, 'refs' => [], ], 'InvalidInputException' => [ 'base' => '

The input value that was provided is not valid.

', 'refs' => [], ], 'InvalidateProjectCacheInput' => [ 'base' => NULL, 'refs' => [], ], 'InvalidateProjectCacheOutput' => [ 'base' => NULL, 'refs' => [], ], 'KeyInput' => [ 'base' => NULL, 'refs' => [ 'Tag$key' => '

The tag\'s key.

', ], ], 'LanguageType' => [ 'base' => NULL, 'refs' => [ 'EnvironmentLanguage$language' => '

The programming language for the Docker images.

', ], ], 'ListBuildsForProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'ListBuildsForProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBuildsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListBuildsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListCuratedEnvironmentImagesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListCuratedEnvironmentImagesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListProjectsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListProjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListSourceCredentialsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListSourceCredentialsOutput' => [ 'base' => NULL, 'refs' => [], ], 'LogsConfig' => [ 'base' => '

Information about logs for a build project. These can be logs in Amazon CloudWatch Logs, logs uploaded to a specified S3 bucket, or both.

', 'refs' => [ 'CreateProjectInput$logsConfig' => '

Information about logs for the build project. These can be logs in Amazon CloudWatch Logs, logs uploaded to a specified S3 bucket, or both.

', 'Project$logsConfig' => '

Information about logs for the build project. A project can create logs in Amazon CloudWatch Logs, an S3 bucket, or both.

', 'StartBuildInput$logsConfigOverride' => '

Log settings for this build that override the log settings defined in the build project.

', 'UpdateProjectInput$logsConfig' => '

Information about logs for the build project. A project can create logs in Amazon CloudWatch Logs, logs in an S3 bucket, or both.

', ], ], 'LogsConfigStatusType' => [ 'base' => NULL, 'refs' => [ 'CloudWatchLogsConfig$status' => '

The current status of the logs in Amazon CloudWatch Logs for a build project. Valid values are:

', 'S3LogsConfig$status' => '

The current status of the S3 build logs. Valid values are:

', ], ], 'LogsLocation' => [ 'base' => '

Information about build logs in Amazon CloudWatch Logs.

', 'refs' => [ 'Build$logs' => '

Information about the build\'s logs in Amazon CloudWatch Logs.

', ], ], 'NetworkInterface' => [ 'base' => '

Describes a network interface.

', 'refs' => [ 'Build$networkInterface' => '

Describes a network interface.

', ], ], 'NonEmptyString' => [ 'base' => NULL, 'refs' => [ 'Build$id' => '

The unique ID for the build.

', 'Build$arn' => '

The Amazon Resource Name (ARN) of the build.

', 'Build$sourceVersion' => '

Any version identifier for the version of the source code to be built.

', 'Build$resolvedSourceVersion' => '

An identifier for the version of this build\'s source code.

', 'Build$projectName' => '

The name of the AWS CodeBuild project.

', 'Build$serviceRole' => '

The name of a service role used for this build.

', 'Build$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key.

You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK\'s alias (using the format alias/alias-name ).

', 'BuildIds$member' => NULL, 'BuildNotDeleted$id' => '

The ID of the build that could not be successfully deleted.

', 'CreateProjectInput$serviceRole' => '

The ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

', 'CreateProjectInput$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key.

You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK\'s alias (using the format alias/alias-name ).

', 'DeleteProjectInput$name' => '

The name of the build project.

', 'DeleteSourceCredentialsInput$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'DeleteSourceCredentialsOutput$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'EnvironmentVariable$name' => '

The name or key of the environment variable.

', 'ImportSourceCredentialsInput$username' => '

The Bitbucket username when the authType is BASIC_AUTH. This parameter is not valid for other types of source providers or connections.

', 'ImportSourceCredentialsOutput$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'InvalidateProjectCacheInput$projectName' => '

The name of the AWS CodeBuild build project that the cache is reset for.

', 'ListBuildsForProjectInput$projectName' => '

The name of the AWS CodeBuild project.

', 'ListProjectsInput$nextToken' => '

During a previous call, if there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call. To get all of the items in the list, keep calling this operation with each subsequent next token that is returned, until no more next tokens are returned.
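
A sketch of that loop against ListProjects; the SDK's Result::get() returns null once no further next token is present:

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

// Keep passing the returned next token back until none is returned.
$names = [];
$token = null;
do {
    $page  = $client->listProjects($token ? ['nextToken' => $token] : []);
    $names = array_merge($names, $page['projects']);
    $token = $page->get('nextToken');
} while ($token);

print_r($names);
```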

', 'NetworkInterface$subnetId' => '

The ID of the subnet.

', 'NetworkInterface$networkInterfaceId' => '

The ID of the network interface.

', 'Project$serviceRole' => '

The ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

', 'Project$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key.

You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK\'s alias (using the format alias/alias-name ).

', 'ProjectEnvironment$image' => '

The image tag or image digest that identifies the Docker image to use for this build project. Use the following formats:

', 'ProjectNames$member' => NULL, 'RegistryCredential$credential' => '

The Amazon Resource Name (ARN) or name of credentials created using AWS Secrets Manager.

The credential can use the name of the credentials only if they exist in your current region.

', 'SecurityGroupIds$member' => NULL, 'SourceCredentialsInfo$arn' => '

The Amazon Resource Name (ARN) of the token.

', 'StartBuildInput$projectName' => '

The name of the AWS CodeBuild build project to start running a build.

', 'StartBuildInput$imageOverride' => '

The name of an image for this build that overrides the one specified in the build project.

', 'StartBuildInput$serviceRoleOverride' => '

The name of a service role for this build that overrides the one specified in the build project.

', 'StopBuildInput$id' => '

The ID of the build.

', 'Subnets$member' => NULL, 'UpdateProjectInput$name' => '

The name of the build project.

You cannot change a build project\'s name.

', 'UpdateProjectInput$serviceRole' => '

The replacement ARN of the AWS Identity and Access Management (IAM) role that enables AWS CodeBuild to interact with dependent AWS services on behalf of the AWS account.

', 'UpdateProjectInput$encryptionKey' => '

The AWS Key Management Service (AWS KMS) customer master key (CMK) to be used for encrypting the build output artifacts.

You can use a cross-account KMS key to encrypt the build output artifacts if your service role has permission to that key.

You can specify either the Amazon Resource Name (ARN) of the CMK or, if available, the CMK\'s alias (using the format alias/alias-name ).

', 'VpcConfig$vpcId' => '

The ID of the Amazon VPC.

', 'Webhook$url' => '

The URL to the webhook.

', 'Webhook$payloadUrl' => '

The AWS CodeBuild endpoint where webhook events are sent.

', 'Webhook$secret' => '

The secret token of the associated repository.

A Bitbucket webhook does not support secret.

', ], ], 'OAuthProviderException' => [ 'base' => '

There was a problem with the underlying OAuth provider.

', 'refs' => [], ], 'PhaseContext' => [ 'base' => '

Additional information about a build phase that has an error. You can use this information for troubleshooting.

', 'refs' => [ 'PhaseContexts$member' => NULL, ], ], 'PhaseContexts' => [ 'base' => NULL, 'refs' => [ 'BuildPhase$contexts' => '

Additional information about a build phase, especially to help troubleshoot a failed build.

', ], ], 'PlatformType' => [ 'base' => NULL, 'refs' => [ 'EnvironmentPlatform$platform' => '

The platform\'s name.

', ], ], 'Project' => [ 'base' => '

Information about a build project.

', 'refs' => [ 'CreateProjectOutput$project' => '

Information about the build project that was created.

', 'Projects$member' => NULL, 'UpdateProjectOutput$project' => '

Information about the build project that was changed.

', ], ], 'ProjectArtifacts' => [ 'base' => '

Information about the build output artifacts for the build project.

', 'refs' => [ 'CreateProjectInput$artifacts' => '

Information about the build output artifacts for the build project.

', 'Project$artifacts' => '

Information about the build output artifacts for the build project.

', 'ProjectArtifactsList$member' => NULL, 'StartBuildInput$artifactsOverride' => '

Build output artifact settings that override, for this build only, the latest ones already defined in the build project.

', 'UpdateProjectInput$artifacts' => '

Information to be changed about the build output artifacts for the build project.

', ], ], 'ProjectArtifactsList' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$secondaryArtifacts' => '

An array of ProjectArtifacts objects.

', 'Project$secondaryArtifacts' => '

An array of ProjectArtifacts objects.

', 'StartBuildInput$secondaryArtifactsOverride' => '

An array of ProjectArtifacts objects.

', 'UpdateProjectInput$secondaryArtifacts' => '

An array of ProjectSource objects.

', ], ], 'ProjectBadge' => [ 'base' => '

Information about the build badge for the build project.

', 'refs' => [ 'Project$badge' => '

Information about the build badge for the build project.

', ], ], 'ProjectCache' => [ 'base' => '

Information about the cache for the build project.

', 'refs' => [ 'Build$cache' => '

Information about the cache for the build.

', 'CreateProjectInput$cache' => '

Stores recently used information so that it can be quickly accessed at a later time.

', 'Project$cache' => '

Information about the cache for the build project.

', 'StartBuildInput$cacheOverride' => '

A ProjectCache object specified for this build that overrides the one defined in the build project.

', 'UpdateProjectInput$cache' => '

Stores recently used information so that it can be quickly accessed at a later time.

', ], ], 'ProjectCacheModes' => [ 'base' => NULL, 'refs' => [ 'ProjectCache$modes' => '

If you use a LOCAL cache, the local cache mode. You can use one or more local cache modes at the same time.
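
A hedged sketch of combining local cache modes; the mode names used here (LOCAL_SOURCE_CACHE, LOCAL_DOCKER_LAYER_CACHE) are my reading of the CacheMode enum for this API version, and the project name is a placeholder:

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

// A LOCAL cache can combine more than one mode at the same time.
$client->updateProject([
    'name'  => 'my-project', // placeholder
    'cache' => [
        'type'  => 'LOCAL',
        'modes' => [
            'LOCAL_SOURCE_CACHE',       // assumed mode name: caches Git metadata
            'LOCAL_DOCKER_LAYER_CACHE', // assumed mode name: caches Docker layers
        ],
    ],
]);
```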

', ], ], 'ProjectDescription' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$description' => '

A description that makes the build project easy to identify.

', 'Project$description' => '

A description that makes the build project easy to identify.

', 'UpdateProjectInput$description' => '

A new or replacement description of the build project.

', ], ], 'ProjectEnvironment' => [ 'base' => '

Information about the build environment of the build project.

', 'refs' => [ 'Build$environment' => '

Information about the build environment for this build.

', 'CreateProjectInput$environment' => '

Information about the build environment for the build project.

', 'Project$environment' => '

Information about the build environment for this build project.

', 'UpdateProjectInput$environment' => '

Information to be changed about the build environment for the build project.

', ], ], 'ProjectName' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$name' => '

The name of the build project.

', 'CreateWebhookInput$projectName' => '

The name of the AWS CodeBuild project.

', 'DeleteWebhookInput$projectName' => '

The name of the AWS CodeBuild project.

', 'Project$name' => '

The name of the build project.

', 'UpdateWebhookInput$projectName' => '

The name of the AWS CodeBuild project.

', ], ], 'ProjectNames' => [ 'base' => NULL, 'refs' => [ 'BatchGetProjectsInput$names' => '

The names of the build projects.

', 'BatchGetProjectsOutput$projectsNotFound' => '

The names of build projects for which information could not be found.

', 'ListProjectsOutput$projects' => '

The list of build project names, with each build project name representing a single build project.

', ], ], 'ProjectSecondarySourceVersions' => [ 'base' => NULL, 'refs' => [ 'Build$secondarySourceVersions' => '

An array of ProjectSourceVersion objects. Each ProjectSourceVersion must be one of:

', 'StartBuildInput$secondarySourcesVersionOverride' => '

An array of ProjectSourceVersion objects that specify one or more versions of the project\'s secondary sources to be used for this build only.

', ], ], 'ProjectSortByType' => [ 'base' => NULL, 'refs' => [ 'ListProjectsInput$sortBy' => '

The criterion to be used to list build project names. Valid values include:

Use sortOrder to specify in what order to list the build project names based on the preceding criteria.

', ], ], 'ProjectSource' => [ 'base' => '

Information about the build input source code for the build project.

', 'refs' => [ 'Build$source' => '

Information about the source code to be built.

', 'CreateProjectInput$source' => '

Information about the build input source code for the build project.

', 'Project$source' => '

Information about the build input source code for this build project.

', 'ProjectSources$member' => NULL, 'UpdateProjectInput$source' => '

Information to be changed about the build input source code for the build project.

', ], ], 'ProjectSourceVersion' => [ 'base' => '

A source identifier and its corresponding version.

', 'refs' => [ 'ProjectSecondarySourceVersions$member' => NULL, ], ], 'ProjectSources' => [ 'base' => NULL, 'refs' => [ 'Build$secondarySources' => '

An array of ProjectSource objects.

', 'CreateProjectInput$secondarySources' => '

An array of ProjectSource objects.

', 'Project$secondarySources' => '

An array of ProjectSource objects.

', 'StartBuildInput$secondarySourcesOverride' => '

An array of ProjectSource objects.

', 'UpdateProjectInput$secondarySources' => '

An array of ProjectSource objects.

', ], ], 'Projects' => [ 'base' => NULL, 'refs' => [ 'BatchGetProjectsOutput$projects' => '

Information about the requested build projects.

', ], ], 'RegistryCredential' => [ 'base' => '

Information about credentials that provide access to a private Docker registry. When this is set:

For more information, see Private Registry with AWS Secrets Manager Sample for AWS CodeBuild.

', 'refs' => [ 'ProjectEnvironment$registryCredential' => '

The credentials for access to a private registry.

', 'StartBuildInput$registryCredentialOverride' => '

The credentials for access to a private registry.

', ], ], 'ResourceAlreadyExistsException' => [ 'base' => '

The specified AWS resource cannot be created, because an AWS resource with the same settings already exists.

', 'refs' => [], ], 'ResourceNotFoundException' => [ 'base' => '

The specified AWS resource cannot be found.

', 'refs' => [], ], 'S3LogsConfig' => [ 'base' => '

Information about S3 logs for a build project.

', 'refs' => [ 'LogsConfig$s3Logs' => '

Information about logs built to an S3 bucket for a build project. S3 logs are not enabled by default.
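
A sketch of opting in to S3 logs alongside the default CloudWatch logs; the group, stream, and bucket names are placeholders, and encryptionDisabled is the member added to S3LogsConfig in this revision (S3 build logs stay encrypted unless it is set to true):

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

$client->updateProject([
    'name'       => 'my-project', // placeholder
    'logsConfig' => [
        'cloudWatchLogs' => [
            'status'     => 'ENABLED',
            'groupName'  => 'my-log-group',     // placeholder
            'streamName' => 'my-stream-prefix', // placeholder
        ],
        's3Logs' => [
            'status'   => 'ENABLED',
            'location' => 'my-bucket/build-log', // bucket/prefix placeholder
            // New in this revision: leave false to keep S3 logs encrypted.
            'encryptionDisabled' => false,
        ],
    ],
]);
```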

', 'LogsLocation$s3Logs' => '

Information about S3 logs for a build project.

', ], ], 'SecurityGroupIds' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$securityGroupIds' => '

A list of one or more security group IDs in your Amazon VPC.

', ], ], 'SensitiveNonEmptyString' => [ 'base' => NULL, 'refs' => [ 'ImportSourceCredentialsInput$token' => '

For GitHub or GitHub Enterprise, this is the personal access token. For Bitbucket, this is the app password.
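
A minimal sketch of importing and then listing credentials; the token is read from an environment variable here purely as a stand-in for a secure source:

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

// GITHUB_TOKEN is a placeholder; never hard-code the token value.
$result = $client->importSourceCredentials([
    'serverType' => 'GITHUB',
    'authType'   => 'PERSONAL_ACCESS_TOKEN',
    'token'      => getenv('GITHUB_TOKEN'),
]);

echo $result['arn'], PHP_EOL;

// Each entry reports the provider, auth type, and token ARN.
foreach ($client->listSourceCredentials()['sourceCredentialsInfos'] as $info) {
    echo $info['serverType'], ' ', $info['authType'], ' ', $info['arn'], PHP_EOL;
}
```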

', ], ], 'ServerType' => [ 'base' => NULL, 'refs' => [ 'ImportSourceCredentialsInput$serverType' => '

The source provider used for this project.

', 'SourceCredentialsInfo$serverType' => '

The type of source provider. The valid options are GITHUB, GITHUB_ENTERPRISE, or BITBUCKET.

', ], ], 'SortOrderType' => [ 'base' => NULL, 'refs' => [ 'ListBuildsForProjectInput$sortOrder' => '

The order to list build IDs. Valid values include:

', 'ListBuildsInput$sortOrder' => '

The order to list build IDs. Valid values include:

', 'ListProjectsInput$sortOrder' => '

The order in which to list build projects. Valid values include:

Use sortBy to specify the criterion to be used to list build project names.

', ], ], 'SourceAuth' => [ 'base' => '

Information about the authorization settings for AWS CodeBuild to access the source code to be built.

This information is for the AWS CodeBuild console\'s use only. Your code should not get or set this information directly.

', 'refs' => [ 'ProjectSource$auth' => '

Information about the authorization settings for AWS CodeBuild to access the source code to be built.

This information is for the AWS CodeBuild console\'s use only. Your code should not get or set this information directly.

', 'StartBuildInput$sourceAuthOverride' => '

An authorization type for this build that overrides the one defined in the build project. This override applies only if the build project\'s source is BitBucket or GitHub.

', ], ], 'SourceAuthType' => [ 'base' => NULL, 'refs' => [ 'SourceAuth$type' => '

This data type is deprecated and is no longer accurate or used.

The authorization type to use. The only valid value is OAUTH, which represents the OAuth authorization type.

', ], ], 'SourceCredentialsInfo' => [ 'base' => '

Information about the credentials for a GitHub, GitHub Enterprise, or Bitbucket repository.

', 'refs' => [ 'SourceCredentialsInfos$member' => NULL, ], ], 'SourceCredentialsInfos' => [ 'base' => NULL, 'refs' => [ 'ListSourceCredentialsOutput$sourceCredentialsInfos' => '

A list of SourceCredentialsInfo objects. Each SourceCredentialsInfo object includes the authentication type, token ARN, and type of source provider for one set of credentials.

', ], ], 'SourceType' => [ 'base' => NULL, 'refs' => [ 'ProjectSource$type' => '

The type of repository that contains the source code to be built. Valid values include:

', 'StartBuildInput$sourceTypeOverride' => '

A source input type, for this build, that overrides the source input defined in the build project.

', ], ], 'StartBuildInput' => [ 'base' => NULL, 'refs' => [], ], 'StartBuildOutput' => [ 'base' => NULL, 'refs' => [], ], 'StatusType' => [ 'base' => NULL, 'refs' => [ 'Build$buildStatus' => '

The current status of the build. Valid values include:

', 'BuildPhase$phaseStatus' => '

The current status of the build phase. Valid values include:

', ], ], 'StopBuildInput' => [ 'base' => NULL, 'refs' => [], ], 'StopBuildOutput' => [ 'base' => NULL, 'refs' => [], ], 'String' => [ 'base' => NULL, 'refs' => [ 'Build$currentPhase' => '

The current build phase.

', 'Build$initiator' => '

The entity that started the build. Valid values include:

', 'BuildArtifacts$location' => '

Information about the location of the build artifacts.

', 'BuildArtifacts$sha256sum' => '

The SHA-256 hash of the build artifact.

You can use this hash along with a checksum tool to confirm file integrity and authenticity.

This value is available only if the build project\'s packaging value is set to ZIP.

', 'BuildArtifacts$md5sum' => '

The MD5 hash of the build artifact.

You can use this hash along with a checksum tool to confirm file integrity and authenticity.

This value is available only if the build project\'s packaging value is set to ZIP.

', 'BuildArtifacts$artifactIdentifier' => '

An identifier for this artifact definition.

', 'BuildNotDeleted$statusCode' => '

Additional information about the build that could not be successfully deleted.

', 'CloudWatchLogsConfig$groupName' => '

The group name of the logs in Amazon CloudWatch Logs. For more information, see Working with Log Groups and Log Streams.

', 'CloudWatchLogsConfig$streamName' => '

The prefix of the stream name of the Amazon CloudWatch Logs. For more information, see Working with Log Groups and Log Streams.

', 'CreateWebhookInput$branchFilter' => '

A regular expression used to determine which repository branches are built when a webhook is triggered. If the name of a branch matches the regular expression, then it is built. If branchFilter is empty, then all branches are built.

It is recommended that you use filterGroups instead of branchFilter.

', 'EnvironmentImage$name' => '

The name of the Docker image.

', 'EnvironmentImage$description' => '

The description of the Docker image.

', 'EnvironmentVariable$value' => '

The value of the environment variable.

We strongly discourage the use of environment variables to store sensitive values, especially AWS secret key IDs and secret access keys. Environment variables can be displayed in plain text using the AWS CodeBuild console and the AWS Command Line Interface (AWS CLI).
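
One way to keep a secret out of plain text is the PARAMETER_STORE variable type, which resolves the value from Systems Manager Parameter Store at build time; a hedged sketch (PARAMETER_STORE is my reading of the EnvironmentVariableType values, and all names are placeholders):

```php
<?php
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$client = new CodeBuildClient(['version' => '2016-10-06', 'region' => 'us-east-1']);

$client->startBuild([
    'projectName' => 'my-project', // placeholder
    'environmentVariablesOverride' => [
        ['name' => 'STAGE',   'value' => 'prod', 'type' => 'PLAINTEXT'],
        // Only the parameter name is visible; the secret stays in Parameter Store.
        ['name' => 'DB_PASS', 'value' => '/my-app/db-pass', 'type' => 'PARAMETER_STORE'],
    ],
]);
```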

', 'ImageVersions$member' => NULL, 'ListBuildsForProjectInput$nextToken' => '

During a previous call, if there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call. To get all of the items in the list, keep calling this operation with each subsequent next token that is returned, until no more next tokens are returned.

', 'ListBuildsForProjectOutput$nextToken' => '

If there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call.

', 'ListBuildsInput$nextToken' => '

During a previous call, if there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call. To get all of the items in the list, keep calling this operation with each subsequent next token that is returned, until no more next tokens are returned.

', 'ListBuildsOutput$nextToken' => '

If there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call.

', 'ListProjectsOutput$nextToken' => '

If there are more than 100 items in the list, only the first 100 items are returned, along with a unique string called a next token. To get the next batch of items in the list, call this operation again, adding the next token to the call.

', 'LogsLocation$groupName' => '

The name of the Amazon CloudWatch Logs group for the build logs.

', 'LogsLocation$streamName' => '

The name of the Amazon CloudWatch Logs stream for the build logs.

', 'LogsLocation$deepLink' => '

The URL to an individual build log in Amazon CloudWatch Logs.

', 'LogsLocation$s3DeepLink' => '

The URL to a build log in an S3 bucket.

', 'PhaseContext$statusCode' => '

The status code for the context of the build phase.

', 'PhaseContext$message' => '

An explanation of the build phase\'s context. This might include a command ID and an exit code.

', 'Project$arn' => '

The Amazon Resource Name (ARN) of the build project.

', 'ProjectArtifacts$location' => '

Information about the build output artifact location:

', 'ProjectArtifacts$path' => '

Along with namespaceType and name, the pattern that AWS CodeBuild uses to name and store the output artifact:

For example, if path is set to MyArtifacts, namespaceType is set to NONE, and name is set to MyArtifact.zip, the output artifact is stored in the output bucket at MyArtifacts/MyArtifact.zip.

', 'ProjectArtifacts$name' => '

Along with path and namespaceType, the pattern that AWS CodeBuild uses to name and store the output artifact:

For example:

', 'ProjectArtifacts$artifactIdentifier' => '

An identifier for this artifact definition.

', 'ProjectBadge$badgeRequestUrl' => '

The publicly accessible URL through which you can access the build badge for your project.

', 'ProjectCache$location' => '

Information about the cache location:

', 'ProjectEnvironment$certificate' => '

The certificate to use with this build project.

', 'ProjectSource$location' => '

Information about the location of the source code to be built. Valid values include:

', 'ProjectSource$buildspec' => '

The build spec declaration to use for the builds in this build project.

If this value is not specified, a build spec must be included along with the source code to be built.

', 'ProjectSource$sourceIdentifier' => '

An identifier for this project source.

', 'ProjectSourceVersion$sourceIdentifier' => '

An identifier for a source in the build project.

', 'ProjectSourceVersion$sourceVersion' => '

The source version for the corresponding source identifier. If specified, must be one of:

', 'S3LogsConfig$location' => '

The ARN of an S3 bucket and the path prefix for S3 logs. If your Amazon S3 bucket name is my-bucket, and your path prefix is build-log, then acceptable formats are my-bucket/build-log or arn:aws:s3:::my-bucket/build-log.

', 'SourceAuth$resource' => '

The resource value that applies to the specified authorization type.

', 'StartBuildInput$sourceVersion' => '

A version of the build input to be built, for this build only. If not specified, the latest version is used. If specified, must be one of:

', 'StartBuildInput$sourceLocationOverride' => '

A location that overrides, for this build, the source location for the one defined in the build project.

', 'StartBuildInput$buildspecOverride' => '

A build spec declaration that overrides, for this build only, the latest one already defined in the build project.

', 'StartBuildInput$certificateOverride' => '

The name of a certificate for this build that overrides the one specified in the build project.

', 'StartBuildInput$idempotencyToken' => '

A unique, case-sensitive identifier you provide to ensure the idempotency of the StartBuild request. The token is included in the StartBuild request and is valid for 12 hours. If you repeat the StartBuild request with the same token, but change a parameter, AWS CodeBuild returns a parameter mismatch error.

', 'UpdateWebhookInput$branchFilter' => '

A regular expression used to determine which repository branches are built when a webhook is triggered. If the name of a branch matches the regular expression, then it is built. If branchFilter is empty, then all branches are built.

It is recommended that you use filterGroups instead of branchFilter.

', 'Webhook$branchFilter' => '

A regular expression used to determine which repository branches are built when a webhook is triggered. If the name of a branch matches the regular expression, then it is built. If branchFilter is empty, then all branches are built.

It is recommended that you use filterGroups instead of branchFilter.

', 'WebhookFilter$pattern' => '

For a WebHookFilter that uses EVENT type, a comma-separated string that specifies one or more events. For example, the webhook filter PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED allows all push, pull request created, and pull request updated events to trigger a build.

For a WebHookFilter that uses any of the other filter types, a regular expression pattern. For example, a WebHookFilter that uses HEAD_REF for its type and the pattern ^refs/heads/ triggers a build when the head reference is a branch with a reference name refs/heads/branch-name.

', ], ], 'Subnets' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$subnets' => '

A list of one or more subnet IDs in your Amazon VPC.

', ], ], 'Tag' => [ 'base' => '

A tag, consisting of a key and a value.

This tag is available for use by AWS services that support tags in AWS CodeBuild.

', 'refs' => [ 'TagList$member' => NULL, ], ], 'TagList' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$tags' => '

A set of tags for this build project.

These tags are available for use by AWS services that support AWS CodeBuild build project tags.

', 'Project$tags' => '

The tags for this build project.

These tags are available for use by AWS services that support AWS CodeBuild build project tags.

', 'UpdateProjectInput$tags' => '

The replacement set of tags for this build project.

These tags are available for use by AWS services that support AWS CodeBuild build project tags.

', ], ], 'TimeOut' => [ 'base' => NULL, 'refs' => [ 'CreateProjectInput$timeoutInMinutes' => '

How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before it times out any build that has not been marked as completed. The default is 60 minutes.

', 'CreateProjectInput$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', 'Project$timeoutInMinutes' => '

How long, in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed. The default is 60 minutes.

', 'Project$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', 'StartBuildInput$timeoutInMinutesOverride' => '

The number of build timeout minutes, from 5 to 480 (8 hours), that overrides, for this build only, the latest setting already defined in the build project.

', 'StartBuildInput$queuedTimeoutInMinutesOverride' => '

The number of minutes a build is allowed to be queued before it times out.

', 'UpdateProjectInput$timeoutInMinutes' => '

The replacement value in minutes, from 5 to 480 (8 hours), for AWS CodeBuild to wait before timing out any related build that did not get marked as completed.

', 'UpdateProjectInput$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', ], ], 'Timestamp' => [ 'base' => NULL, 'refs' => [ 'Build$startTime' => '

When the build process started, expressed in Unix time format.

', 'Build$endTime' => '

When the build process ended, expressed in Unix time format.

', 'BuildPhase$startTime' => '

When the build phase started, expressed in Unix time format.

', 'BuildPhase$endTime' => '

When the build phase ended, expressed in Unix time format.

', 'Project$created' => '

When the build project was created, expressed in Unix time format.

', 'Project$lastModified' => '

When the build project\'s settings were last modified, expressed in Unix time format.

', 'Webhook$lastModifiedSecret' => '

A timestamp that indicates the last time a repository\'s secret token was modified.

', ], ], 'UpdateProjectInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateProjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWebhookInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWebhookOutput' => [ 'base' => NULL, 'refs' => [], ], 'ValueInput' => [ 'base' => NULL, 'refs' => [ 'Tag$value' => '

The tag\'s value.

', ], ], 'VpcConfig' => [ 'base' => '

Information about the VPC configuration that AWS CodeBuild accesses.

', 'refs' => [ 'Build$vpcConfig' => '

If your AWS CodeBuild project accesses resources in an Amazon VPC, provide this parameter to identify the VPC ID and the lists of security group IDs and subnet IDs. The security groups and subnets must belong to the same VPC. You must provide at least one security group and one subnet ID.

', 'CreateProjectInput$vpcConfig' => '

VpcConfig enables AWS CodeBuild to access resources in an Amazon VPC.

', 'Project$vpcConfig' => '

Information about the VPC configuration that AWS CodeBuild accesses.

', 'UpdateProjectInput$vpcConfig' => '

VpcConfig enables AWS CodeBuild to access resources in an Amazon VPC.

', ], ], 'Webhook' => [ 'base' => '

Information about a webhook that connects repository events to a build project in AWS CodeBuild.

', 'refs' => [ 'CreateWebhookOutput$webhook' => '

Information about a webhook that connects repository events to a build project in AWS CodeBuild.

', 'Project$webhook' => '

Information about a webhook that connects repository events to a build project in AWS CodeBuild.

', 'UpdateWebhookOutput$webhook' => '

Information about a repository\'s webhook that is associated with a project in AWS CodeBuild.

', ], ], 'WebhookFilter' => [ 'base' => '

A filter used to determine which webhooks trigger a build.

', 'refs' => [ 'FilterGroup$member' => NULL, ], ], 'WebhookFilterType' => [ 'base' => NULL, 'refs' => [ 'WebhookFilter$type' => '

The type of webhook filter. There are five webhook filter types: EVENT, ACTOR_ACCOUNT_ID, HEAD_REF, BASE_REF, and FILE_PATH.

EVENT

A webhook event triggers a build when the provided pattern matches one of four event types: PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED, and PULL_REQUEST_REOPENED. The EVENT patterns are specified as a comma-separated string. For example, PUSH, PULL_REQUEST_CREATED, PULL_REQUEST_UPDATED filters all push, pull request created, and pull request updated events.

The PULL_REQUEST_REOPENED event type works with GitHub and GitHub Enterprise only.

ACTOR_ACCOUNT_ID

A webhook event triggers a build when a GitHub, GitHub Enterprise, or Bitbucket account ID matches the regular expression pattern.

HEAD_REF

A webhook event triggers a build when the head reference matches the regular expression pattern. For example, refs/heads/branch-name and refs/tags/tag-name.

Works with GitHub and GitHub Enterprise push, GitHub and GitHub Enterprise pull request, Bitbucket push, and Bitbucket pull request events.

BASE_REF

A webhook event triggers a build when the base reference matches the regular expression pattern. For example, refs/heads/branch-name.

Works with pull request events only.

FILE_PATH

A webhook triggers a build when the path of a changed file matches the regular expression pattern.

Works with GitHub and GitHub Enterprise push events only.
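Taken together, every filter in a filter group must match for the webhook to trigger a build. A hedged sketch with the PHP SDK, combining an EVENT filter, a HEAD_REF pattern, and an excluded branch (the project name is a placeholder):

```php
<?php
// Sketch: a webhook that builds on pushes to branches, except the "docs" branch.
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$codeBuild = new CodeBuildClient(['version' => 'latest', 'region' => 'us-east-1']);

$codeBuild->createWebhook([
    'projectName'  => 'my-project', // hypothetical project
    'filterGroups' => [
        [   // All filters in a group must match for the webhook to fire.
            ['type' => 'EVENT',    'pattern' => 'PUSH'],
            ['type' => 'HEAD_REF', 'pattern' => '^refs/heads/'],
            ['type' => 'HEAD_REF', 'pattern' => '^refs/heads/docs$',
             'excludeMatchedPattern' => true],
        ],
    ],
]);
```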

', ], ], 'WrapperBoolean' => [ 'base' => NULL, 'refs' => [ 'BuildArtifacts$overrideArtifactName' => '

If this flag is set, a name specified in the build spec file overrides the artifact name. The name specified in a build spec file is calculated at build time and uses the Shell Command Language. For example, you can append a date and time to your artifact name so that it is always unique.

', 'BuildArtifacts$encryptionDisabled' => '

Information that tells you if encryption for build artifacts is disabled.

', 'CreateProjectInput$badgeEnabled' => '

Set this to true to generate a publicly accessible URL for your project\'s build badge.

', 'GitSubmodulesConfig$fetchSubmodules' => '

Set to true to fetch Git submodules for your AWS CodeBuild build project.
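This is one of the new fields in this release; the API model above also adds a matching gitSubmodulesConfigOverride to StartBuildInput. A minimal sketch of using it for a single build (project name assumed):

```php
<?php
// Sketch: fetch Git submodules for one build via the new override parameter.
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$codeBuild = new CodeBuildClient(['version' => 'latest', 'region' => 'us-east-1']);

$codeBuild->startBuild([
    'projectName'                 => 'my-project', // hypothetical project
    'gitSubmodulesConfigOverride' => ['fetchSubmodules' => true],
]);
```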

', 'ProjectArtifacts$overrideArtifactName' => '

If this flag is set, a name specified in the build spec file overrides the artifact name. The name specified in a build spec file is calculated at build time and uses the Shell Command Language. For example, you can append a date and time to your artifact name so that it is always unique.

', 'ProjectArtifacts$encryptionDisabled' => '

Set to true if you do not want your output artifacts encrypted. This option is valid only if your artifacts type is Amazon Simple Storage Service (Amazon S3). If this is set with another artifacts type, an invalidInputException is thrown.

', 'ProjectEnvironment$privilegedMode' => '

Enables running the Docker daemon inside a Docker container. Set to true only if the build project is used to build Docker images, and the specified build environment image is not provided by AWS CodeBuild with Docker support. Otherwise, all associated builds that attempt to interact with the Docker daemon fail. You must also start the Docker daemon so that builds can interact with it. One way to do this is to initialize the Docker daemon during the install phase of your build spec by running the following build commands. (Do not run these commands if the specified build environment image is provided by AWS CodeBuild with Docker support.)

If the operating system\'s base image is Ubuntu Linux:

- nohup /usr/local/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --storage-driver=overlay&
- timeout 15 sh -c "until docker info; do echo .; sleep 1; done"

If the operating system\'s base image is Alpine Linux, add the -t argument to timeout:

- nohup /usr/local/bin/dockerd --host=unix:///var/run/docker.sock --host=tcp://0.0.0.0:2375 --storage-driver=overlay&
- timeout -t 15 sh -c "until docker info; do echo .; sleep 1; done"
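For a one-off build against a project that normally runs unprivileged, the privilegedModeOverride flag described below can enable this per build; a sketch (project name assumed):

```php
<?php
// Sketch: enable privileged mode for one build so the buildspec's install
// phase can start and talk to the Docker daemon.
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$codeBuild = new CodeBuildClient(['version' => 'latest', 'region' => 'us-east-1']);

$codeBuild->startBuild([
    'projectName'            => 'docker-image-build', // hypothetical project
    'privilegedModeOverride' => true,
]);
```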

', 'ProjectSource$reportBuildStatus' => '

Set to true to report the status of a build\'s start and finish to your source provider. This option is valid only when your source provider is GitHub, GitHub Enterprise, or Bitbucket. If this is set and you use a different source provider, an invalidInputException is thrown.

', 'ProjectSource$insecureSsl' => '

Enable this flag to ignore SSL warnings while connecting to the project source code.

', 'S3LogsConfig$encryptionDisabled' => '

Set to true if you do not want your S3 build log output encrypted. By default S3 build logs are encrypted.
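This flag is the other CodeBuild addition in this release. A sketch of opting an existing project's S3 build logs out of encryption (project and bucket names are placeholders):

```php
<?php
// Sketch: disable encryption for S3 build logs via the new encryptionDisabled
// flag on S3LogsConfig. Logs are encrypted by default.
require 'vendor/autoload.php';

use Aws\CodeBuild\CodeBuildClient;

$codeBuild = new CodeBuildClient(['version' => 'latest', 'region' => 'us-east-1']);

$codeBuild->updateProject([
    'name'       => 'my-project', // hypothetical project
    'logsConfig' => [
        's3Logs' => [
            'status'             => 'ENABLED',
            'location'           => 'my-log-bucket/build-logs', // hypothetical
            'encryptionDisabled' => true,
        ],
    ],
]);
```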

', 'StartBuildInput$insecureSslOverride' => '

Enable this flag to override the insecure SSL setting that is specified in the build project. The insecure SSL setting determines whether to ignore SSL warnings while connecting to the project source code. This override applies only if the build\'s source is GitHub Enterprise.

', 'StartBuildInput$reportBuildStatusOverride' => '

Set to true to report to your source provider the status of a build\'s start and completion. If you use this option with a source provider other than GitHub, GitHub Enterprise, or Bitbucket, an invalidInputException is thrown.

', 'StartBuildInput$privilegedModeOverride' => '

Enable this flag to override privileged mode in the build project.

', 'UpdateProjectInput$badgeEnabled' => '

Set this to true to generate a publicly accessible URL for your project\'s build badge.

', 'WebhookFilter$excludeMatchedPattern' => '

Used to indicate that the pattern determines which webhook events do not trigger a build. If true, then a webhook event that does not match the pattern triggers a build. If false, then a webhook event that matches the pattern triggers a build.

', ], ], 'WrapperInt' => [ 'base' => NULL, 'refs' => [ 'Build$timeoutInMinutes' => '

How long, in minutes, for AWS CodeBuild to wait before timing out this build if it does not get marked as completed.

', 'Build$queuedTimeoutInMinutes' => '

The number of minutes a build is allowed to be queued before it times out.

', ], ], 'WrapperLong' => [ 'base' => NULL, 'refs' => [ 'BuildPhase$durationInSeconds' => '

How long, in seconds, between the starting and ending times of the build\'s phase.

', ], ], ],]; diff --git a/src/data/s3/2006-03-01/docs-2.json b/src/data/s3/2006-03-01/docs-2.json index 2ab89990d7..98002545b0 100644 --- a/src/data/s3/2006-03-01/docs-2.json +++ b/src/data/s3/2006-03-01/docs-2.json @@ -1,6 +1,6 @@ { "version": "2.0", - "service": null, + "service": "

", "operations": { "AbortMultipartUpload": "

Aborts a multipart upload.

To verify that all parts have been removed, so you don't get charged for the part storage, you should call the List Parts operation and ensure the parts list is empty.

", "CompleteMultipartUpload": "

Completes a multipart upload by assembling previously uploaded parts.

", @@ -28,12 +28,12 @@ "GetBucketCors": "

Returns the CORS configuration for the bucket.

", "GetBucketEncryption": "

Returns the server-side encryption configuration of a bucket.

", "GetBucketInventoryConfiguration": "

Returns an inventory configuration (identified by the inventory ID) from the bucket.

", - "GetBucketLifecycle": "

Deprecated, see the GetBucketLifecycleConfiguration operation.

", + "GetBucketLifecycle": "

No longer used, see the GetBucketLifecycleConfiguration operation.

", "GetBucketLifecycleConfiguration": "

Returns the lifecycle configuration information set on the bucket.

", "GetBucketLocation": "

Returns the region the bucket resides in.

", "GetBucketLogging": "

Returns the logging status of a bucket and the permissions users have to view and modify that status. To use GET, you must be the bucket owner.

", "GetBucketMetricsConfiguration": "

Gets a metrics configuration (specified by the metrics configuration ID) from the bucket.

", - "GetBucketNotification": "

Deprecated, see the GetBucketNotificationConfiguration operation.

", + "GetBucketNotification": "

No longer used, see the GetBucketNotificationConfiguration operation.

", "GetBucketNotificationConfiguration": "

Returns the notification configuration of a bucket.

", "GetBucketPolicy": "

Returns the policy of a specified bucket.

", "GetBucketPolicyStatus": "

Retrieves the policy status for an Amazon S3 bucket, indicating whether the bucket is public.

", @@ -67,11 +67,11 @@ "PutBucketCors": "

Sets the CORS configuration for a bucket.

", "PutBucketEncryption": "

Creates a new server-side encryption configuration (or replaces an existing one, if present).

", "PutBucketInventoryConfiguration": "

Adds an inventory configuration (identified by the inventory ID) to the bucket.

", - "PutBucketLifecycle": "

Deprecated, see the PutBucketLifecycleConfiguration operation.

", + "PutBucketLifecycle": "

No longer used, see the PutBucketLifecycleConfiguration operation.

", "PutBucketLifecycleConfiguration": "

Sets lifecycle configuration for your bucket. If a lifecycle configuration exists, it replaces it.

", "PutBucketLogging": "

Sets the logging parameters for a bucket and specifies permissions for who can view and modify those parameters. To set the logging status of a bucket, you must be the bucket owner.

", "PutBucketMetricsConfiguration": "

Sets a metrics configuration (specified by the metrics configuration ID) for the bucket.

", - "PutBucketNotification": "

Deprecated, see the PutBucketNotificationConfiguraiton operation.

", + "PutBucketNotification": "

No longer used, see the PutBucketNotificationConfiguration operation.

", "PutBucketNotificationConfiguration": "

Enables notifications of specified events for a bucket.

", "PutBucketPolicy": "

Replaces a policy on a bucket. If the bucket already has a policy, the one in this request completely replaces it.

", "PutBucketReplication": "

Creates a replication configuration or replaces an existing one. For more information, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

", @@ -124,7 +124,7 @@ } }, "AccelerateConfiguration": { - "base": null, + "base": "

", "refs": { "PutBucketAccelerateConfigurationRequest$AccelerateConfiguration": "

Specifies the Accelerate Configuration you want to set for the bucket.

" } @@ -132,15 +132,15 @@ "AcceptRanges": { "base": null, "refs": { - "GetObjectOutput$AcceptRanges": null, - "HeadObjectOutput$AcceptRanges": null + "GetObjectOutput$AcceptRanges": "

", + "HeadObjectOutput$AcceptRanges": "

" } }, "AccessControlPolicy": { - "base": null, + "base": "

", "refs": { - "PutBucketAclRequest$AccessControlPolicy": null, - "PutObjectAclRequest$AccessControlPolicy": null + "PutBucketAclRequest$AccessControlPolicy": "

", + "PutObjectAclRequest$AccessControlPolicy": "

" } }, "AccessControlTranslation": { @@ -200,13 +200,13 @@ } }, "AnalyticsAndOperator": { - "base": null, + "base": "

", "refs": { "AnalyticsFilter$And": "

A conjunction (logical AND) of predicates, which is used in evaluating an analytics filter. The operator must have at least two predicates.

" } }, "AnalyticsConfiguration": { - "base": null, + "base": "

", "refs": { "AnalyticsConfigurationList$member": null, "GetBucketAnalyticsConfigurationOutput$AnalyticsConfiguration": "

The configuration and any analyses for the analytics filter.

", @@ -220,13 +220,13 @@ } }, "AnalyticsExportDestination": { - "base": null, + "base": "

", "refs": { "StorageClassAnalysisDataExport$Destination": "

The place to store the data for an analysis.

" } }, "AnalyticsFilter": { - "base": null, + "base": "

", "refs": { "AnalyticsConfiguration$Filter": "

The filter used to describe a set of objects for analyses. A filter must have exactly one prefix, one tag, or one conjunction (AnalyticsAndOperator). If no filter is provided, all objects will be considered in any analysis.

" } @@ -241,7 +241,7 @@ } }, "AnalyticsS3BucketDestination": { - "base": null, + "base": "

", "refs": { "AnalyticsExportDestination$S3BucketDestination": "

A destination signifying output to an S3 bucket.

" } @@ -256,14 +256,14 @@ "base": null, "refs": { "GetObjectOutput$Body": "

Object data.

", - "GetObjectTorrentOutput$Body": null, + "GetObjectTorrentOutput$Body": "

", "PutObjectRequest$Body": "

Object data.

", "RecordsEvent$Payload": "

The byte array of partial, one or more result records.

", "UploadPartRequest$Body": "

Object data.

" } }, "Bucket": { - "base": null, + "base": "

", "refs": { "Buckets$member": null } @@ -281,7 +281,7 @@ } }, "BucketAlreadyOwnedByYou": { - "base": null, + "base": "

", "refs": { } }, @@ -293,22 +293,22 @@ } }, "BucketLifecycleConfiguration": { - "base": null, + "base": "

", "refs": { - "PutBucketLifecycleConfigurationRequest$LifecycleConfiguration": null + "PutBucketLifecycleConfigurationRequest$LifecycleConfiguration": "

" } }, "BucketLocationConstraint": { "base": null, "refs": { - "CreateBucketConfiguration$LocationConstraint": "

Specifies the region where the bucket will be created. If you don't specify a region, the bucket will be created in US Standard.

", - "GetBucketLocationOutput$LocationConstraint": null + "CreateBucketConfiguration$LocationConstraint": "

Specifies the region where the bucket will be created. If you don't specify a region, the bucket is created in US East (N. Virginia) Region (us-east-1).
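A minimal sketch of setting this constraint when creating a bucket outside us-east-1 (bucket name and region are placeholders):

```php
<?php
// Sketch: create a bucket in a specific region via LocationConstraint.
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => 'latest', 'region' => 'eu-west-1']);

$s3->createBucket([
    'Bucket'                    => 'my-example-bucket', // hypothetical bucket
    'CreateBucketConfiguration' => ['LocationConstraint' => 'eu-west-1'],
]);
```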

", + "GetBucketLocationOutput$LocationConstraint": "

" } }, "BucketLoggingStatus": { - "base": null, + "base": "

", "refs": { - "PutBucketLoggingRequest$BucketLoggingStatus": null + "PutBucketLoggingRequest$BucketLoggingStatus": "

" } }, "BucketLogsPermission": { @@ -320,103 +320,103 @@ "BucketName": { "base": null, "refs": { - "AbortMultipartUploadRequest$Bucket": null, + "AbortMultipartUploadRequest$Bucket": "

", "AnalyticsS3BucketDestination$Bucket": "

The Amazon resource name (ARN) of the bucket to which data is exported.

", "Bucket$Name": "

The name of the bucket.

", - "CompleteMultipartUploadOutput$Bucket": null, - "CompleteMultipartUploadRequest$Bucket": null, - "CopyObjectRequest$Bucket": null, - "CreateBucketRequest$Bucket": null, + "CompleteMultipartUploadOutput$Bucket": "

", + "CompleteMultipartUploadRequest$Bucket": "

", + "CopyObjectRequest$Bucket": "

", + "CreateBucketRequest$Bucket": "

", "CreateMultipartUploadOutput$Bucket": "

Name of the bucket to which the multipart upload was initiated.

", - "CreateMultipartUploadRequest$Bucket": null, + "CreateMultipartUploadRequest$Bucket": "

", "DeleteBucketAnalyticsConfigurationRequest$Bucket": "

The name of the bucket from which an analytics configuration is deleted.

", - "DeleteBucketCorsRequest$Bucket": null, + "DeleteBucketCorsRequest$Bucket": "

", "DeleteBucketEncryptionRequest$Bucket": "

The name of the bucket containing the server-side encryption configuration to delete.

", "DeleteBucketInventoryConfigurationRequest$Bucket": "

The name of the bucket containing the inventory configuration to delete.

", - "DeleteBucketLifecycleRequest$Bucket": null, + "DeleteBucketLifecycleRequest$Bucket": "

", "DeleteBucketMetricsConfigurationRequest$Bucket": "

The name of the bucket containing the metrics configuration to delete.

", - "DeleteBucketPolicyRequest$Bucket": null, + "DeleteBucketPolicyRequest$Bucket": "

", "DeleteBucketReplicationRequest$Bucket": "

The bucket name.

It can take a while to propagate the deletion of a replication configuration to all Amazon S3 systems.

", - "DeleteBucketRequest$Bucket": null, - "DeleteBucketTaggingRequest$Bucket": null, - "DeleteBucketWebsiteRequest$Bucket": null, - "DeleteObjectRequest$Bucket": null, - "DeleteObjectTaggingRequest$Bucket": null, - "DeleteObjectsRequest$Bucket": null, + "DeleteBucketRequest$Bucket": "

", + "DeleteBucketTaggingRequest$Bucket": "

", + "DeleteBucketWebsiteRequest$Bucket": "

", + "DeleteObjectRequest$Bucket": "

", + "DeleteObjectTaggingRequest$Bucket": "

", + "DeleteObjectsRequest$Bucket": "

", "DeletePublicAccessBlockRequest$Bucket": "

The Amazon S3 bucket whose PublicAccessBlock configuration you want to delete.

", "Destination$Bucket": "

The Amazon Resource Name (ARN) of the bucket where you want Amazon S3 to store replicas of the object identified by the rule.

If there are multiple rules in your replication configuration, all rules must specify the same bucket as the destination. A replication configuration can replicate objects to only one destination bucket.

", "GetBucketAccelerateConfigurationRequest$Bucket": "

Name of the bucket for which the accelerate configuration is retrieved.

", - "GetBucketAclRequest$Bucket": null, + "GetBucketAclRequest$Bucket": "

", "GetBucketAnalyticsConfigurationRequest$Bucket": "

The name of the bucket from which an analytics configuration is retrieved.

", - "GetBucketCorsRequest$Bucket": null, + "GetBucketCorsRequest$Bucket": "

", "GetBucketEncryptionRequest$Bucket": "

The name of the bucket from which the server-side encryption configuration is retrieved.

", "GetBucketInventoryConfigurationRequest$Bucket": "

The name of the bucket containing the inventory configuration to retrieve.

", - "GetBucketLifecycleConfigurationRequest$Bucket": null, - "GetBucketLifecycleRequest$Bucket": null, - "GetBucketLocationRequest$Bucket": null, - "GetBucketLoggingRequest$Bucket": null, + "GetBucketLifecycleConfigurationRequest$Bucket": "

", + "GetBucketLifecycleRequest$Bucket": "

", + "GetBucketLocationRequest$Bucket": "

", + "GetBucketLoggingRequest$Bucket": "

", "GetBucketMetricsConfigurationRequest$Bucket": "

The name of the bucket containing the metrics configuration to retrieve.

", "GetBucketNotificationConfigurationRequest$Bucket": "

Name of the bucket to get the notification configuration for.

", - "GetBucketPolicyRequest$Bucket": null, + "GetBucketPolicyRequest$Bucket": "

", "GetBucketPolicyStatusRequest$Bucket": "

The name of the Amazon S3 bucket whose policy status you want to retrieve.

", - "GetBucketReplicationRequest$Bucket": null, - "GetBucketRequestPaymentRequest$Bucket": null, - "GetBucketTaggingRequest$Bucket": null, - "GetBucketVersioningRequest$Bucket": null, - "GetBucketWebsiteRequest$Bucket": null, - "GetObjectAclRequest$Bucket": null, + "GetBucketReplicationRequest$Bucket": "

", + "GetBucketRequestPaymentRequest$Bucket": "

", + "GetBucketTaggingRequest$Bucket": "

", + "GetBucketVersioningRequest$Bucket": "

", + "GetBucketWebsiteRequest$Bucket": "

", + "GetObjectAclRequest$Bucket": "

", "GetObjectLegalHoldRequest$Bucket": "

The bucket containing the object whose Legal Hold status you want to retrieve.

", "GetObjectLockConfigurationRequest$Bucket": "

The bucket whose Object Lock configuration you want to retrieve.

", - "GetObjectRequest$Bucket": null, + "GetObjectRequest$Bucket": "

", "GetObjectRetentionRequest$Bucket": "

The bucket containing the object whose retention settings you want to retrieve.

", - "GetObjectTaggingRequest$Bucket": null, - "GetObjectTorrentRequest$Bucket": null, + "GetObjectTaggingRequest$Bucket": "

", + "GetObjectTorrentRequest$Bucket": "

", "GetPublicAccessBlockRequest$Bucket": "

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want to retrieve.

", - "HeadBucketRequest$Bucket": null, - "HeadObjectRequest$Bucket": null, + "HeadBucketRequest$Bucket": "

", + "HeadObjectRequest$Bucket": "

", "InventoryS3BucketDestination$Bucket": "

The Amazon resource name (ARN) of the bucket where inventory results will be published.

", "ListBucketAnalyticsConfigurationsRequest$Bucket": "

The name of the bucket from which analytics configurations are retrieved.

", "ListBucketInventoryConfigurationsRequest$Bucket": "

The name of the bucket containing the inventory configurations to retrieve.

", "ListBucketMetricsConfigurationsRequest$Bucket": "

The name of the bucket containing the metrics configurations to retrieve.

", "ListMultipartUploadsOutput$Bucket": "

Name of the bucket to which the multipart upload was initiated.

", - "ListMultipartUploadsRequest$Bucket": null, - "ListObjectVersionsOutput$Name": null, - "ListObjectVersionsRequest$Bucket": null, - "ListObjectsOutput$Name": null, - "ListObjectsRequest$Bucket": null, + "ListMultipartUploadsRequest$Bucket": "

", + "ListObjectVersionsOutput$Name": "

", + "ListObjectVersionsRequest$Bucket": "

", + "ListObjectsOutput$Name": "

", + "ListObjectsRequest$Bucket": "

", "ListObjectsV2Output$Name": "

Name of the bucket to list.

", "ListObjectsV2Request$Bucket": "

Name of the bucket to list.

", "ListPartsOutput$Bucket": "

Name of the bucket to which the multipart upload was initiated.

", - "ListPartsRequest$Bucket": null, + "ListPartsRequest$Bucket": "

", "PutBucketAccelerateConfigurationRequest$Bucket": "

Name of the bucket for which the accelerate configuration is set.

", - "PutBucketAclRequest$Bucket": null, + "PutBucketAclRequest$Bucket": "

", "PutBucketAnalyticsConfigurationRequest$Bucket": "

The name of the bucket to which an analytics configuration is stored.

", - "PutBucketCorsRequest$Bucket": null, + "PutBucketCorsRequest$Bucket": "

", "PutBucketEncryptionRequest$Bucket": "

The name of the bucket for which the server-side encryption configuration is set.

", "PutBucketInventoryConfigurationRequest$Bucket": "

The name of the bucket where the inventory configuration will be stored.

", - "PutBucketLifecycleConfigurationRequest$Bucket": null, - "PutBucketLifecycleRequest$Bucket": null, - "PutBucketLoggingRequest$Bucket": null, + "PutBucketLifecycleConfigurationRequest$Bucket": "

", + "PutBucketLifecycleRequest$Bucket": "

", + "PutBucketLoggingRequest$Bucket": "

", "PutBucketMetricsConfigurationRequest$Bucket": "

The name of the bucket for which the metrics configuration is set.

", - "PutBucketNotificationConfigurationRequest$Bucket": null, - "PutBucketNotificationRequest$Bucket": null, - "PutBucketPolicyRequest$Bucket": null, - "PutBucketReplicationRequest$Bucket": null, - "PutBucketRequestPaymentRequest$Bucket": null, - "PutBucketTaggingRequest$Bucket": null, - "PutBucketVersioningRequest$Bucket": null, - "PutBucketWebsiteRequest$Bucket": null, - "PutObjectAclRequest$Bucket": null, + "PutBucketNotificationConfigurationRequest$Bucket": "

", + "PutBucketNotificationRequest$Bucket": "

", + "PutBucketPolicyRequest$Bucket": "

", + "PutBucketReplicationRequest$Bucket": "

", + "PutBucketRequestPaymentRequest$Bucket": "

", + "PutBucketTaggingRequest$Bucket": "

", + "PutBucketVersioningRequest$Bucket": "

", + "PutBucketWebsiteRequest$Bucket": "

", + "PutObjectAclRequest$Bucket": "

", "PutObjectLegalHoldRequest$Bucket": "

The bucket containing the object that you want to place a Legal Hold on.

", "PutObjectLockConfigurationRequest$Bucket": "

The bucket whose Object Lock configuration you want to create or replace.

", "PutObjectRequest$Bucket": "

Name of the bucket to which the PUT operation was initiated.

", "PutObjectRetentionRequest$Bucket": "

The bucket that contains the object you want to apply this Object Retention configuration to.

", - "PutObjectTaggingRequest$Bucket": null, + "PutObjectTaggingRequest$Bucket": "

", "PutPublicAccessBlockRequest$Bucket": "

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want to set.

", - "RestoreObjectRequest$Bucket": null, + "RestoreObjectRequest$Bucket": "

", "S3Location$BucketName": "

The name of the bucket where the restore results will be placed.

", "SelectObjectContentRequest$Bucket": "

The S3 bucket.

", - "UploadPartCopyRequest$Bucket": null, + "UploadPartCopyRequest$Bucket": "

", "UploadPartRequest$Bucket": "

Name of the bucket to which the multipart upload was initiated.

" } }, @@ -430,7 +430,7 @@ "Buckets": { "base": null, "refs": { - "ListBucketsOutput$Buckets": null + "ListBucketsOutput$Buckets": "

" } }, "BypassGovernanceRetention": { @@ -463,13 +463,13 @@ } }, "CORSConfiguration": { - "base": null, + "base": "

", "refs": { - "PutBucketCorsRequest$CORSConfiguration": null + "PutBucketCorsRequest$CORSConfiguration": "

" } }, "CORSRule": { - "base": null, + "base": "

", "refs": { "CORSRules$member": null } @@ -477,8 +477,8 @@ "CORSRules": { "base": null, "refs": { - "CORSConfiguration$CORSRules": null, - "GetBucketCorsOutput$CORSRules": null + "CORSConfiguration$CORSRules": "

", + "GetBucketCorsOutput$CORSRules": "

" } }, "CSVInput": { @@ -506,25 +506,25 @@ "CloudFunction": { "base": null, "refs": { - "CloudFunctionConfiguration$CloudFunction": null + "CloudFunctionConfiguration$CloudFunction": "

" } }, "CloudFunctionConfiguration": { - "base": null, + "base": "

", "refs": { - "NotificationConfigurationDeprecated$CloudFunctionConfiguration": null + "NotificationConfigurationDeprecated$CloudFunctionConfiguration": "

" } }, "CloudFunctionInvocationRole": { "base": null, "refs": { - "CloudFunctionConfiguration$InvocationRole": null + "CloudFunctionConfiguration$InvocationRole": "

" } }, "Code": { "base": null, "refs": { - "Error$Code": null + "Error$Code": "

" } }, "Comments": { @@ -534,7 +534,7 @@ } }, "CommonPrefix": { - "base": null, + "base": "

", "refs": { "CommonPrefixList$member": null } @@ -542,9 +542,9 @@ "CommonPrefixList": { "base": null, "refs": { - "ListMultipartUploadsOutput$CommonPrefixes": null, - "ListObjectVersionsOutput$CommonPrefixes": null, - "ListObjectsOutput$CommonPrefixes": null, + "ListMultipartUploadsOutput$CommonPrefixes": "

", + "ListObjectVersionsOutput$CommonPrefixes": "

", + "ListObjectsOutput$CommonPrefixes": "

", "ListObjectsV2Output$CommonPrefixes": "

CommonPrefixes contains all (if there are any) keys between Prefix and the next occurrence of the string specified by the delimiter.

" } }, @@ -559,13 +559,13 @@ } }, "CompletedMultipartUpload": { - "base": null, + "base": "

", "refs": { - "CompleteMultipartUploadRequest$MultipartUpload": null + "CompleteMultipartUploadRequest$MultipartUpload": "

" } }, "CompletedPart": { - "base": null, + "base": "

", "refs": { "CompletedPartList$member": null } @@ -573,7 +573,7 @@ "CompletedPartList": { "base": null, "refs": { - "CompletedMultipartUpload$Parts": null + "CompletedMultipartUpload$Parts": "

" } }, "CompressionType": { @@ -583,7 +583,7 @@ } }, "Condition": { - "base": null, + "base": "

", "refs": { "RoutingRule$Condition": "

A container for describing a condition that must be met for the specified redirect to apply. For example, 1. If request is for pages in the /docs folder, redirect to the /documents folder. 2. If request results in HTTP error 4xx, redirect request to another host where you might process the error.

" } @@ -636,24 +636,24 @@ "ContentMD5": { "base": null, "refs": { - "PutBucketAclRequest$ContentMD5": null, - "PutBucketCorsRequest$ContentMD5": null, - "PutBucketEncryptionRequest$ContentMD5": "

The base64-encoded 128-bit MD5 digest of the server-side encryption configuration.

", - "PutBucketLifecycleRequest$ContentMD5": null, - "PutBucketLoggingRequest$ContentMD5": null, - "PutBucketNotificationRequest$ContentMD5": null, - "PutBucketPolicyRequest$ContentMD5": null, - "PutBucketReplicationRequest$ContentMD5": null, - "PutBucketRequestPaymentRequest$ContentMD5": null, - "PutBucketTaggingRequest$ContentMD5": null, - "PutBucketVersioningRequest$ContentMD5": null, - "PutBucketWebsiteRequest$ContentMD5": null, - "PutObjectAclRequest$ContentMD5": null, + "PutBucketAclRequest$ContentMD5": "

", + "PutBucketCorsRequest$ContentMD5": "

", + "PutBucketEncryptionRequest$ContentMD5": "

The base64-encoded 128-bit MD5 digest of the server-side encryption configuration. This parameter is auto-populated when using the command from the CLI.

", + "PutBucketLifecycleRequest$ContentMD5": "

", + "PutBucketLoggingRequest$ContentMD5": "

", + "PutBucketNotificationRequest$ContentMD5": "

", + "PutBucketPolicyRequest$ContentMD5": "

", + "PutBucketReplicationRequest$ContentMD5": "

", + "PutBucketRequestPaymentRequest$ContentMD5": "

", + "PutBucketTaggingRequest$ContentMD5": "

", + "PutBucketVersioningRequest$ContentMD5": "

", + "PutBucketWebsiteRequest$ContentMD5": "

", + "PutObjectAclRequest$ContentMD5": "

", "PutObjectLegalHoldRequest$ContentMD5": "

The MD5 hash for the request body.

", "PutObjectLockConfigurationRequest$ContentMD5": "

The MD5 hash for the request body.

", - "PutObjectRequest$ContentMD5": "

The base64-encoded 128-bit MD5 digest of the part data.

", + "PutObjectRequest$ContentMD5": "

The base64-encoded 128-bit MD5 digest of the part data. This parameter is auto-populated when using the command from the CLI.

", "PutObjectRetentionRequest$ContentMD5": "

The MD5 hash for the request body.

", - "PutObjectTaggingRequest$ContentMD5": null, + "PutObjectTaggingRequest$ContentMD5": "

", "PutPublicAccessBlockRequest$ContentMD5": "

The MD5 hash of the PutPublicAccessBlock request body.

", "UploadPartRequest$ContentMD5": "

The base64-encoded 128-bit MD5 digest of the part data.

" } @@ -675,7 +675,7 @@ } }, "ContinuationEvent": { - "base": null, + "base": "

", "refs": { "SelectObjectContentEventStream$Cont": "

The Continuation Event.

" } @@ -691,15 +691,15 @@ } }, "CopyObjectResult": { - "base": null, + "base": "

", "refs": { - "CopyObjectOutput$CopyObjectResult": null + "CopyObjectOutput$CopyObjectResult": "

" } }, "CopyPartResult": { - "base": null, + "base": "

", "refs": { - "UploadPartCopyOutput$CopyPartResult": null + "UploadPartCopyOutput$CopyPartResult": "

" } }, "CopySource": { @@ -740,7 +740,7 @@ "CopySourceRange": { "base": null, "refs": { - "UploadPartCopyRequest$CopySourceRange": "

The range of bytes to copy from the source object. The range value must use the form bytes=first-last, where the first and last are the zero-based byte offsets to copy. For example, bytes=0-9 indicates that you want to copy the first ten bytes of the source. You can copy a range only if the source object is greater than 5 GB.

" + "UploadPartCopyRequest$CopySourceRange": "

The range of bytes to copy from the source object. The range value must use the form bytes=first-last, where the first and last are the zero-based byte offsets to copy. For example, bytes=0-9 indicates that you want to copy the first ten bytes of the source. You can copy a range only if the source object is greater than 5 MB.
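To make the bytes=first-last form concrete, a hedged sketch of copying the first 5 MB of a large source object as one part of a multipart upload (bucket names, key, and upload ID are placeholders):

```php
<?php
// Sketch: copy a byte range from a source object as part 1 of a multipart upload.
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => 'latest', 'region' => 'us-east-1']);

$uploadId = 'EXAMPLE-UPLOAD-ID'; // hypothetical; from createMultipartUpload()

$s3->uploadPartCopy([
    'Bucket'          => 'destination-bucket',            // hypothetical
    'Key'             => 'large-object-copy',             // hypothetical
    'UploadId'        => $uploadId,
    'PartNumber'      => 1,
    'CopySource'      => 'source-bucket/large-object',    // hypothetical
    'CopySourceRange' => 'bytes=0-5242879', // zero-based, inclusive offsets
]);
```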

" } }, "CopySourceSSECustomerAlgorithm": { @@ -767,14 +767,14 @@ "CopySourceVersionId": { "base": null, "refs": { - "CopyObjectOutput$CopySourceVersionId": null, + "CopyObjectOutput$CopySourceVersionId": "

", "UploadPartCopyOutput$CopySourceVersionId": "

The version of the source object that was copied, if you have enabled versioning on the source bucket.

" } }, "CreateBucketConfiguration": { - "base": null, + "base": "

", "refs": { - "CreateBucketRequest$CreateBucketConfiguration": null + "CreateBucketRequest$CreateBucketConfiguration": "

" } }, "CreateBucketOutput": { @@ -816,8 +816,8 @@ "refs": { "DefaultRetention$Days": "

The number of days that you want to specify for the default retention period.

", "LifecycleExpiration$Days": "

Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.

", - "NoncurrentVersionExpiration$NoncurrentDays": "

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

", - "NoncurrentVersionTransition$NoncurrentDays": "

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

", + "NoncurrentVersionExpiration$NoncurrentDays": "

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

", + "NoncurrentVersionTransition$NoncurrentDays": "

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

", "RestoreRequest$Days": "

Lifetime of the active copy in days. Do not use with restores that specify OutputLocation.

", "Transition$Days": "

Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.

" } @@ -835,9 +835,9 @@ } }, "Delete": { - "base": null, + "base": "

", "refs": { - "DeleteObjectsRequest$Delete": null + "DeleteObjectsRequest$Delete": "

" } }, "DeleteBucketAnalyticsConfigurationRequest": { @@ -899,13 +899,13 @@ "base": null, "refs": { "DeleteObjectOutput$DeleteMarker": "

Specifies whether the versioned object that was permanently deleted was (true) or was not (false) a delete marker.

", - "DeletedObject$DeleteMarker": null, + "DeletedObject$DeleteMarker": "

", "GetObjectOutput$DeleteMarker": "

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If false, this response header does not appear in the response.

", "HeadObjectOutput$DeleteMarker": "

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If false, this response header does not appear in the response.

" } }, "DeleteMarkerEntry": { - "base": null, + "base": "

", "refs": { "DeleteMarkers$member": null } @@ -925,13 +925,13 @@ "DeleteMarkerVersionId": { "base": null, "refs": { - "DeletedObject$DeleteMarkerVersionId": null + "DeletedObject$DeleteMarkerVersionId": "

" } }, "DeleteMarkers": { "base": null, "refs": { - "ListObjectVersionsOutput$DeleteMarkers": null + "ListObjectVersionsOutput$DeleteMarkers": "

" } }, "DeleteObjectOutput": { @@ -970,7 +970,7 @@ } }, "DeletedObject": { - "base": null, + "base": "

", "refs": { "DeletedObjects$member": null } @@ -978,17 +978,17 @@ "DeletedObjects": { "base": null, "refs": { - "DeleteObjectsOutput$Deleted": null + "DeleteObjectsOutput$Deleted": "

" } }, "Delimiter": { "base": null, "refs": { - "ListMultipartUploadsOutput$Delimiter": null, + "ListMultipartUploadsOutput$Delimiter": "

", "ListMultipartUploadsRequest$Delimiter": "

Character you use to group keys.

", - "ListObjectVersionsOutput$Delimiter": null, + "ListObjectVersionsOutput$Delimiter": "

", "ListObjectVersionsRequest$Delimiter": "

A delimiter is a character you use to group keys.

", - "ListObjectsOutput$Delimiter": null, + "ListObjectsOutput$Delimiter": "

", "ListObjectsRequest$Delimiter": "

A delimiter is a character you use to group keys.

", "ListObjectsV2Output$Delimiter": "

A delimiter is a character you use to group keys.

", "ListObjectsV2Request$Delimiter": "

A delimiter is a character you use to group keys.

" @@ -1011,7 +1011,7 @@ "refs": { "Grantee$DisplayName": "

Screen name of the grantee.

", "Initiator$DisplayName": "

Name of the Principal.

", - "Owner$DisplayName": null + "Owner$DisplayName": "

" } }, "ETag": { @@ -1019,12 +1019,12 @@ "refs": { "CompleteMultipartUploadOutput$ETag": "

Entity tag of the object.

", "CompletedPart$ETag": "

Entity tag returned when the part was uploaded.

", - "CopyObjectResult$ETag": null, + "CopyObjectResult$ETag": "

", "CopyPartResult$ETag": "

Entity tag of the object.

", "GetObjectOutput$ETag": "

An ETag is an opaque identifier assigned by a web server to a specific version of a resource found at a URL.

", "HeadObjectOutput$ETag": "

An ETag is an opaque identifier assigned by a web server to a specific version of a resource found at a URL.

", - "Object$ETag": null, - "ObjectVersion$ETag": null, + "Object$ETag": "

", + "ObjectVersion$ETag": "

", "Part$ETag": "

Entity tag returned when the part was uploaded.

", "PutObjectOutput$ETag": "

Entity tag for the uploaded object.

", "UploadPartOutput$ETag": "

Entity tag for the uploaded object.

" @@ -1068,28 +1068,28 @@ } }, "EndEvent": { - "base": null, + "base": "

", "refs": { "SelectObjectContentEventStream$End": "

The End Event.

" } }, "Error": { - "base": null, + "base": "

", "refs": { "Errors$member": null } }, "ErrorDocument": { - "base": null, + "base": "

", "refs": { - "GetBucketWebsiteOutput$ErrorDocument": null, - "WebsiteConfiguration$ErrorDocument": null + "GetBucketWebsiteOutput$ErrorDocument": "

", + "WebsiteConfiguration$ErrorDocument": "

" } }, "Errors": { "base": null, "refs": { - "DeleteObjectsOutput$Errors": null + "DeleteObjectsOutput$Errors": "

" } }, "Event": { @@ -1104,12 +1104,12 @@ "EventList": { "base": null, "refs": { - "CloudFunctionConfiguration$Events": null, - "LambdaFunctionConfiguration$Events": null, - "QueueConfiguration$Events": null, - "QueueConfigurationDeprecated$Events": null, - "TopicConfiguration$Events": null, - "TopicConfigurationDeprecated$Events": null + "CloudFunctionConfiguration$Events": "

", + "LambdaFunctionConfiguration$Events": "

", + "QueueConfiguration$Events": "

", + "QueueConfigurationDeprecated$Events": "

", + "TopicConfiguration$Events": "

", + "TopicConfigurationDeprecated$Events": "

" } }, "Expiration": { @@ -1205,13 +1205,13 @@ "FilterRuleName": { "base": null, "refs": { - "FilterRule$Name": "

The object key name prefix or suffix identifying one or more objects to which the filtering rule applies. The maximum prefix length is 1,024 characters. Overlapping prefixes and suffixes are not supported. For more information, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

" + "FilterRule$Name": "

The object key name prefix or suffix identifying one or more objects to which the filtering rule applies. The maximum prefix length is 1,024 characters. Overlapping prefixes and suffixes are not supported. For more information, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

" } }, "FilterRuleValue": { "base": null, "refs": { - "FilterRule$Value": null + "FilterRule$Value": "

" } }, "GetBucketAccelerateConfigurationOutput": { @@ -1480,13 +1480,13 @@ } }, "GlacierJobParameters": { - "base": null, + "base": "

", "refs": { "RestoreRequest$GlacierJobParameters": "

Glacier related parameters pertaining to this job. Do not use with restores that specify OutputLocation.

" } }, "Grant": { - "base": null, + "base": "

", "refs": { "Grants$member": null } @@ -1544,10 +1544,10 @@ } }, "Grantee": { - "base": null, + "base": "

", "refs": { - "Grant$Grantee": null, - "TargetGrant$Grantee": null + "Grant$Grantee": "

", + "TargetGrant$Grantee": "

" } }, "Grants": { @@ -1599,7 +1599,7 @@ "Grantee$ID": "

The canonical user ID of the grantee.

", "Initiator$ID": "

If the principal is an AWS account, it provides the Canonical User ID. If the principal is an IAM User, it provides a user ARN value.

", "LifecycleRule$ID": "

Unique identifier for the rule. The value cannot be longer than 255 characters.

", - "Owner$ID": null, + "Owner$ID": "

", "ReplicationRule$ID": "

A unique identifier for the rule. The maximum value is 255 characters.

", "Rule$ID": "

Unique identifier for the rule. The value cannot be longer than 255 characters.

" } @@ -1633,10 +1633,10 @@ } }, "IndexDocument": { - "base": null, + "base": "

", "refs": { - "GetBucketWebsiteOutput$IndexDocument": null, - "WebsiteConfiguration$IndexDocument": null + "GetBucketWebsiteOutput$IndexDocument": "

", + "WebsiteConfiguration$IndexDocument": "

" } }, "Initiated": { @@ -1646,7 +1646,7 @@ } }, "Initiator": { - "base": null, + "base": "

", "refs": { "ListPartsOutput$Initiator": "

Identifies who initiated the multipart upload.

", "MultipartUpload$Initiator": "

Identifies who initiated the multipart upload.

" @@ -1660,7 +1660,7 @@ } }, "InventoryConfiguration": { - "base": null, + "base": "

", "refs": { "GetBucketInventoryConfigurationOutput$InventoryConfiguration": "

Specifies the inventory configuration.

", "InventoryConfigurationList$member": null, @@ -1674,7 +1674,7 @@ } }, "InventoryDestination": { - "base": null, + "base": "

", "refs": { "InventoryConfiguration$Destination": "

Contains information about where to publish the inventory results.

" } @@ -1686,7 +1686,7 @@ } }, "InventoryFilter": { - "base": null, + "base": "

", "refs": { "InventoryConfiguration$Filter": "

Specifies an inventory filter. The inventory only includes objects that meet the filter's criteria.

" } @@ -1731,13 +1731,13 @@ } }, "InventoryS3BucketDestination": { - "base": null, + "base": "

", "refs": { "InventoryDestination$S3BucketDestination": "

Contains the bucket name, file format, bucket owner (optional), and prefix (optional) where inventory results are published.

" } }, "InventorySchedule": { - "base": null, + "base": "

", "refs": { "InventoryConfiguration$Schedule": "

Specifies the schedule for generating inventory results.

" } @@ -1775,13 +1775,13 @@ } }, "JSONInput": { - "base": null, + "base": "

", "refs": { "InputSerialization$JSON": "

Specifies JSON as object's input serialization format.

" } }, "JSONOutput": { - "base": null, + "base": "

", "refs": { "OutputSerialization$JSON": "

Specifies JSON as request's output serialization format.

" } @@ -1834,37 +1834,37 @@ "LambdaFunctionConfigurationList": { "base": null, "refs": { - "NotificationConfiguration$LambdaFunctionConfigurations": null + "NotificationConfiguration$LambdaFunctionConfigurations": "

" } }, "LastModified": { "base": null, "refs": { - "CopyObjectResult$LastModified": null, + "CopyObjectResult$LastModified": "

", "CopyPartResult$LastModified": "

Date and time at which the object was uploaded.

", "DeleteMarkerEntry$LastModified": "

Date and time the object was last modified.

", "GetObjectOutput$LastModified": "

Last modified date of the object

", "HeadObjectOutput$LastModified": "

Last modified date of the object

", - "Object$LastModified": null, + "Object$LastModified": "

", "ObjectVersion$LastModified": "

Date and time the object was last modified.

", "Part$LastModified": "

Date and time at which the part was uploaded.

" } }, "LifecycleConfiguration": { - "base": null, + "base": "

", "refs": { - "PutBucketLifecycleRequest$LifecycleConfiguration": null + "PutBucketLifecycleRequest$LifecycleConfiguration": "

" } }, "LifecycleExpiration": { - "base": null, + "base": "

", "refs": { - "LifecycleRule$Expiration": null, - "Rule$Expiration": null + "LifecycleRule$Expiration": "

", + "Rule$Expiration": "

" } }, "LifecycleRule": { - "base": null, + "base": "

", "refs": { "LifecycleRules$member": null } @@ -1884,8 +1884,8 @@ "LifecycleRules": { "base": null, "refs": { - "BucketLifecycleConfiguration$Rules": null, - "GetBucketLifecycleConfigurationOutput$Rules": null + "BucketLifecycleConfiguration$Rules": "

", + "GetBucketLifecycleConfigurationOutput$Rules": "

" } }, "ListBucketAnalyticsConfigurationsOutput": { @@ -1976,8 +1976,8 @@ "Location": { "base": null, "refs": { - "CompleteMultipartUploadOutput$Location": null, - "CreateBucketOutput$Location": null + "CompleteMultipartUploadOutput$Location": "

", + "CreateBucketOutput$Location": "

" } }, "LocationPrefix": { @@ -2016,7 +2016,7 @@ "Marker": { "base": null, "refs": { - "ListObjectsOutput$Marker": null, + "ListObjectsOutput$Marker": "

", "ListObjectsRequest$Marker": "

Specifies the key to start with when listing objects in a bucket.

" } }, @@ -2029,9 +2029,9 @@ "MaxKeys": { "base": null, "refs": { - "ListObjectVersionsOutput$MaxKeys": null, + "ListObjectVersionsOutput$MaxKeys": "

", "ListObjectVersionsRequest$MaxKeys": "

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

", - "ListObjectsOutput$MaxKeys": null, + "ListObjectsOutput$MaxKeys": "

", "ListObjectsRequest$MaxKeys": "

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

", "ListObjectsV2Output$MaxKeys": "

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

", "ListObjectsV2Request$MaxKeys": "

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

" @@ -2054,7 +2054,7 @@ "Message": { "base": null, "refs": { - "Error$Message": null + "Error$Message": "

" } }, "Metadata": { @@ -2083,24 +2083,24 @@ "base": null, "refs": { "Metadata$key": null, - "MetadataEntry$Name": null + "MetadataEntry$Name": "

" } }, "MetadataValue": { "base": null, "refs": { "Metadata$value": null, - "MetadataEntry$Value": null + "MetadataEntry$Value": "

" } }, "MetricsAndOperator": { - "base": null, + "base": "

", "refs": { "MetricsFilter$And": "

A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter. The operator must have at least two predicates, and an object must match all of the predicates in order for the filter to apply.

" } }, "MetricsConfiguration": { - "base": null, + "base": "

", "refs": { "GetBucketMetricsConfigurationOutput$MetricsConfiguration": "

Specifies the metrics configuration.

", "MetricsConfigurationList$member": null, @@ -2114,7 +2114,7 @@ } }, "MetricsFilter": { - "base": null, + "base": "

", "refs": { "MetricsConfiguration$Filter": "

Specifies a metrics configuration filter. The metrics configuration will only include objects that meet the filter's criteria. A filter must be a prefix, a tag, or a conjunction (MetricsAndOperator).
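A sketch of the prefix-only form of this filter (bucket name and configuration ID are placeholders):

```php
<?php
// Sketch: a metrics configuration limited to one key prefix.
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => 'latest', 'region' => 'us-east-1']);

$s3->putBucketMetricsConfiguration([
    'Bucket'               => 'my-example-bucket', // hypothetical bucket
    'Id'                   => 'docs-only',         // hypothetical config ID
    'MetricsConfiguration' => [
        'Id'     => 'docs-only',
        'Filter' => ['Prefix' => 'documents/'], // exactly one predicate here
    ],
]);
```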

" } @@ -2136,7 +2136,7 @@ } }, "MultipartUpload": { - "base": null, + "base": "

", "refs": { "MultipartUploadList$member": null } @@ -2144,8 +2144,8 @@ "MultipartUploadId": { "base": null, "refs": { - "AbortMultipartUploadRequest$UploadId": null, - "CompleteMultipartUploadRequest$UploadId": null, + "AbortMultipartUploadRequest$UploadId": "

", + "CompleteMultipartUploadRequest$UploadId": "

", "CreateMultipartUploadOutput$UploadId": "

ID for the initiated multipart upload.

", "ListPartsOutput$UploadId": "

Upload ID identifying the multipart upload whose parts are being listed.

", "ListPartsRequest$UploadId": "

Upload ID identifying the multipart upload whose parts are being listed.

", @@ -2157,7 +2157,7 @@ "MultipartUploadList": { "base": null, "refs": { - "ListMultipartUploadsOutput$Uploads": null + "ListMultipartUploadsOutput$Uploads": "

" } }, "NextKeyMarker": { @@ -2232,7 +2232,7 @@ "NoncurrentVersionTransitionList": { "base": null, "refs": { - "LifecycleRule$NoncurrentVersionTransitions": null + "LifecycleRule$NoncurrentVersionTransitions": "

" } }, "NotificationConfiguration": { @@ -2244,11 +2244,11 @@ "NotificationConfigurationDeprecated": { "base": null, "refs": { - "PutBucketNotificationRequest$NotificationConfiguration": null + "PutBucketNotificationRequest$NotificationConfiguration": "

" } }, "NotificationConfigurationFilter": { - "base": "

A container for object key name filtering rules. For information about key name filtering, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

", + "base": "

A container for object key name filtering rules. For information about key name filtering, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

", "refs": { "LambdaFunctionConfiguration$Filter": null, "QueueConfiguration$Filter": null, @@ -2267,7 +2267,7 @@ } }, "Object": { - "base": null, + "base": "

", "refs": { "ObjectList$member": null } @@ -2288,7 +2288,7 @@ } }, "ObjectIdentifier": { - "base": null, + "base": "

", "refs": { "ObjectIdentifierList$member": null } @@ -2296,53 +2296,53 @@ "ObjectIdentifierList": { "base": null, "refs": { - "Delete$Objects": null + "Delete$Objects": "

" } }, "ObjectKey": { "base": null, "refs": { - "AbortMultipartUploadRequest$Key": null, - "CompleteMultipartUploadOutput$Key": null, - "CompleteMultipartUploadRequest$Key": null, - "CopyObjectRequest$Key": null, + "AbortMultipartUploadRequest$Key": "

", + "CompleteMultipartUploadOutput$Key": "

", + "CompleteMultipartUploadRequest$Key": "

", + "CopyObjectRequest$Key": "

", "CreateMultipartUploadOutput$Key": "

Object key for which the multipart upload was initiated.

", - "CreateMultipartUploadRequest$Key": null, + "CreateMultipartUploadRequest$Key": "

", "DeleteMarkerEntry$Key": "

The object key.

", - "DeleteObjectRequest$Key": null, - "DeleteObjectTaggingRequest$Key": null, - "DeletedObject$Key": null, - "Error$Key": null, + "DeleteObjectRequest$Key": "

", + "DeleteObjectTaggingRequest$Key": "

", + "DeletedObject$Key": "

", + "Error$Key": "

", "ErrorDocument$Key": "

The object key name to use when a 4XX class error occurs.

", - "GetObjectAclRequest$Key": null, + "GetObjectAclRequest$Key": "

", "GetObjectLegalHoldRequest$Key": "

The key name for the object whose Legal Hold status you want to retrieve.

", - "GetObjectRequest$Key": null, + "GetObjectRequest$Key": "

", "GetObjectRetentionRequest$Key": "

The key name for the object whose retention settings you want to retrieve.

", - "GetObjectTaggingRequest$Key": null, - "GetObjectTorrentRequest$Key": null, - "HeadObjectRequest$Key": null, + "GetObjectTaggingRequest$Key": "

", + "GetObjectTorrentRequest$Key": "

", + "HeadObjectRequest$Key": "

", "ListPartsOutput$Key": "

Object key for which the multipart upload was initiated.

", - "ListPartsRequest$Key": null, + "ListPartsRequest$Key": "

", "MultipartUpload$Key": "

Key of the object for which the multipart upload was initiated.

", - "Object$Key": null, + "Object$Key": "

", "ObjectIdentifier$Key": "

Key name of the object to delete.

", "ObjectVersion$Key": "

The object key.

", - "PutObjectAclRequest$Key": null, + "PutObjectAclRequest$Key": "

", "PutObjectLegalHoldRequest$Key": "

The key name for the object that you want to place a Legal Hold on.

", "PutObjectRequest$Key": "

Object key for which the PUT operation was initiated.

", "PutObjectRetentionRequest$Key": "

The key name for the object that you want to apply this Object Retention configuration to.

", - "PutObjectTaggingRequest$Key": null, - "RestoreObjectRequest$Key": null, + "PutObjectTaggingRequest$Key": "

", + "RestoreObjectRequest$Key": "

", "SelectObjectContentRequest$Key": "

The object key.

", "Tag$Key": "

Name of the tag.

", - "UploadPartCopyRequest$Key": null, + "UploadPartCopyRequest$Key": "

", "UploadPartRequest$Key": "

Object key for which the multipart upload was initiated.

" } }, "ObjectList": { "base": null, "refs": { - "ListObjectsOutput$Contents": null, + "ListObjectsOutput$Contents": "

", "ListObjectsV2Output$Contents": "

Metadata about each object returned.

" } }, @@ -2426,7 +2426,7 @@ "ObjectLockToken": { "base": null, "refs": { - "PutObjectLockConfigurationRequest$Token": null + "PutObjectLockConfigurationRequest$Token": "

A token to allow Object Lock to be enabled for an existing bucket.

" } }, "ObjectNotInActiveTierError": { @@ -2441,7 +2441,7 @@ } }, "ObjectVersion": { - "base": null, + "base": "

", "refs": { "ObjectVersionList$member": null } @@ -2456,15 +2456,15 @@ "DeleteObjectRequest$VersionId": "

VersionId used to reference a specific version of the object.

", "DeleteObjectTaggingOutput$VersionId": "

The versionId of the object the tag-set was removed from.

", "DeleteObjectTaggingRequest$VersionId": "

The versionId of the object that the tag-set will be removed from.

", - "DeletedObject$VersionId": null, - "Error$VersionId": null, + "DeletedObject$VersionId": "

", + "Error$VersionId": "

", "GetObjectAclRequest$VersionId": "

VersionId used to reference a specific version of the object.

", "GetObjectLegalHoldRequest$VersionId": "

The version ID of the object whose Legal Hold status you want to retrieve.

", "GetObjectOutput$VersionId": "

Version of the object.

", "GetObjectRequest$VersionId": "

VersionId used to reference a specific version of the object.

", "GetObjectRetentionRequest$VersionId": "

The version ID for the object whose retention settings you want to retrieve.

", - "GetObjectTaggingOutput$VersionId": null, - "GetObjectTaggingRequest$VersionId": null, + "GetObjectTaggingOutput$VersionId": "

", + "GetObjectTaggingRequest$VersionId": "

", "HeadObjectOutput$VersionId": "

Version of the object.

", "HeadObjectRequest$VersionId": "

VersionId used to reference a specific version of the object.

", "ObjectIdentifier$VersionId": "

VersionId for the specific version of the object to delete.

", @@ -2473,15 +2473,15 @@ "PutObjectLegalHoldRequest$VersionId": "

The version ID of the object that you want to place a Legal Hold on.

", "PutObjectOutput$VersionId": "

Version of the object.

", "PutObjectRetentionRequest$VersionId": "

The version ID for the object that you want to apply this Object Retention configuration to.

", - "PutObjectTaggingOutput$VersionId": null, - "PutObjectTaggingRequest$VersionId": null, - "RestoreObjectRequest$VersionId": null + "PutObjectTaggingOutput$VersionId": "

", + "PutObjectTaggingRequest$VersionId": "

", + "RestoreObjectRequest$VersionId": "

" } }, "ObjectVersionList": { "base": null, "refs": { - "ListObjectVersionsOutput$Versions": null + "ListObjectVersionsOutput$Versions": "

" } }, "ObjectVersionStorageClass": { @@ -2504,17 +2504,17 @@ } }, "Owner": { - "base": null, + "base": "

", "refs": { - "AccessControlPolicy$Owner": null, - "DeleteMarkerEntry$Owner": null, - "GetBucketAclOutput$Owner": null, - "GetObjectAclOutput$Owner": null, - "ListBucketsOutput$Owner": null, - "ListPartsOutput$Owner": null, - "MultipartUpload$Owner": null, - "Object$Owner": null, - "ObjectVersion$Owner": null + "AccessControlPolicy$Owner": "

", + "DeleteMarkerEntry$Owner": "

", + "GetBucketAclOutput$Owner": "

", + "GetObjectAclOutput$Owner": "

", + "ListBucketsOutput$Owner": "

", + "ListPartsOutput$Owner": "

", + "MultipartUpload$Owner": "

", + "Object$Owner": "

", + "ObjectVersion$Owner": "

" } }, "OwnerOverride": { @@ -2524,13 +2524,13 @@ } }, "ParquetInput": { - "base": null, + "base": "

", "refs": { "InputSerialization$Parquet": "

Specifies Parquet as object's input serialization format.

" } }, "Part": { - "base": null, + "base": "

", "refs": { "Parts$member": null } @@ -2556,7 +2556,7 @@ "Parts": { "base": null, "refs": { - "ListPartsOutput$Parts": null + "ListPartsOutput$Parts": "

" } }, "PartsCount": { @@ -2598,24 +2598,24 @@ "AnalyticsAndOperator$Prefix": "

The prefix to use when evaluating an AND predicate.

", "AnalyticsFilter$Prefix": "

The prefix to use when evaluating an analytics filter.

", "AnalyticsS3BucketDestination$Prefix": "

The prefix to use when exporting data. The exported data begins with this prefix.

", - "CommonPrefix$Prefix": null, + "CommonPrefix$Prefix": "

", "InventoryFilter$Prefix": "

The prefix that an object must have to be included in the inventory results.

", "InventoryS3BucketDestination$Prefix": "

The prefix that is prepended to all inventory results.

", - "LifecycleRule$Prefix": "

Prefix identifying one or more objects to which the rule applies. This is deprecated; use Filter instead.

", - "LifecycleRuleAndOperator$Prefix": null, + "LifecycleRule$Prefix": "

Prefix identifying one or more objects to which the rule applies. This is no longer used; use Filter instead.

", + "LifecycleRuleAndOperator$Prefix": "

", "LifecycleRuleFilter$Prefix": "

Prefix identifying one or more objects to which the rule applies.

", "ListMultipartUploadsOutput$Prefix": "

When a prefix is provided in the request, this field contains the specified prefix. The result contains only keys starting with the specified prefix.

", "ListMultipartUploadsRequest$Prefix": "

Lists in-progress uploads only for those keys that begin with the specified prefix.

", - "ListObjectVersionsOutput$Prefix": null, + "ListObjectVersionsOutput$Prefix": "

", "ListObjectVersionsRequest$Prefix": "

Limits the response to keys that begin with the specified prefix.

", - "ListObjectsOutput$Prefix": null, + "ListObjectsOutput$Prefix": "

", "ListObjectsRequest$Prefix": "

Limits the response to keys that begin with the specified prefix.

", "ListObjectsV2Output$Prefix": "

Limits the response to keys that begin with the specified prefix.

", "ListObjectsV2Request$Prefix": "

Limits the response to keys that begin with the specified prefix.

", "MetricsAndOperator$Prefix": "

The prefix used when evaluating an AND predicate.

", "MetricsFilter$Prefix": "

The prefix used when evaluating a metrics filter.

", "ReplicationRule$Prefix": "

An object keyname prefix that identifies the object or objects to which the rule applies. The maximum prefix length is 1,024 characters.

", - "ReplicationRuleAndOperator$Prefix": null, + "ReplicationRuleAndOperator$Prefix": "

", "ReplicationRuleFilter$Prefix": "

An object keyname prefix that identifies the subset of objects to which the rule applies.

", "Rule$Prefix": "

Prefix identifying one or more objects to which the rule applies.

" } @@ -2627,13 +2627,13 @@ } }, "Progress": { - "base": null, + "base": "

", "refs": { "ProgressEvent$Details": "

The Progress event details.

" } }, "ProgressEvent": { - "base": null, + "base": "

", "refs": { "SelectObjectContentEventStream$Progress": "

The Progress Event.

" } @@ -2646,7 +2646,7 @@ } }, "PublicAccessBlockConfiguration": { - "base": null, + "base": "

", "refs": { "GetPublicAccessBlockOutput$PublicAccessBlockConfiguration": "

The PublicAccessBlock configuration currently in effect for this Amazon S3 bucket.

", "PutPublicAccessBlockRequest$PublicAccessBlockConfiguration": "

The PublicAccessBlock configuration that you want to apply to this Amazon S3 bucket. You can enable the configuration options in any combination. For more information about when Amazon S3 considers a bucket or object public, see The Meaning of \"Public\" in the Amazon Simple Storage Service Developer Guide.

" @@ -2811,7 +2811,7 @@ "base": null, "refs": { "QueueConfiguration$QueueArn": "

The Amazon Resource Name (ARN) of the Amazon SQS queue to which Amazon S3 will publish a message when it detects events of the specified type.

", - "QueueConfigurationDeprecated$Queue": null + "QueueConfigurationDeprecated$Queue": "

" } }, "QueueConfiguration": { @@ -2821,15 +2821,15 @@ } }, "QueueConfigurationDeprecated": { - "base": null, + "base": "

", "refs": { - "NotificationConfigurationDeprecated$QueueConfiguration": null + "NotificationConfigurationDeprecated$QueueConfiguration": "

" } }, "QueueConfigurationList": { "base": null, "refs": { - "NotificationConfiguration$QueueConfigurations": null + "NotificationConfiguration$QueueConfigurations": "

" } }, "Quiet": { @@ -2874,22 +2874,22 @@ } }, "RecordsEvent": { - "base": null, + "base": "

", "refs": { "SelectObjectContentEventStream$Records": "

The Records Event.

" } }, "Redirect": { - "base": null, + "base": "

", "refs": { "RoutingRule$Redirect": "

Container for redirect information. You can redirect requests to another host, to another page, or with another protocol. In the event of an error, you can specify a different error code to return.

" } }, "RedirectAllRequestsTo": { - "base": null, + "base": "

", "refs": { - "GetBucketWebsiteOutput$RedirectAllRequestsTo": null, - "WebsiteConfiguration$RedirectAllRequestsTo": null + "GetBucketWebsiteOutput$RedirectAllRequestsTo": "

", + "WebsiteConfiguration$RedirectAllRequestsTo": "

" } }, "ReplaceKeyPrefixWith": { @@ -2924,7 +2924,7 @@ } }, "ReplicationRuleAndOperator": { - "base": null, + "base": "

", "refs": { "ReplicationRuleFilter$And": "

A container for specifying rule filters. The filters determine the subset of objects to which the rule applies. This element is required only if you specify more than one filter. For example:

" } @@ -2950,8 +2950,8 @@ "ReplicationStatus": { "base": null, "refs": { - "GetObjectOutput$ReplicationStatus": null, - "HeadObjectOutput$ReplicationStatus": null + "GetObjectOutput$ReplicationStatus": "

", + "HeadObjectOutput$ReplicationStatus": "

" } }, "RequestCharged": { @@ -3007,13 +3007,13 @@ } }, "RequestPaymentConfiguration": { - "base": null, + "base": "

", "refs": { - "PutBucketRequestPaymentRequest$RequestPaymentConfiguration": null + "PutBucketRequestPaymentRequest$RequestPaymentConfiguration": "

" } }, "RequestProgress": { - "base": null, + "base": "

", "refs": { "SelectObjectContentRequest$RequestProgress": "

Specifies if periodic request progress information should be enabled.

" } @@ -3096,7 +3096,7 @@ } }, "RoutingRule": { - "base": null, + "base": "

", "refs": { "RoutingRules$member": null } @@ -3104,12 +3104,12 @@ "RoutingRules": { "base": null, "refs": { - "GetBucketWebsiteOutput$RoutingRules": null, - "WebsiteConfiguration$RoutingRules": null + "GetBucketWebsiteOutput$RoutingRules": "

", + "WebsiteConfiguration$RoutingRules": "

" } }, "Rule": { - "base": null, + "base": "

", "refs": { "Rules$member": null } @@ -3117,8 +3117,8 @@ "Rules": { "base": null, "refs": { - "GetBucketLifecycleOutput$Rules": null, - "LifecycleConfiguration$Rules": null + "GetBucketLifecycleOutput$Rules": "

", + "LifecycleConfiguration$Rules": "

" } }, "S3KeyFilter": { @@ -3146,7 +3146,7 @@ "HeadObjectRequest$SSECustomerAlgorithm": "

Specifies the algorithm to use when encrypting the object (e.g., AES256).

", "PutObjectOutput$SSECustomerAlgorithm": "

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

", "PutObjectRequest$SSECustomerAlgorithm": "

Specifies the algorithm to use when encrypting the object (e.g., AES256).

", - "SelectObjectContentRequest$SSECustomerAlgorithm": "

The SSE Algorithm used to encrypt the object. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys.

", + "SelectObjectContentRequest$SSECustomerAlgorithm": "

The SSE Algorithm used to encrypt the object. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

", "UploadPartCopyOutput$SSECustomerAlgorithm": "

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

", "UploadPartCopyRequest$SSECustomerAlgorithm": "

Specifies the algorithm to use when encrypting the object (e.g., AES256).

", "UploadPartOutput$SSECustomerAlgorithm": "

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

", @@ -3161,7 +3161,7 @@ "GetObjectRequest$SSECustomerKey": "

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

", "HeadObjectRequest$SSECustomerKey": "

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

", "PutObjectRequest$SSECustomerKey": "

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

", - "SelectObjectContentRequest$SSECustomerKey": "

The SSE Customer Key. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys.

", + "SelectObjectContentRequest$SSECustomerKey": "

The SSE Customer Key. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

", "UploadPartCopyRequest$SSECustomerKey": "

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header. This must be the same encryption key specified in the initiate multipart upload request.

", "UploadPartRequest$SSECustomerKey": "

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header. This must be the same encryption key specified in the initiate multipart upload request.

" } @@ -3179,7 +3179,7 @@ "HeadObjectRequest$SSECustomerKeyMD5": "

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

", "PutObjectOutput$SSECustomerKeyMD5": "

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

", "PutObjectRequest$SSECustomerKeyMD5": "

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

", - "SelectObjectContentRequest$SSECustomerKeyMD5": "

The SSE Customer Key MD5. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys.

", + "SelectObjectContentRequest$SSECustomerKeyMD5": "

The SSE Customer Key MD5. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

", "UploadPartCopyOutput$SSECustomerKeyMD5": "

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

", "UploadPartCopyRequest$SSECustomerKeyMD5": "

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

", "UploadPartOutput$SSECustomerKeyMD5": "

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

", @@ -3218,9 +3218,9 @@ } }, "SelectObjectContentEventStream": { - "base": null, + "base": "

", "refs": { - "SelectObjectContentOutput$Payload": null + "SelectObjectContentOutput$Payload": "

" } }, "SelectObjectContentOutput": { @@ -3229,7 +3229,7 @@ } }, "SelectObjectContentRequest": { - "base": "

Request to filter the contents of an Amazon S3 object based on a simple Structured Query Language (SQL) statement. In the request, along with the SQL expression, you must specify a data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data into records. It returns only records that match the specified SQL expression. You must also specify the data serialization format for the response. For more information, see S3Select API Documentation.

", + "base": "

Request to filter the contents of an Amazon S3 object based on a simple Structured Query Language (SQL) statement. In the request, along with the SQL expression, you must specify a data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data into records. It returns only records that match the specified SQL expression. You must also specify the data serialization format for the response. For more information, see S3Select API Documentation.

", "refs": { } }, @@ -3294,7 +3294,7 @@ "Size": { "base": null, "refs": { - "Object$Size": null, + "Object$Size": "

", "ObjectVersion$Size": "

Size in bytes of the object.

", "Part$Size": "

Size in bytes of the uploaded part data.

" } @@ -3325,13 +3325,13 @@ } }, "Stats": { - "base": null, + "base": "

", "refs": { "StatsEvent$Details": "

The Stats event details.

" } }, "StatsEvent": { - "base": null, + "base": "

", "refs": { "SelectObjectContentEventStream$Stats": "

The Stats Event.

" } @@ -3342,8 +3342,8 @@ "CopyObjectRequest$StorageClass": "

The type of storage to use for the object. Defaults to 'STANDARD'.

", "CreateMultipartUploadRequest$StorageClass": "

The type of storage to use for the object. Defaults to 'STANDARD'.

", "Destination$StorageClass": "

The class of storage used to store the object. By default Amazon S3 uses storage class of the source object when creating a replica.

", - "GetObjectOutput$StorageClass": null, - "HeadObjectOutput$StorageClass": null, + "GetObjectOutput$StorageClass": "

", + "HeadObjectOutput$StorageClass": "

", "ListPartsOutput$StorageClass": "

The class of storage used to store the object.

", "MultipartUpload$StorageClass": "

The class of storage used to store the object.

", "PutObjectRequest$StorageClass": "

The type of storage to use for the object. Defaults to 'STANDARD'.

", @@ -3351,13 +3351,13 @@ } }, "StorageClassAnalysis": { - "base": null, + "base": "

", "refs": { "AnalyticsConfiguration$StorageClassAnalysis": "

If present, it indicates that data related to access patterns will be collected and made available to analyze the tradeoffs between different storage classes.

" } }, "StorageClassAnalysisDataExport": { - "base": null, + "base": "

", "refs": { "StorageClassAnalysis$DataExport": "

A container used to describe how data related to the storage class analysis should be exported.

" } @@ -3375,7 +3375,7 @@ } }, "Tag": { - "base": null, + "base": "

", "refs": { "AnalyticsFilter$Tag": "

The tag to use when evaluating an analytics filter.

", "LifecycleRuleFilter$Tag": "

This tag must exist in the object's tag set in order for the rule to apply.

", @@ -3394,19 +3394,19 @@ "base": null, "refs": { "AnalyticsAndOperator$Tags": "

The list of tags to use when evaluating an AND predicate.

", - "GetBucketTaggingOutput$TagSet": null, - "GetObjectTaggingOutput$TagSet": null, + "GetBucketTaggingOutput$TagSet": "

", + "GetObjectTaggingOutput$TagSet": "

", "LifecycleRuleAndOperator$Tags": "

All of these tags must exist in the object's tag set in order for the rule to apply.

", "MetricsAndOperator$Tags": "

The list of tags used when evaluating an AND predicate.

", - "ReplicationRuleAndOperator$Tags": null, - "Tagging$TagSet": null + "ReplicationRuleAndOperator$Tags": "

", + "Tagging$TagSet": "

" } }, "Tagging": { - "base": null, + "base": "

", "refs": { - "PutBucketTaggingRequest$Tagging": null, - "PutObjectTaggingRequest$Tagging": null, + "PutBucketTaggingRequest$Tagging": "

", + "PutObjectTaggingRequest$Tagging": "

", "S3Location$Tagging": "

The tag-set that is applied to the restore results.

" } }, @@ -3431,7 +3431,7 @@ } }, "TargetGrant": { - "base": null, + "base": "

", "refs": { "TargetGrants$member": null } @@ -3439,7 +3439,7 @@ "TargetGrants": { "base": null, "refs": { - "LoggingEnabled$TargetGrants": null + "LoggingEnabled$TargetGrants": "

" } }, "TargetPrefix": { @@ -3482,28 +3482,28 @@ } }, "TopicConfigurationDeprecated": { - "base": null, + "base": "

", "refs": { - "NotificationConfigurationDeprecated$TopicConfiguration": null + "NotificationConfigurationDeprecated$TopicConfiguration": "

" } }, "TopicConfigurationList": { "base": null, "refs": { - "NotificationConfiguration$TopicConfigurations": null + "NotificationConfiguration$TopicConfigurations": "

" } }, "Transition": { - "base": null, + "base": "

", "refs": { - "Rule$Transition": null, + "Rule$Transition": "

", "TransitionList$member": null } }, "TransitionList": { "base": null, "refs": { - "LifecycleRule$Transitions": null + "LifecycleRule$Transitions": "

" } }, "TransitionStorageClass": { @@ -3567,20 +3567,20 @@ "VersionIdMarker": { "base": null, "refs": { - "ListObjectVersionsOutput$VersionIdMarker": null, + "ListObjectVersionsOutput$VersionIdMarker": "

", "ListObjectVersionsRequest$VersionIdMarker": "

Specifies the object version you want to start listing from.

" } }, "VersioningConfiguration": { - "base": null, + "base": "

", "refs": { - "PutBucketVersioningRequest$VersioningConfiguration": null + "PutBucketVersioningRequest$VersioningConfiguration": "

" } }, "WebsiteConfiguration": { - "base": null, + "base": "

", "refs": { - "PutBucketWebsiteRequest$WebsiteConfiguration": null + "PutBucketWebsiteRequest$WebsiteConfiguration": "

" } }, "WebsiteRedirectLocation": { diff --git a/src/data/s3/2006-03-01/docs-2.json.php b/src/data/s3/2006-03-01/docs-2.json.php index 36483f8017..4f8ce1372f 100644 --- a/src/data/s3/2006-03-01/docs-2.json.php +++ b/src/data/s3/2006-03-01/docs-2.json.php @@ -1,3 +1,3 @@ '2.0', 'service' => NULL, 'operations' => [ 'AbortMultipartUpload' => '

Aborts a multipart upload.

To verify that all parts have been removed, so you don't get charged for the part storage, call the List Parts operation and ensure that the parts list is empty.

', 'CompleteMultipartUpload' => '
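The AbortMultipartUpload note above recommends verifying cleanup via List Parts. A minimal sketch of that abort-then-verify pattern with the AWS SDK for PHP v3 (bucket and key names hypothetical):

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\Exception\S3Exception;

$s3 = new Aws\S3\S3Client(['region' => 'us-east-1', 'version' => '2006-03-01']);

$upload = $s3->createMultipartUpload([
    'Bucket' => 'example-bucket',   // hypothetical
    'Key'    => 'big-object.bin',   // hypothetical
]);

$s3->abortMultipartUpload([
    'Bucket'   => 'example-bucket',
    'Key'      => 'big-object.bin',
    'UploadId' => $upload['UploadId'],
]);

// Verify nothing is left to be charged for: after a successful abort,
// ListParts either returns an empty parts list or fails with NoSuchUpload.
try {
    $parts = $s3->listParts([
        'Bucket'   => 'example-bucket',
        'Key'      => 'big-object.bin',
        'UploadId' => $upload['UploadId'],
    ]);
    assert(empty($parts['Parts']));
} catch (S3Exception $e) {
    assert($e->getAwsErrorCode() === 'NoSuchUpload');
}
```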

Completes a multipart upload by assembling previously uploaded parts.

', 'CopyObject' => '

Creates a copy of an object that is already stored in Amazon S3.

', 'CreateBucket' => '

Creates a new bucket.

', 'CreateMultipartUpload' => '

Initiates a multipart upload and returns an upload ID.

Note: After you initiate a multipart upload and upload one or more parts, you must either complete or abort the multipart upload to stop being charged for storage of the uploaded parts. Amazon S3 frees up the parts storage and stops charging you for it only after you complete or abort the upload.

', 'DeleteBucket' => '

Deletes the bucket. All objects (including all object versions and Delete Markers) in the bucket must be deleted before the bucket itself can be deleted.

', 'DeleteBucketAnalyticsConfiguration' => '

Deletes an analytics configuration for the bucket (specified by the analytics configuration ID).

', 'DeleteBucketCors' => '

Deletes the CORS configuration information set for the bucket.

', 'DeleteBucketEncryption' => '

Deletes the server-side encryption configuration from the bucket.

', 'DeleteBucketInventoryConfiguration' => '

Deletes an inventory configuration (identified by the inventory ID) from the bucket.

', 'DeleteBucketLifecycle' => '

Deletes the lifecycle configuration from the bucket.

', 'DeleteBucketMetricsConfiguration' => '

Deletes a metrics configuration (specified by the metrics configuration ID) from the bucket.

', 'DeleteBucketPolicy' => '

Deletes the policy from the bucket.

', 'DeleteBucketReplication' => '

Deletes the replication configuration from the bucket. For information about replication configuration, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

', 'DeleteBucketTagging' => '

Deletes the tags from the bucket.

', 'DeleteBucketWebsite' => '

This operation removes the website configuration from the bucket.

', 'DeleteObject' => '

Removes the null version (if there is one) of an object and inserts a delete marker, which becomes the latest version of the object. If there isn\'t a null version, Amazon S3 does not remove any objects.

', 'DeleteObjectTagging' => '

Removes the tag-set from an existing object.

', 'DeleteObjects' => '

This operation enables you to delete multiple objects from a bucket using a single HTTP request. You may specify up to 1000 keys.

', 'DeletePublicAccessBlock' => '

Removes the PublicAccessBlock configuration from an Amazon S3 bucket.

', 'GetBucketAccelerateConfiguration' => '

Returns the accelerate configuration of a bucket.

', 'GetBucketAcl' => '

Gets the access control policy for the bucket.

', 'GetBucketAnalyticsConfiguration' => '

Gets an analytics configuration for the bucket (specified by the analytics configuration ID).

', 'GetBucketCors' => '

Returns the CORS configuration for the bucket.

', 'GetBucketEncryption' => '

Returns the server-side encryption configuration of a bucket.

', 'GetBucketInventoryConfiguration' => '

Returns an inventory configuration (identified by the inventory ID) from the bucket.

', 'GetBucketLifecycle' => '

Deprecated, see the GetBucketLifecycleConfiguration operation.

', 'GetBucketLifecycleConfiguration' => '

Returns the lifecycle configuration information set on the bucket.

', 'GetBucketLocation' => '

Returns the region the bucket resides in.

', 'GetBucketLogging' => '

Returns the logging status of a bucket and the permissions users have to view and modify that status. To use GET, you must be the bucket owner.

', 'GetBucketMetricsConfiguration' => '

Gets a metrics configuration (specified by the metrics configuration ID) from the bucket.

', 'GetBucketNotification' => '

Deprecated, see the GetBucketNotificationConfiguration operation.

', 'GetBucketNotificationConfiguration' => '

Returns the notification configuration of a bucket.

', 'GetBucketPolicy' => '

Returns the policy of a specified bucket.

', 'GetBucketPolicyStatus' => '

Retrieves the policy status for an Amazon S3 bucket, indicating whether the bucket is public.

', 'GetBucketReplication' => '

Returns the replication configuration of a bucket.

It can take a while to propagate the put or delete of a replication configuration to all Amazon S3 systems. Therefore, a get request soon after a put or delete can return a stale result.

', 'GetBucketRequestPayment' => '

Returns the request payment configuration of a bucket.

', 'GetBucketTagging' => '

Returns the tag set associated with the bucket.

', 'GetBucketVersioning' => '

Returns the versioning state of a bucket.

', 'GetBucketWebsite' => '

Returns the website configuration for a bucket.

', 'GetObject' => '

Retrieves objects from Amazon S3.

', 'GetObjectAcl' => '

Returns the access control list (ACL) of an object.

', 'GetObjectLegalHold' => '

Gets an object\'s current Legal Hold status.

', 'GetObjectLockConfiguration' => '

Gets the Object Lock configuration for a bucket. The rule specified in the Object Lock configuration will be applied by default to every new object placed in the specified bucket.

', 'GetObjectRetention' => '

Retrieves an object\'s retention settings.

', 'GetObjectTagging' => '

Returns the tag-set of an object.

', 'GetObjectTorrent' => '

Returns torrent files from a bucket.

', 'GetPublicAccessBlock' => '

Retrieves the PublicAccessBlock configuration for an Amazon S3 bucket.

', 'HeadBucket' => '

This operation is useful for determining whether a bucket exists and whether you have permission to access it.

', 'HeadObject' => '

The HEAD operation retrieves metadata from an object without returning the object itself. This operation is useful if you\'re only interested in an object\'s metadata. To use HEAD, you must have READ access to the object.

', 'ListBucketAnalyticsConfigurations' => '

Lists the analytics configurations for the bucket.

', 'ListBucketInventoryConfigurations' => '

Returns a list of inventory configurations for the bucket.

', 'ListBucketMetricsConfigurations' => '

Lists the metrics configurations for the bucket.

', 'ListBuckets' => '

Returns a list of all buckets owned by the authenticated sender of the request.

', 'ListMultipartUploads' => '

This operation lists in-progress multipart uploads.

', 'ListObjectVersions' => '

Returns metadata about all of the versions of objects in a bucket.

', 'ListObjects' => '

Returns some or all (up to 1000) of the objects in a bucket. You can use the request parameters as selection criteria to return a subset of the objects in a bucket.

', 'ListObjectsV2' => '

Returns some or all (up to 1000) of the objects in a bucket. You can use the request parameters as selection criteria to return a subset of the objects in a bucket. Note: ListObjectsV2 is the revised List Objects API and we recommend you use this revised API for new application development.

', 'ListParts' => '

Lists the parts that have been uploaded for a specific multipart upload.

', 'PutBucketAccelerateConfiguration' => '

Sets the accelerate configuration of an existing bucket.

', 'PutBucketAcl' => '

Sets the permissions on a bucket using access control lists (ACL).

', 'PutBucketAnalyticsConfiguration' => '

Sets an analytics configuration for the bucket (specified by the analytics configuration ID).

', 'PutBucketCors' => '

Sets the CORS configuration for a bucket.

', 'PutBucketEncryption' => '

Creates a new server-side encryption configuration (or replaces an existing one, if present).

', 'PutBucketInventoryConfiguration' => '

Adds an inventory configuration (identified by the inventory ID) to the bucket.

', 'PutBucketLifecycle' => '

Deprecated, see the PutBucketLifecycleConfiguration operation.

', 'PutBucketLifecycleConfiguration' => '

Sets lifecycle configuration for your bucket. If a lifecycle configuration already exists, this operation replaces it.

', 'PutBucketLogging' => '

Sets the logging parameters for a bucket and specifies permissions for who can view and modify the logging parameters. To set the logging status of a bucket, you must be the bucket owner.

', 'PutBucketMetricsConfiguration' => '

Sets a metrics configuration (specified by the metrics configuration ID) for the bucket.

', 'PutBucketNotification' => '

Deprecated, see the PutBucketNotificationConfiguration operation.

', 'PutBucketNotificationConfiguration' => '

Enables notifications of specified events for a bucket.

', 'PutBucketPolicy' => '

Replaces a policy on a bucket. If the bucket already has a policy, the one in this request completely replaces it.

', 'PutBucketReplication' => '

Creates a replication configuration or replaces an existing one. For more information, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

', 'PutBucketRequestPayment' => '

Sets the request payment configuration for a bucket. By default, the bucket owner pays for downloads from the bucket. This configuration parameter enables the bucket owner (only) to specify that the person requesting the download will be charged for the download. Documentation on requester pays buckets can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html

', 'PutBucketTagging' => '
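For the requester-pays configuration just described, a minimal sketch: the bucket owner sets the payer, and callers then acknowledge the charge on each request. AWS SDK for PHP v3 assumed; bucket and key names hypothetical:

```php
<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['region' => 'us-east-1', 'version' => '2006-03-01']);

// Bucket owner opts in to requester-pays downloads.
$s3->putBucketRequestPayment([
    'Bucket' => 'example-bucket',   // hypothetical
    'RequestPaymentConfiguration' => ['Payer' => 'Requester'],
]);

// A requester must acknowledge the charge on each request.
$object = $s3->getObject([
    'Bucket'       => 'example-bucket',
    'Key'          => 'big-file.bin',   // hypothetical
    'RequestPayer' => 'requester',
]);
```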

Sets the tags for a bucket.

', 'PutBucketVersioning' => '

Sets the versioning state of an existing bucket. To set the versioning state, you must be the bucket owner.

', 'PutBucketWebsite' => '

Sets the website configuration for a bucket.

', 'PutObject' => '

Adds an object to a bucket.

', 'PutObjectAcl' => '

Uses the acl subresource to set the access control list (ACL) permissions for an object that already exists in a bucket.

', 'PutObjectLegalHold' => '

Applies a Legal Hold configuration to the specified object.

', 'PutObjectLockConfiguration' => '

Places an Object Lock configuration on the specified bucket. The rule specified in the Object Lock configuration will be applied by default to every new object placed in the specified bucket.

', 'PutObjectRetention' => '

Places an Object Retention configuration on an object.

', 'PutObjectTagging' => '
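The Object Lock operations above (legal hold, lock configuration, retention) compose as in the following minimal sketch; shortening a GOVERNANCE-mode retention needs the bypass flag documented further down. AWS SDK for PHP v3 assumed; bucket, key, and dates are hypothetical:

```php
<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['region' => 'us-east-1', 'version' => '2006-03-01']);

// Protect one object until a given date (bucket must have Object Lock enabled).
$s3->putObjectRetention([
    'Bucket'    => 'example-bucket',    // hypothetical
    'Key'       => 'record.pdf',        // hypothetical
    'Retention' => [
        'Mode'            => 'GOVERNANCE',
        'RetainUntilDate' => new DateTime('+30 days'),
    ],
]);

// Additionally place a legal hold, independent of the retention period.
$s3->putObjectLegalHold([
    'Bucket'    => 'example-bucket',
    'Key'       => 'record.pdf',
    'LegalHold' => ['Status' => 'ON'],
]);

// Shortening a GOVERNANCE retention requires the bypass flag
// (and the s3:BypassGovernanceRetention permission).
$s3->putObjectRetention([
    'Bucket'                    => 'example-bucket',
    'Key'                       => 'record.pdf',
    'Retention'                 => [
        'Mode'            => 'GOVERNANCE',
        'RetainUntilDate' => new DateTime('+1 day'),
    ],
    'BypassGovernanceRetention' => true,
]);
```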

Sets the supplied tag-set on an object that already exists in a bucket.

', 'PutPublicAccessBlock' => '

Creates or modifies the PublicAccessBlock configuration for an Amazon S3 bucket.

', 'RestoreObject' => '

Restores an archived copy of an object back into Amazon S3.

', 'SelectObjectContent' => '

This operation filters the contents of an Amazon S3 object based on a simple Structured Query Language (SQL) statement. In the request, along with the SQL expression, you must also specify a data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data into records, and returns only records that match the specified SQL expression. You must also specify the data serialization format for the response.

', 'UploadPart' => '

Uploads a part in a multipart upload.

Note: After you initiate a multipart upload and upload one or more parts, you must either complete or abort the multipart upload to stop being charged for storage of the uploaded parts. Amazon S3 frees up the parts storage and stops charging you for it only after you complete or abort the upload.

', 'UploadPartCopy' => '

Uploads a part by copying data from an existing object as data source.

', ], 'shapes' => [ 'AbortDate' => [ 'base' => NULL, 'refs' => [ 'CreateMultipartUploadOutput$AbortDate' => '

Date when multipart upload will become eligible for abort operation by lifecycle.

', 'ListPartsOutput$AbortDate' => '

Date when multipart upload will become eligible for abort operation by lifecycle.

', ], ], 'AbortIncompleteMultipartUpload' => [ 'base' => '

Specifies the days since the initiation of an Incomplete Multipart Upload that Lifecycle will wait before permanently removing all parts of the upload.

', 'refs' => [ 'LifecycleRule$AbortIncompleteMultipartUpload' => NULL, 'Rule$AbortIncompleteMultipartUpload' => NULL, ], ], 'AbortMultipartUploadOutput' => [ 'base' => NULL, 'refs' => [], ], 'AbortMultipartUploadRequest' => [ 'base' => NULL, 'refs' => [], ], 'AbortRuleId' => [ 'base' => NULL, 'refs' => [ 'CreateMultipartUploadOutput$AbortRuleId' => '

Id of the lifecycle rule that makes a multipart upload eligible for abort operation.

', 'ListPartsOutput$AbortRuleId' => '

Id of the lifecycle rule that makes a multipart upload eligible for abort operation.

', ], ], 'AccelerateConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketAccelerateConfigurationRequest$AccelerateConfiguration' => '

Specifies the Accelerate Configuration you want to set for the bucket.

', ], ], 'AcceptRanges' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$AcceptRanges' => NULL, 'HeadObjectOutput$AcceptRanges' => NULL, ], ], 'AccessControlPolicy' => [ 'base' => NULL, 'refs' => [ 'PutBucketAclRequest$AccessControlPolicy' => NULL, 'PutObjectAclRequest$AccessControlPolicy' => NULL, ], ], 'AccessControlTranslation' => [ 'base' => '

A container for information about access control for replicas.

', 'refs' => [ 'Destination$AccessControlTranslation' => '

A container for information about access control for replicas.

Use this element only in a cross-account scenario where source and destination bucket owners are not the same to change replica ownership to the AWS account that owns the destination bucket. If you don\'t add this element to the replication configuration, the replicas are owned by same AWS account that owns the source object.

', ], ], 'AccountId' => [ 'base' => NULL, 'refs' => [ 'AnalyticsS3BucketDestination$BucketAccountId' => '

The account ID that owns the destination bucket. If no account ID is provided, the owner will not be validated prior to exporting data.

', 'Destination$Account' => '

The account ID of the destination bucket. Currently, Amazon S3 verifies this value only if Access Control Translation is enabled.

In a cross-account scenario, if you change replica ownership to the AWS account that owns the destination bucket by adding the AccessControlTranslation element, this is the account ID of the owner of the destination bucket.

', 'InventoryS3BucketDestination$AccountId' => '

The ID of the account that owns the destination bucket.

', ], ], 'AllowQuotedRecordDelimiter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$AllowQuotedRecordDelimiter' => '

Specifies that CSV field values may contain quoted record delimiters and such records should be allowed. Default value is FALSE. Setting this value to TRUE may lower performance.

', ], ], 'AllowedHeader' => [ 'base' => NULL, 'refs' => [ 'AllowedHeaders$member' => NULL, ], ], 'AllowedHeaders' => [ 'base' => NULL, 'refs' => [ 'CORSRule$AllowedHeaders' => '

Specifies which headers are allowed in a pre-flight OPTIONS request.

', ], ], 'AllowedMethod' => [ 'base' => NULL, 'refs' => [ 'AllowedMethods$member' => NULL, ], ], 'AllowedMethods' => [ 'base' => NULL, 'refs' => [ 'CORSRule$AllowedMethods' => '

Identifies HTTP methods that the domain/origin specified in the rule is allowed to execute.

', ], ], 'AllowedOrigin' => [ 'base' => NULL, 'refs' => [ 'AllowedOrigins$member' => NULL, ], ], 'AllowedOrigins' => [ 'base' => NULL, 'refs' => [ 'CORSRule$AllowedOrigins' => '

One or more origins you want customers to be able to access the bucket from.

', ], ], 'AnalyticsAndOperator' => [ 'base' => NULL, 'refs' => [ 'AnalyticsFilter$And' => '

A conjunction (logical AND) of predicates, which is used in evaluating an analytics filter. The operator must have at least two predicates.

', ], ], 'AnalyticsConfiguration' => [ 'base' => NULL, 'refs' => [ 'AnalyticsConfigurationList$member' => NULL, 'GetBucketAnalyticsConfigurationOutput$AnalyticsConfiguration' => '

The configuration and any analyses for the analytics filter.

', 'PutBucketAnalyticsConfigurationRequest$AnalyticsConfiguration' => '

The configuration and any analyses for the analytics filter.

', ], ], 'AnalyticsConfigurationList' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$AnalyticsConfigurationList' => '

The list of analytics configurations for a bucket.

', ], ], 'AnalyticsExportDestination' => [ 'base' => NULL, 'refs' => [ 'StorageClassAnalysisDataExport$Destination' => '

The place to store the data for an analysis.

', ], ], 'AnalyticsFilter' => [ 'base' => NULL, 'refs' => [ 'AnalyticsConfiguration$Filter' => '

The filter used to describe a set of objects for analyses. A filter must have exactly one prefix, one tag, or one conjunction (AnalyticsAndOperator). If no filter is provided, all objects will be considered in any analysis.

', ], ], 'AnalyticsId' => [ 'base' => NULL, 'refs' => [ 'AnalyticsConfiguration$Id' => '

The identifier used to represent an analytics configuration.

', 'DeleteBucketAnalyticsConfigurationRequest$Id' => '

The identifier used to represent an analytics configuration.

', 'GetBucketAnalyticsConfigurationRequest$Id' => '

The identifier used to represent an analytics configuration.

', 'PutBucketAnalyticsConfigurationRequest$Id' => '

The identifier used to represent an analytics configuration.

', ], ], 'AnalyticsS3BucketDestination' => [ 'base' => NULL, 'refs' => [ 'AnalyticsExportDestination$S3BucketDestination' => '

A destination signifying output to an S3 bucket.

', ], ], 'AnalyticsS3ExportFileFormat' => [ 'base' => NULL, 'refs' => [ 'AnalyticsS3BucketDestination$Format' => '

The file format used when exporting data to Amazon S3.

', ], ], 'Body' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$Body' => '

Object data.

', 'GetObjectTorrentOutput$Body' => NULL, 'PutObjectRequest$Body' => '

Object data.

', 'RecordsEvent$Payload' => '

A byte array of one or more, possibly partial, result records.

', 'UploadPartRequest$Body' => '

Object data.

', ], ], 'Bucket' => [ 'base' => NULL, 'refs' => [ 'Buckets$member' => NULL, ], ], 'BucketAccelerateStatus' => [ 'base' => NULL, 'refs' => [ 'AccelerateConfiguration$Status' => '

The accelerate configuration of the bucket.

', 'GetBucketAccelerateConfigurationOutput$Status' => '

The accelerate configuration of the bucket.

', ], ], 'BucketAlreadyExists' => [ 'base' => '

The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again.

', 'refs' => [], ], 'BucketAlreadyOwnedByYou' => [ 'base' => NULL, 'refs' => [], ], 'BucketCannedACL' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$ACL' => '

The canned ACL to apply to the bucket.

', 'PutBucketAclRequest$ACL' => '

The canned ACL to apply to the bucket.

', ], ], 'BucketLifecycleConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketLifecycleConfigurationRequest$LifecycleConfiguration' => NULL, ], ], 'BucketLocationConstraint' => [ 'base' => NULL, 'refs' => [ 'CreateBucketConfiguration$LocationConstraint' => '

Specifies the region where the bucket will be created. If you don\'t specify a region, the bucket will be created in US Standard.

', 'GetBucketLocationOutput$LocationConstraint' => NULL, ], ], 'BucketLoggingStatus' => [ 'base' => NULL, 'refs' => [ 'PutBucketLoggingRequest$BucketLoggingStatus' => NULL, ], ], 'BucketLogsPermission' => [ 'base' => NULL, 'refs' => [ 'TargetGrant$Permission' => '
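For the LocationConstraint behavior noted above, a minimal sketch with the AWS SDK for PHP v3 (region and bucket name hypothetical); omitting CreateBucketConfiguration creates the bucket in US Standard:

```php
<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['region' => 'eu-west-1', 'version' => '2006-03-01']);

$s3->createBucket([
    'Bucket' => 'example-bucket-eu',    // hypothetical
    'CreateBucketConfiguration' => [
        'LocationConstraint' => 'eu-west-1',
    ],
]);

// Confirm where the bucket landed.
$loc = $s3->getBucketLocation(['Bucket' => 'example-bucket-eu']);
echo $loc['LocationConstraint'], PHP_EOL;
```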

Logging permissions assigned to the Grantee for the bucket.

', ], ], 'BucketName' => [ 'base' => NULL, 'refs' => [ 'AbortMultipartUploadRequest$Bucket' => NULL, 'AnalyticsS3BucketDestination$Bucket' => '

The Amazon resource name (ARN) of the bucket to which data is exported.

', 'Bucket$Name' => '

The name of the bucket.

', 'CompleteMultipartUploadOutput$Bucket' => NULL, 'CompleteMultipartUploadRequest$Bucket' => NULL, 'CopyObjectRequest$Bucket' => NULL, 'CreateBucketRequest$Bucket' => NULL, 'CreateMultipartUploadOutput$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', 'CreateMultipartUploadRequest$Bucket' => NULL, 'DeleteBucketAnalyticsConfigurationRequest$Bucket' => '

The name of the bucket from which an analytics configuration is deleted.

', 'DeleteBucketCorsRequest$Bucket' => NULL, 'DeleteBucketEncryptionRequest$Bucket' => '

The name of the bucket containing the server-side encryption configuration to delete.

', 'DeleteBucketInventoryConfigurationRequest$Bucket' => '

The name of the bucket containing the inventory configuration to delete.

', 'DeleteBucketLifecycleRequest$Bucket' => NULL, 'DeleteBucketMetricsConfigurationRequest$Bucket' => '

The name of the bucket containing the metrics configuration to delete.

', 'DeleteBucketPolicyRequest$Bucket' => NULL, 'DeleteBucketReplicationRequest$Bucket' => '

The bucket name.

It can take a while to propagate the deletion of a replication configuration to all Amazon S3 systems.

', 'DeleteBucketRequest$Bucket' => NULL, 'DeleteBucketTaggingRequest$Bucket' => NULL, 'DeleteBucketWebsiteRequest$Bucket' => NULL, 'DeleteObjectRequest$Bucket' => NULL, 'DeleteObjectTaggingRequest$Bucket' => NULL, 'DeleteObjectsRequest$Bucket' => NULL, 'DeletePublicAccessBlockRequest$Bucket' => '

The Amazon S3 bucket whose PublicAccessBlock configuration you want to delete.

', 'Destination$Bucket' => '

The Amazon Resource Name (ARN) of the bucket where you want Amazon S3 to store replicas of the object identified by the rule.

If there are multiple rules in your replication configuration, all rules must specify the same bucket as the destination. A replication configuration can replicate objects to only one destination bucket.

', 'GetBucketAccelerateConfigurationRequest$Bucket' => '

Name of the bucket for which the accelerate configuration is retrieved.

', 'GetBucketAclRequest$Bucket' => NULL, 'GetBucketAnalyticsConfigurationRequest$Bucket' => '

The name of the bucket from which an analytics configuration is retrieved.

', 'GetBucketCorsRequest$Bucket' => NULL, 'GetBucketEncryptionRequest$Bucket' => '

The name of the bucket from which the server-side encryption configuration is retrieved.

', 'GetBucketInventoryConfigurationRequest$Bucket' => '

The name of the bucket containing the inventory configuration to retrieve.

', 'GetBucketLifecycleConfigurationRequest$Bucket' => NULL, 'GetBucketLifecycleRequest$Bucket' => NULL, 'GetBucketLocationRequest$Bucket' => NULL, 'GetBucketLoggingRequest$Bucket' => NULL, 'GetBucketMetricsConfigurationRequest$Bucket' => '

The name of the bucket containing the metrics configuration to retrieve.

', 'GetBucketNotificationConfigurationRequest$Bucket' => '

Name of the bucket to get the notification configuration for.

', 'GetBucketPolicyRequest$Bucket' => NULL, 'GetBucketPolicyStatusRequest$Bucket' => '

The name of the Amazon S3 bucket whose policy status you want to retrieve.

', 'GetBucketReplicationRequest$Bucket' => NULL, 'GetBucketRequestPaymentRequest$Bucket' => NULL, 'GetBucketTaggingRequest$Bucket' => NULL, 'GetBucketVersioningRequest$Bucket' => NULL, 'GetBucketWebsiteRequest$Bucket' => NULL, 'GetObjectAclRequest$Bucket' => NULL, 'GetObjectLegalHoldRequest$Bucket' => '

The bucket containing the object whose Legal Hold status you want to retrieve.

', 'GetObjectLockConfigurationRequest$Bucket' => '

The bucket whose Object Lock configuration you want to retrieve.

', 'GetObjectRequest$Bucket' => NULL, 'GetObjectRetentionRequest$Bucket' => '

The bucket containing the object whose retention settings you want to retrieve.

', 'GetObjectTaggingRequest$Bucket' => NULL, 'GetObjectTorrentRequest$Bucket' => NULL, 'GetPublicAccessBlockRequest$Bucket' => '

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want to retrieve.

', 'HeadBucketRequest$Bucket' => NULL, 'HeadObjectRequest$Bucket' => NULL, 'InventoryS3BucketDestination$Bucket' => '

The Amazon resource name (ARN) of the bucket where inventory results will be published.

', 'ListBucketAnalyticsConfigurationsRequest$Bucket' => '

The name of the bucket from which analytics configurations are retrieved.

', 'ListBucketInventoryConfigurationsRequest$Bucket' => '

The name of the bucket containing the inventory configurations to retrieve.

', 'ListBucketMetricsConfigurationsRequest$Bucket' => '

The name of the bucket containing the metrics configurations to retrieve.

', 'ListMultipartUploadsOutput$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', 'ListMultipartUploadsRequest$Bucket' => NULL, 'ListObjectVersionsOutput$Name' => NULL, 'ListObjectVersionsRequest$Bucket' => NULL, 'ListObjectsOutput$Name' => NULL, 'ListObjectsRequest$Bucket' => NULL, 'ListObjectsV2Output$Name' => '

Name of the bucket to list.

', 'ListObjectsV2Request$Bucket' => '

Name of the bucket to list.

', 'ListPartsOutput$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', 'ListPartsRequest$Bucket' => NULL, 'PutBucketAccelerateConfigurationRequest$Bucket' => '

Name of the bucket for which the accelerate configuration is set.

', 'PutBucketAclRequest$Bucket' => NULL, 'PutBucketAnalyticsConfigurationRequest$Bucket' => '

The name of the bucket to which an analytics configuration is stored.

', 'PutBucketCorsRequest$Bucket' => NULL, 'PutBucketEncryptionRequest$Bucket' => '

The name of the bucket for which the server-side encryption configuration is set.

', 'PutBucketInventoryConfigurationRequest$Bucket' => '

The name of the bucket where the inventory configuration will be stored.

', 'PutBucketLifecycleConfigurationRequest$Bucket' => NULL, 'PutBucketLifecycleRequest$Bucket' => NULL, 'PutBucketLoggingRequest$Bucket' => NULL, 'PutBucketMetricsConfigurationRequest$Bucket' => '

The name of the bucket for which the metrics configuration is set.

', 'PutBucketNotificationConfigurationRequest$Bucket' => NULL, 'PutBucketNotificationRequest$Bucket' => NULL, 'PutBucketPolicyRequest$Bucket' => NULL, 'PutBucketReplicationRequest$Bucket' => NULL, 'PutBucketRequestPaymentRequest$Bucket' => NULL, 'PutBucketTaggingRequest$Bucket' => NULL, 'PutBucketVersioningRequest$Bucket' => NULL, 'PutBucketWebsiteRequest$Bucket' => NULL, 'PutObjectAclRequest$Bucket' => NULL, 'PutObjectLegalHoldRequest$Bucket' => '

The bucket containing the object that you want to place a Legal Hold on.

', 'PutObjectLockConfigurationRequest$Bucket' => '

The bucket whose Object Lock configuration you want to create or replace.

', 'PutObjectRequest$Bucket' => '

Name of the bucket to which the PUT operation was initiated.

', 'PutObjectRetentionRequest$Bucket' => '

The bucket that contains the object you want to apply this Object Retention configuration to.

', 'PutObjectTaggingRequest$Bucket' => NULL, 'PutPublicAccessBlockRequest$Bucket' => '

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want to set.

', 'RestoreObjectRequest$Bucket' => NULL, 'S3Location$BucketName' => '

The name of the bucket where the restore results will be placed.

', 'SelectObjectContentRequest$Bucket' => '

The S3 bucket.

', 'UploadPartCopyRequest$Bucket' => NULL, 'UploadPartRequest$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', ], ], 'BucketVersioningStatus' => [ 'base' => NULL, 'refs' => [ 'GetBucketVersioningOutput$Status' => '

The versioning state of the bucket.

', 'VersioningConfiguration$Status' => '

The versioning state of the bucket.

', ], ], 'Buckets' => [ 'base' => NULL, 'refs' => [ 'ListBucketsOutput$Buckets' => NULL, ], ], 'BypassGovernanceRetention' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectRequest$BypassGovernanceRetention' => '

Indicates whether S3 Object Lock should bypass Governance-mode restrictions to process this operation.

', 'DeleteObjectsRequest$BypassGovernanceRetention' => '

Specifies whether you want to delete this object even if it has a Governance-type Object Lock in place. You must have sufficient permissions to perform this operation.

', 'PutObjectRetentionRequest$BypassGovernanceRetention' => '

Indicates whether this operation should bypass Governance-mode restrictions.

', ], ], 'BytesProcessed' => [ 'base' => NULL, 'refs' => [ 'Progress$BytesProcessed' => '

The current number of uncompressed object bytes processed.

', 'Stats$BytesProcessed' => '

The total number of uncompressed object bytes processed.

', ], ], 'BytesReturned' => [ 'base' => NULL, 'refs' => [ 'Progress$BytesReturned' => '

The current number of bytes of records payload data returned.

', 'Stats$BytesReturned' => '

The total number of bytes of records payload data returned.

', ], ], 'BytesScanned' => [ 'base' => NULL, 'refs' => [ 'Progress$BytesScanned' => '

The current number of object bytes scanned.

', 'Stats$BytesScanned' => '

The total number of object bytes scanned.

', ], ], 'CORSConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketCorsRequest$CORSConfiguration' => NULL, ], ], 'CORSRule' => [ 'base' => NULL, 'refs' => [ 'CORSRules$member' => NULL, ], ], 'CORSRules' => [ 'base' => NULL, 'refs' => [ 'CORSConfiguration$CORSRules' => NULL, 'GetBucketCorsOutput$CORSRules' => NULL, ], ], 'CSVInput' => [ 'base' => '

Describes how a CSV-formatted input object is formatted.

', 'refs' => [ 'InputSerialization$CSV' => '

Describes the serialization of a CSV-encoded object.

', ], ], 'CSVOutput' => [ 'base' => '

Describes how CSV-formatted results are formatted.

', 'refs' => [ 'OutputSerialization$CSV' => '

Describes the serialization of CSV-encoded Select results.

', ], ], 'CacheControl' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'CreateMultipartUploadRequest$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'GetObjectOutput$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'HeadObjectOutput$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'PutObjectRequest$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', ], ], 'CloudFunction' => [ 'base' => NULL, 'refs' => [ 'CloudFunctionConfiguration$CloudFunction' => NULL, ], ], 'CloudFunctionConfiguration' => [ 'base' => NULL, 'refs' => [ 'NotificationConfigurationDeprecated$CloudFunctionConfiguration' => NULL, ], ], 'CloudFunctionInvocationRole' => [ 'base' => NULL, 'refs' => [ 'CloudFunctionConfiguration$InvocationRole' => NULL, ], ], 'Code' => [ 'base' => NULL, 'refs' => [ 'Error$Code' => NULL, ], ], 'Comments' => [ 'base' => NULL, 'refs' => [ 'CSVInput$Comments' => '

The single character used to indicate a row should be ignored when present at the start of a row.

', ], ], 'CommonPrefix' => [ 'base' => NULL, 'refs' => [ 'CommonPrefixList$member' => NULL, ], ], 'CommonPrefixList' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$CommonPrefixes' => NULL, 'ListObjectVersionsOutput$CommonPrefixes' => NULL, 'ListObjectsOutput$CommonPrefixes' => NULL, 'ListObjectsV2Output$CommonPrefixes' => '

CommonPrefixes contains all (if there are any) keys between Prefix and the next occurrence of the string specified by the delimiter.

', ], ], 'CompleteMultipartUploadOutput' => [ 'base' => NULL, 'refs' => [], ], 'CompleteMultipartUploadRequest' => [ 'base' => NULL, 'refs' => [], ], 'CompletedMultipartUpload' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadRequest$MultipartUpload' => NULL, ], ], 'CompletedPart' => [ 'base' => NULL, 'refs' => [ 'CompletedPartList$member' => NULL, ], ], 'CompletedPartList' => [ 'base' => NULL, 'refs' => [ 'CompletedMultipartUpload$Parts' => NULL, ], ], 'CompressionType' => [ 'base' => NULL, 'refs' => [ 'InputSerialization$CompressionType' => '

Specifies object\'s compression format. Valid values: NONE, GZIP, BZIP2. Default Value: NONE.

', ], ], 'Condition' => [ 'base' => NULL, 'refs' => [ 'RoutingRule$Condition' => '

A container for describing a condition that must be met for the specified redirect to apply. For example: (1) if the request is for pages in the /docs folder, redirect to the /documents folder; (2) if the request results in an HTTP 4xx error, redirect the request to another host where you might process the error.

', ], ], 'ConfirmRemoveSelfBucketAccess' => [ 'base' => NULL, 'refs' => [ 'PutBucketPolicyRequest$ConfirmRemoveSelfBucketAccess' => '

Set this parameter to true to confirm that you want to remove your permissions to change this bucket policy in the future.

', ], ], 'ContentDisposition' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentDisposition' => '

Specifies presentational information for the object.

', 'CreateMultipartUploadRequest$ContentDisposition' => '

Specifies presentational information for the object.

', 'GetObjectOutput$ContentDisposition' => '

Specifies presentational information for the object.

', 'HeadObjectOutput$ContentDisposition' => '

Specifies presentational information for the object.

', 'PutObjectRequest$ContentDisposition' => '

Specifies presentational information for the object.

', ], ], 'ContentEncoding' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'CreateMultipartUploadRequest$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'GetObjectOutput$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'HeadObjectOutput$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'PutObjectRequest$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', ], ], 'ContentLanguage' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentLanguage' => '

The language the content is in.

', 'CreateMultipartUploadRequest$ContentLanguage' => '

The language the content is in.

', 'GetObjectOutput$ContentLanguage' => '

The language the content is in.

', 'HeadObjectOutput$ContentLanguage' => '

The language the content is in.

', 'PutObjectRequest$ContentLanguage' => '

The language the content is in.

', ], ], 'ContentLength' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$ContentLength' => '

Size of the body in bytes.

', 'HeadObjectOutput$ContentLength' => '

Size of the body in bytes.

', 'PutObjectRequest$ContentLength' => '

Size of the body in bytes. This parameter is useful when the size of the body cannot be determined automatically.

', 'UploadPartRequest$ContentLength' => '

Size of the body in bytes. This parameter is useful when the size of the body cannot be determined automatically.

', ], ], 'ContentMD5' => [ 'base' => NULL, 'refs' => [ 'PutBucketAclRequest$ContentMD5' => NULL, 'PutBucketCorsRequest$ContentMD5' => NULL, 'PutBucketEncryptionRequest$ContentMD5' => '

The base64-encoded 128-bit MD5 digest of the server-side encryption configuration.

', 'PutBucketLifecycleRequest$ContentMD5' => NULL, 'PutBucketLoggingRequest$ContentMD5' => NULL, 'PutBucketNotificationRequest$ContentMD5' => NULL, 'PutBucketPolicyRequest$ContentMD5' => NULL, 'PutBucketReplicationRequest$ContentMD5' => NULL, 'PutBucketRequestPaymentRequest$ContentMD5' => NULL, 'PutBucketTaggingRequest$ContentMD5' => NULL, 'PutBucketVersioningRequest$ContentMD5' => NULL, 'PutBucketWebsiteRequest$ContentMD5' => NULL, 'PutObjectAclRequest$ContentMD5' => NULL, 'PutObjectLegalHoldRequest$ContentMD5' => '

The MD5 hash for the request body.

', 'PutObjectLockConfigurationRequest$ContentMD5' => '

The MD5 hash for the request body.

', 'PutObjectRequest$ContentMD5' => '

The base64-encoded 128-bit MD5 digest of the part data.

', 'PutObjectRetentionRequest$ContentMD5' => '

The MD5 hash for the request body.

', 'PutObjectTaggingRequest$ContentMD5' => NULL, 'PutPublicAccessBlockRequest$ContentMD5' => '

The MD5 hash of the PutPublicAccessBlock request body.

', 'UploadPartRequest$ContentMD5' => '

The base64-encoded 128-bit MD5 digest of the part data.

', ], ], 'ContentRange' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$ContentRange' => '

The portion of the object returned in the response.

', ], ], 'ContentType' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentType' => '

A standard MIME type describing the format of the object data.

', 'CreateMultipartUploadRequest$ContentType' => '

A standard MIME type describing the format of the object data.

', 'GetObjectOutput$ContentType' => '

A standard MIME type describing the format of the object data.

', 'HeadObjectOutput$ContentType' => '

A standard MIME type describing the format of the object data.

', 'PutObjectRequest$ContentType' => '

A standard MIME type describing the format of the object data.

', ], ], 'ContinuationEvent' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentEventStream$Cont' => '

The Continuation Event.

', ], ], 'CopyObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'CopyObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'CopyObjectResult' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$CopyObjectResult' => NULL, ], ], 'CopyPartResult' => [ 'base' => NULL, 'refs' => [ 'UploadPartCopyOutput$CopyPartResult' => NULL, ], ], 'CopySource' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySource' => '

The name of the source bucket and key name of the source object, separated by a slash (/). Must be URL-encoded.

', 'UploadPartCopyRequest$CopySource' => '

The name of the source bucket and key name of the source object, separated by a slash (/). Must be URL-encoded.

', ], ], 'CopySourceIfMatch' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfMatch' => '

Copies the object if its entity tag (ETag) matches the specified tag.

', 'UploadPartCopyRequest$CopySourceIfMatch' => '

Copies the object if its entity tag (ETag) matches the specified tag.

', ], ], 'CopySourceIfModifiedSince' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfModifiedSince' => '

Copies the object if it has been modified since the specified time.

', 'UploadPartCopyRequest$CopySourceIfModifiedSince' => '

Copies the object if it has been modified since the specified time.

', ], ], 'CopySourceIfNoneMatch' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfNoneMatch' => '

Copies the object if its entity tag (ETag) is different than the specified ETag.

', 'UploadPartCopyRequest$CopySourceIfNoneMatch' => '

Copies the object if its entity tag (ETag) is different than the specified ETag.

', ], ], 'CopySourceIfUnmodifiedSince' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfUnmodifiedSince' => '

Copies the object if it hasn\'t been modified since the specified time.

', 'UploadPartCopyRequest$CopySourceIfUnmodifiedSince' => '

Copies the object if it hasn\'t been modified since the specified time.

', ], ], 'CopySourceRange' => [ 'base' => NULL, 'refs' => [ 'UploadPartCopyRequest$CopySourceRange' => '

The range of bytes to copy from the source object. The range value must use the form bytes=first-last, where first and last are the zero-based byte offsets to copy. For example, bytes=0-9 indicates that you want to copy the first ten bytes of the source. You can copy a range only if the source object is greater than 5 GB.

', ], ], 'CopySourceSSECustomerAlgorithm' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceSSECustomerAlgorithm' => '

Specifies the algorithm to use when decrypting the source object (e.g., AES256).

', 'UploadPartCopyRequest$CopySourceSSECustomerAlgorithm' => '

Specifies the algorithm to use when decrypting the source object (e.g., AES256).

', ], ], 'CopySourceSSECustomerKey' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceSSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be one that was used when the source object was created.

', 'UploadPartCopyRequest$CopySourceSSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be one that was used when the source object was created.

', ], ], 'CopySourceSSECustomerKeyMD5' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceSSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'UploadPartCopyRequest$CopySourceSSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', ], ], 'CopySourceVersionId' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$CopySourceVersionId' => NULL, 'UploadPartCopyOutput$CopySourceVersionId' => '

The version of the source object that was copied, if you have enabled versioning on the source bucket.

', ], ], 'CreateBucketConfiguration' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$CreateBucketConfiguration' => NULL, ], ], 'CreateBucketOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateBucketRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateMultipartUploadOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateMultipartUploadRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreationDate' => [ 'base' => NULL, 'refs' => [ 'Bucket$CreationDate' => '

Date the bucket was created.

', ], ], 'Date' => [ 'base' => NULL, 'refs' => [ 'LifecycleExpiration$Date' => '

Indicates at what date the object is to be moved or deleted. Should be in GMT ISO 8601 Format.

', 'ObjectLockRetention$RetainUntilDate' => '

The date on which this Object Lock Retention will expire.

', 'Transition$Date' => '

Indicates at what date the object is to be moved or deleted. Should be in GMT ISO 8601 Format.

', ], ], 'Days' => [ 'base' => NULL, 'refs' => [ 'DefaultRetention$Days' => '

The number of days that you want to specify for the default retention period.

', 'LifecycleExpiration$Days' => '

Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.

', 'NoncurrentVersionExpiration$NoncurrentDays' => '

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

', 'NoncurrentVersionTransition$NoncurrentDays' => '

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

', 'RestoreRequest$Days' => '

Lifetime of the active copy in days. Do not use with restores that specify OutputLocation.

', 'Transition$Days' => '

Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.

', ], ], 'DaysAfterInitiation' => [ 'base' => NULL, 'refs' => [ 'AbortIncompleteMultipartUpload$DaysAfterInitiation' => '

Indicates the number of days that must pass since initiation for Lifecycle to abort an Incomplete Multipart Upload.

', ], ], 'DefaultRetention' => [ 'base' => '

The container element for specifying the default Object Lock retention settings for new objects placed in the specified bucket.

', 'refs' => [ 'ObjectLockRule$DefaultRetention' => '

The default retention period that you want to apply to new objects placed in the specified bucket.

', ], ], 'Delete' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectsRequest$Delete' => NULL, ], ], 'DeleteBucketAnalyticsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketCorsRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketEncryptionRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketInventoryConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketLifecycleRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketMetricsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketPolicyRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketReplicationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketWebsiteRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteMarker' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectOutput$DeleteMarker' => '

Specifies whether the versioned object that was permanently deleted was (true) or was not (false) a delete marker.

', 'DeletedObject$DeleteMarker' => NULL, 'GetObjectOutput$DeleteMarker' => '

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If false, this response header does not appear in the response.

', 'HeadObjectOutput$DeleteMarker' => '

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If false, this response header does not appear in the response.

', ], ], 'DeleteMarkerEntry' => [ 'base' => NULL, 'refs' => [ 'DeleteMarkers$member' => NULL, ], ], 'DeleteMarkerReplication' => [ 'base' => '

Specifies whether Amazon S3 should replicate delete markers.

', 'refs' => [ 'ReplicationRule$DeleteMarkerReplication' => NULL, ], ], 'DeleteMarkerReplicationStatus' => [ 'base' => NULL, 'refs' => [ 'DeleteMarkerReplication$Status' => '

The status of the delete marker replication.

In the current implementation, Amazon S3 doesn\'t replicate the delete markers. The status must be Disabled.

', ], ], 'DeleteMarkerVersionId' => [ 'base' => NULL, 'refs' => [ 'DeletedObject$DeleteMarkerVersionId' => NULL, ], ], 'DeleteMarkers' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$DeleteMarkers' => NULL, ], ], 'DeleteObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectsRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeletePublicAccessBlockRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeletedObject' => [ 'base' => NULL, 'refs' => [ 'DeletedObjects$member' => NULL, ], ], 'DeletedObjects' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectsOutput$Deleted' => NULL, ], ], 'Delimiter' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$Delimiter' => NULL, 'ListMultipartUploadsRequest$Delimiter' => '

Character you use to group keys.

', 'ListObjectVersionsOutput$Delimiter' => NULL, 'ListObjectVersionsRequest$Delimiter' => '

A delimiter is a character you use to group keys.

', 'ListObjectsOutput$Delimiter' => NULL, 'ListObjectsRequest$Delimiter' => '

A delimiter is a character you use to group keys.

', 'ListObjectsV2Output$Delimiter' => '

A delimiter is a character you use to group keys.

', 'ListObjectsV2Request$Delimiter' => '

A delimiter is a character you use to group keys.

', ], ], 'Description' => [ 'base' => NULL, 'refs' => [ 'RestoreRequest$Description' => '

The optional description for the job.

', ], ], 'Destination' => [ 'base' => '

A container for information about the replication destination.

', 'refs' => [ 'ReplicationRule$Destination' => '

A container for information about the replication destination.

', ], ], 'DisplayName' => [ 'base' => NULL, 'refs' => [ 'Grantee$DisplayName' => '

Screen name of the grantee.

', 'Initiator$DisplayName' => '

Name of the Principal.

', 'Owner$DisplayName' => NULL, ], ], 'ETag' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$ETag' => '

Entity tag of the object.

', 'CompletedPart$ETag' => '

Entity tag returned when the part was uploaded.

', 'CopyObjectResult$ETag' => NULL, 'CopyPartResult$ETag' => '

Entity tag of the object.

', 'GetObjectOutput$ETag' => '

An ETag is an opaque identifier assigned by a web server to a specific version of a resource found at a URL.

', 'HeadObjectOutput$ETag' => '

An ETag is an opaque identifier assigned by a web server to a specific version of a resource found at a URL.

', 'Object$ETag' => NULL, 'ObjectVersion$ETag' => NULL, 'Part$ETag' => '

Entity tag returned when the part was uploaded.

', 'PutObjectOutput$ETag' => '

Entity tag for the uploaded object.

', 'UploadPartOutput$ETag' => '

Entity tag for the uploaded object.

', ], ], 'EmailAddress' => [ 'base' => NULL, 'refs' => [ 'Grantee$EmailAddress' => '

Email address of the grantee.

', ], ], 'EnableRequestProgress' => [ 'base' => NULL, 'refs' => [ 'RequestProgress$Enabled' => '

Specifies whether periodic QueryProgress frames should be sent. Valid values: TRUE, FALSE. Default value: FALSE.

', ], ], 'EncodingType' => [ 'base' => '

Requests Amazon S3 to encode the object keys in the response and specifies the encoding method to use. An object key may contain any Unicode character; however, an XML 1.0 parser cannot parse some characters, such as characters with an ASCII value from 0 to 10. For characters that are not supported in XML 1.0, you can add this parameter to request that Amazon S3 encode the keys in the response.

', 'refs' => [ 'ListMultipartUploadsOutput$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListMultipartUploadsRequest$EncodingType' => NULL, 'ListObjectVersionsOutput$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListObjectVersionsRequest$EncodingType' => NULL, 'ListObjectsOutput$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListObjectsRequest$EncodingType' => NULL, 'ListObjectsV2Output$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListObjectsV2Request$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', ], ], 'Encryption' => [ 'base' => '

Describes the server-side encryption that will be applied to the restore results.

', 'refs' => [ 'S3Location$Encryption' => NULL, ], ], 'EncryptionConfiguration' => [ 'base' => '

A container for information about the encryption-based configuration for replicas.

', 'refs' => [ 'Destination$EncryptionConfiguration' => '

A container that provides information about encryption. If SourceSelectionCriteria is specified, you must specify this element.

', ], ], 'EndEvent' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentEventStream$End' => '

The End Event.

', ], ], 'Error' => [ 'base' => NULL, 'refs' => [ 'Errors$member' => NULL, ], ], 'ErrorDocument' => [ 'base' => NULL, 'refs' => [ 'GetBucketWebsiteOutput$ErrorDocument' => NULL, 'WebsiteConfiguration$ErrorDocument' => NULL, ], ], 'Errors' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectsOutput$Errors' => NULL, ], ], 'Event' => [ 'base' => '

The bucket event for which to send notifications.

', 'refs' => [ 'CloudFunctionConfiguration$Event' => NULL, 'EventList$member' => NULL, 'QueueConfigurationDeprecated$Event' => NULL, 'TopicConfigurationDeprecated$Event' => '

Bucket event for which to send notifications.

', ], ], 'EventList' => [ 'base' => NULL, 'refs' => [ 'CloudFunctionConfiguration$Events' => NULL, 'LambdaFunctionConfiguration$Events' => NULL, 'QueueConfiguration$Events' => NULL, 'QueueConfigurationDeprecated$Events' => NULL, 'TopicConfiguration$Events' => NULL, 'TopicConfigurationDeprecated$Events' => NULL, ], ], 'Expiration' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$Expiration' => '

If the object expiration is configured, this will contain the expiration date (expiry-date) and rule ID (rule-id). The value of rule-id is URL encoded.

', 'CopyObjectOutput$Expiration' => '

If the object expiration is configured, the response includes this header.

', 'GetObjectOutput$Expiration' => '

If the object expiration is configured (see PUT Bucket lifecycle), the response includes this header. It includes the expiry-date and rule-id key-value pairs providing object expiration information. The value of the rule-id is URL encoded.

', 'HeadObjectOutput$Expiration' => '

If the object expiration is configured (see PUT Bucket lifecycle), the response includes this header. It includes the expiry-date and rule-id key-value pairs providing object expiration information. The value of the rule-id is URL encoded.

', 'PutObjectOutput$Expiration' => '

If the object expiration is configured, this will contain the expiration date (expiry-date) and rule ID (rule-id). The value of rule-id is URL encoded.

', ], ], 'ExpirationStatus' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$Status' => '

If \'Enabled\', the rule is currently being applied. If \'Disabled\', the rule is not currently being applied.

', 'Rule$Status' => '

If \'Enabled\', the rule is currently being applied. If \'Disabled\', the rule is not currently being applied.

', ], ], 'ExpiredObjectDeleteMarker' => [ 'base' => NULL, 'refs' => [ 'LifecycleExpiration$ExpiredObjectDeleteMarker' => '

Indicates whether Amazon S3 will remove a delete marker with no noncurrent versions. If set to true, the delete marker will be expired; if set to false the policy takes no action. This cannot be specified with Days or Date in a Lifecycle Expiration Policy.

', ], ], 'Expires' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$Expires' => '

The date and time at which the object is no longer cacheable.

', 'CreateMultipartUploadRequest$Expires' => '

The date and time at which the object is no longer cacheable.

', 'GetObjectOutput$Expires' => '

The date and time at which the object is no longer cacheable.

', 'HeadObjectOutput$Expires' => '

The date and time at which the object is no longer cacheable.

', 'PutObjectRequest$Expires' => '

The date and time at which the object is no longer cacheable.

', ], ], 'ExposeHeader' => [ 'base' => NULL, 'refs' => [ 'ExposeHeaders$member' => NULL, ], ], 'ExposeHeaders' => [ 'base' => NULL, 'refs' => [ 'CORSRule$ExposeHeaders' => '

One or more headers in the response that you want customers to be able to access from their applications (for example, from a JavaScript XMLHttpRequest object).

', ], ], 'Expression' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentRequest$Expression' => '

The expression that is used to query the object.

', 'SelectParameters$Expression' => '

The expression that is used to query the object.

', ], ], 'ExpressionType' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentRequest$ExpressionType' => '

The type of the provided expression (for example, SQL).

', 'SelectParameters$ExpressionType' => '

The type of the provided expression (e.g., SQL).

', ], ], 'FetchOwner' => [ 'base' => NULL, 'refs' => [ 'ListObjectsV2Request$FetchOwner' => '

The owner field is not present in ListObjectsV2 results by default. If you want to return the owner field with each key in the result, set the FetchOwner field to true.

', ], ], 'FieldDelimiter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$FieldDelimiter' => '

The value used to separate individual fields in a record.

', 'CSVOutput$FieldDelimiter' => '

The value used to separate individual fields in a record.

', ], ], 'FileHeaderInfo' => [ 'base' => NULL, 'refs' => [ 'CSVInput$FileHeaderInfo' => '

Describes the first line of input. Valid values: None, Ignore, Use.

', ], ], 'FilterRule' => [ 'base' => '

A container for a key-value pair that defines the criteria for the filter rule.

', 'refs' => [ 'FilterRuleList$member' => NULL, ], ], 'FilterRuleList' => [ 'base' => '

A list of containers for the key-value pair that defines the criteria for the filter rule.

', 'refs' => [ 'S3KeyFilter$FilterRules' => NULL, ], ], 'FilterRuleName' => [ 'base' => NULL, 'refs' => [ 'FilterRule$Name' => '

The object key name prefix or suffix identifying one or more objects to which the filtering rule applies. The maximum prefix length is 1,024 characters. Overlapping prefixes and suffixes are not supported. For more information, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

', ], ], 'FilterRuleValue' => [ 'base' => NULL, 'refs' => [ 'FilterRule$Value' => NULL, ], ], 'GetBucketAccelerateConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAccelerateConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAclOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAnalyticsConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAnalyticsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketCorsOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketCorsRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketEncryptionOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketEncryptionRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketInventoryConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketInventoryConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLocationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLocationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLoggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLoggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketMetricsConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketMetricsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketNotificationConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyStatusOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyStatusRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketReplicationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketReplicationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketRequestPaymentOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketRequestPaymentRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketVersioningOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketVersioningRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketWebsiteOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketWebsiteRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectAclOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLegalHoldOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLegalHoldRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLockConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLockConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectRetentionOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectRetentionRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTorrentOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTorrentRequest' => [ 'base' => NULL, 'refs' => [], ], 
'GetPublicAccessBlockOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetPublicAccessBlockRequest' => [ 'base' => NULL, 'refs' => [], ], 'GlacierJobParameters' => [ 'base' => NULL, 'refs' => [ 'RestoreRequest$GlacierJobParameters' => '

Glacier related parameters pertaining to this job. Do not use with restores that specify OutputLocation.

', ], ], 'Grant' => [ 'base' => NULL, 'refs' => [ 'Grants$member' => NULL, ], ], 'GrantFullControl' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantFullControl' => '

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

', 'CreateBucketRequest$GrantFullControl' => '

Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.

', 'CreateMultipartUploadRequest$GrantFullControl' => '

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

', 'PutBucketAclRequest$GrantFullControl' => '

Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.

', 'PutObjectAclRequest$GrantFullControl' => '

Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.

', 'PutObjectRequest$GrantFullControl' => '

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

', ], ], 'GrantRead' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantRead' => '

Allows grantee to read the object data and its metadata.

', 'CreateBucketRequest$GrantRead' => '

Allows grantee to list the objects in the bucket.

', 'CreateMultipartUploadRequest$GrantRead' => '

Allows grantee to read the object data and its metadata.

', 'PutBucketAclRequest$GrantRead' => '

Allows grantee to list the objects in the bucket.

', 'PutObjectAclRequest$GrantRead' => '

Allows grantee to list the objects in the bucket.

', 'PutObjectRequest$GrantRead' => '

Allows grantee to read the object data and its metadata.

', ], ], 'GrantReadACP' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantReadACP' => '

Allows grantee to read the object ACL.

', 'CreateBucketRequest$GrantReadACP' => '

Allows grantee to read the bucket ACL.

', 'CreateMultipartUploadRequest$GrantReadACP' => '

Allows grantee to read the object ACL.

', 'PutBucketAclRequest$GrantReadACP' => '

Allows grantee to read the bucket ACL.

', 'PutObjectAclRequest$GrantReadACP' => '

Allows grantee to read the bucket ACL.

', 'PutObjectRequest$GrantReadACP' => '

Allows grantee to read the object ACL.

', ], ], 'GrantWrite' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$GrantWrite' => '

Allows grantee to create, overwrite, and delete any object in the bucket.

', 'PutBucketAclRequest$GrantWrite' => '

Allows grantee to create, overwrite, and delete any object in the bucket.

', 'PutObjectAclRequest$GrantWrite' => '

Allows grantee to create, overwrite, and delete any object in the bucket.

', ], ], 'GrantWriteACP' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable object.

', 'CreateBucketRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable bucket.

', 'CreateMultipartUploadRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable object.

', 'PutBucketAclRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable bucket.

', 'PutObjectAclRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable bucket.

', 'PutObjectRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable object.

', ], ], 'Grantee' => [ 'base' => NULL, 'refs' => [ 'Grant$Grantee' => NULL, 'TargetGrant$Grantee' => NULL, ], ], 'Grants' => [ 'base' => NULL, 'refs' => [ 'AccessControlPolicy$Grants' => '

A list of grants.

', 'GetBucketAclOutput$Grants' => '

A list of grants.

', 'GetObjectAclOutput$Grants' => '

A list of grants.

', 'S3Location$AccessControlList' => '

A list of grants that control access to the staged results.

', ], ], 'HeadBucketRequest' => [ 'base' => NULL, 'refs' => [], ], 'HeadObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'HeadObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'HostName' => [ 'base' => NULL, 'refs' => [ 'Redirect$HostName' => '

The host name to use in the redirect request.

', 'RedirectAllRequestsTo$HostName' => '

Name of the host where requests will be redirected.

', ], ], 'HttpErrorCodeReturnedEquals' => [ 'base' => NULL, 'refs' => [ 'Condition$HttpErrorCodeReturnedEquals' => '

The HTTP error code when the redirect is applied. In the event of an error, if the error code equals this value, then the specified redirect is applied. Required when parent element Condition is specified and sibling KeyPrefixEquals is not specified. If both are specified, then both must be true for the redirect to be applied.

', ], ], 'HttpRedirectCode' => [ 'base' => NULL, 'refs' => [ 'Redirect$HttpRedirectCode' => '

The HTTP redirect code to use on the response. Not required if one of the siblings is present.

', ], ], 'ID' => [ 'base' => NULL, 'refs' => [ 'Grantee$ID' => '

The canonical user ID of the grantee.

', 'Initiator$ID' => '

If the principal is an AWS account, it provides the Canonical User ID. If the principal is an IAM User, it provides a user ARN value.

', 'LifecycleRule$ID' => '

Unique identifier for the rule. The value cannot be longer than 255 characters.

', 'Owner$ID' => NULL, 'ReplicationRule$ID' => '

A unique identifier for the rule. The maximum length is 255 characters.

', 'Rule$ID' => '

Unique identifier for the rule. The value cannot be longer than 255 characters.

', ], ], 'IfMatch' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfMatch' => '

Return the object only if its entity tag (ETag) is the same as the one specified, otherwise return a 412 (precondition failed).

', 'HeadObjectRequest$IfMatch' => '

Return the object only if its entity tag (ETag) is the same as the one specified, otherwise return a 412 (precondition failed).

', ], ], 'IfModifiedSince' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfModifiedSince' => '

Return the object only if it has been modified since the specified time, otherwise return a 304 (not modified).

', 'HeadObjectRequest$IfModifiedSince' => '

Return the object only if it has been modified since the specified time, otherwise return a 304 (not modified).

', ], ], 'IfNoneMatch' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfNoneMatch' => '

Return the object only if its entity tag (ETag) is different from the one specified, otherwise return a 304 (not modified).

', 'HeadObjectRequest$IfNoneMatch' => '

Return the object only if its entity tag (ETag) is different from the one specified, otherwise return a 304 (not modified).

', ], ], 'IfUnmodifiedSince' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfUnmodifiedSince' => '

Return the object only if it has not been modified since the specified time, otherwise return a 412 (precondition failed).

', 'HeadObjectRequest$IfUnmodifiedSince' => '

Return the object only if it has not been modified since the specified time, otherwise return a 412 (precondition failed).

', ], ], 'IndexDocument' => [ 'base' => NULL, 'refs' => [ 'GetBucketWebsiteOutput$IndexDocument' => NULL, 'WebsiteConfiguration$IndexDocument' => NULL, ], ], 'Initiated' => [ 'base' => NULL, 'refs' => [ 'MultipartUpload$Initiated' => '

Date and time at which the multipart upload was initiated.

', ], ], 'Initiator' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$Initiator' => '

Identifies who initiated the multipart upload.

', 'MultipartUpload$Initiator' => '

Identifies who initiated the multipart upload.

', ], ], 'InputSerialization' => [ 'base' => '

Describes the serialization format of the object.

', 'refs' => [ 'SelectObjectContentRequest$InputSerialization' => '

Describes the format of the data in the object that is being queried.

', 'SelectParameters$InputSerialization' => '

Describes the serialization format of the object.

', ], ], 'InventoryConfiguration' => [ 'base' => NULL, 'refs' => [ 'GetBucketInventoryConfigurationOutput$InventoryConfiguration' => '

Specifies the inventory configuration.

', 'InventoryConfigurationList$member' => NULL, 'PutBucketInventoryConfigurationRequest$InventoryConfiguration' => '

Specifies the inventory configuration.

', ], ], 'InventoryConfigurationList' => [ 'base' => NULL, 'refs' => [ 'ListBucketInventoryConfigurationsOutput$InventoryConfigurationList' => '

The list of inventory configurations for a bucket.

', ], ], 'InventoryDestination' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$Destination' => '

Contains information about where to publish the inventory results.

', ], ], 'InventoryEncryption' => [ 'base' => '

Contains the type of server-side encryption used to encrypt the inventory results.

', 'refs' => [ 'InventoryS3BucketDestination$Encryption' => '

Contains the type of server-side encryption used to encrypt the inventory results.

', ], ], 'InventoryFilter' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$Filter' => '

Specifies an inventory filter. The inventory only includes objects that meet the filter\'s criteria.

', ], ], 'InventoryFormat' => [ 'base' => NULL, 'refs' => [ 'InventoryS3BucketDestination$Format' => '

Specifies the output format of the inventory results.

', ], ], 'InventoryFrequency' => [ 'base' => NULL, 'refs' => [ 'InventorySchedule$Frequency' => '

Specifies how frequently inventory results are produced.

', ], ], 'InventoryId' => [ 'base' => NULL, 'refs' => [ 'DeleteBucketInventoryConfigurationRequest$Id' => '

The ID used to identify the inventory configuration.

', 'GetBucketInventoryConfigurationRequest$Id' => '

The ID used to identify the inventory configuration.

', 'InventoryConfiguration$Id' => '

The ID used to identify the inventory configuration.

', 'PutBucketInventoryConfigurationRequest$Id' => '

The ID used to identify the inventory configuration.

', ], ], 'InventoryIncludedObjectVersions' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$IncludedObjectVersions' => '

Specifies which object version(s) to include in the inventory results.

', ], ], 'InventoryOptionalField' => [ 'base' => NULL, 'refs' => [ 'InventoryOptionalFields$member' => NULL, ], ], 'InventoryOptionalFields' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$OptionalFields' => '

Contains the optional fields that are included in the inventory results.

', ], ], 'InventoryS3BucketDestination' => [ 'base' => NULL, 'refs' => [ 'InventoryDestination$S3BucketDestination' => '

Contains the bucket name, file format, bucket owner (optional), and prefix (optional) where inventory results are published.

', ], ], 'InventorySchedule' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$Schedule' => '

Specifies the schedule for generating inventory results.

', ], ], 'IsEnabled' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$IsEnabled' => '

Specifies whether the inventory is enabled or disabled.

', ], ], 'IsLatest' => [ 'base' => NULL, 'refs' => [ 'DeleteMarkerEntry$IsLatest' => '

Specifies whether the object is (true) or is not (false) the latest version of an object.

', 'ObjectVersion$IsLatest' => '

Specifies whether the object is (true) or is not (false) the latest version of an object.

', ], ], 'IsPublic' => [ 'base' => NULL, 'refs' => [ 'PolicyStatus$IsPublic' => '

The policy status for this bucket. TRUE indicates that this bucket is public. FALSE indicates that the bucket is not public.

', ], ], 'IsTruncated' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$IsTruncated' => '

Indicates whether the returned list of analytics configurations is complete. A value of true indicates that the list is not complete and the NextContinuationToken will be provided for a subsequent request.

', 'ListBucketInventoryConfigurationsOutput$IsTruncated' => '

Indicates whether the returned list of inventory configurations is truncated in this response. A value of true indicates that the list is truncated.

', 'ListBucketMetricsConfigurationsOutput$IsTruncated' => '

Indicates whether the returned list of metrics configurations is complete. A value of true indicates that the list is not complete and the NextContinuationToken will be provided for a subsequent request.

', 'ListMultipartUploadsOutput$IsTruncated' => '

Indicates whether the returned list of multipart uploads is truncated. A value of true indicates that the list was truncated. The list can be truncated if the number of multipart uploads exceeds the limit allowed or specified by max uploads.

', 'ListObjectVersionsOutput$IsTruncated' => '

A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria. If your results were truncated, you can make a follow-up paginated request using the NextKeyMarker and NextVersionIdMarker response parameters as a starting place in another request to return the rest of the results.

', 'ListObjectsOutput$IsTruncated' => '

A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria.

', 'ListObjectsV2Output$IsTruncated' => '

A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria.

', 'ListPartsOutput$IsTruncated' => '

Indicates whether the returned list of parts is truncated.

', ], ], 'JSONInput' => [ 'base' => NULL, 'refs' => [ 'InputSerialization$JSON' => '

Specifies JSON as the object\'s input serialization format.

', ], ], 'JSONOutput' => [ 'base' => NULL, 'refs' => [ 'OutputSerialization$JSON' => '

Specifies JSON as the request\'s output serialization format.

', ], ], 'JSONType' => [ 'base' => NULL, 'refs' => [ 'JSONInput$Type' => '

The type of JSON. Valid values: Document, Lines.

', ], ], 'KMSContext' => [ 'base' => NULL, 'refs' => [ 'Encryption$KMSContext' => '

If the encryption type is aws:kms, this optional value can be used to specify the encryption context for the restore results.

', ], ], 'KeyCount' => [ 'base' => NULL, 'refs' => [ 'ListObjectsV2Output$KeyCount' => '

KeyCount is the number of keys returned with this request. KeyCount will always be less than or equal to the MaxKeys field. For example, if you ask for 50 keys, your result will include 50 keys or fewer.

', ], ], 'KeyMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$KeyMarker' => '

The key at or after which the listing began.

', 'ListMultipartUploadsRequest$KeyMarker' => '

Together with upload-id-marker, this parameter specifies the multipart upload after which listing should begin.

', 'ListObjectVersionsOutput$KeyMarker' => '

Marks the last Key returned in a truncated response.

', 'ListObjectVersionsRequest$KeyMarker' => '

Specifies the key to start with when listing objects in a bucket.

', ], ], 'KeyPrefixEquals' => [ 'base' => NULL, 'refs' => [ 'Condition$KeyPrefixEquals' => '

The object key name prefix when the redirect is applied. For example, to redirect requests for ExamplePage.html, the key prefix will be ExamplePage.html. To redirect requests for all pages with the prefix docs/, the key prefix will be docs/, which identifies all objects in the docs/ folder. Required when the parent element Condition is specified and sibling HttpErrorCodeReturnedEquals is not specified. If both conditions are specified, both must be true for the redirect to be applied.

', ], ], 'LambdaFunctionArn' => [ 'base' => NULL, 'refs' => [ 'LambdaFunctionConfiguration$LambdaFunctionArn' => '

The Amazon Resource Name (ARN) of the Lambda cloud function that Amazon S3 can invoke when it detects events of the specified type.

', ], ], 'LambdaFunctionConfiguration' => [ 'base' => '

A container for specifying the configuration for AWS Lambda notifications.

', 'refs' => [ 'LambdaFunctionConfigurationList$member' => NULL, ], ], 'LambdaFunctionConfigurationList' => [ 'base' => NULL, 'refs' => [ 'NotificationConfiguration$LambdaFunctionConfigurations' => NULL, ], ], 'LastModified' => [ 'base' => NULL, 'refs' => [ 'CopyObjectResult$LastModified' => NULL, 'CopyPartResult$LastModified' => '

Date and time at which the object was uploaded.

', 'DeleteMarkerEntry$LastModified' => '

Date and time the object was last modified.

', 'GetObjectOutput$LastModified' => '

Last modified date of the object.

', 'HeadObjectOutput$LastModified' => '

Last modified date of the object.

', 'Object$LastModified' => NULL, 'ObjectVersion$LastModified' => '

Date and time the object was last modified.

', 'Part$LastModified' => '

Date and time at which the part was uploaded.

', ], ], 'LifecycleConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketLifecycleRequest$LifecycleConfiguration' => NULL, ], ], 'LifecycleExpiration' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$Expiration' => NULL, 'Rule$Expiration' => NULL, ], ], 'LifecycleRule' => [ 'base' => NULL, 'refs' => [ 'LifecycleRules$member' => NULL, ], ], 'LifecycleRuleAndOperator' => [ 'base' => '

This is used in a Lifecycle Rule Filter to apply a logical AND to two or more predicates. The Lifecycle Rule will apply to any object matching all of the predicates configured inside the And operator.

', 'refs' => [ 'LifecycleRuleFilter$And' => NULL, ], ], 'LifecycleRuleFilter' => [ 'base' => '

The Filter is used to identify objects that a Lifecycle Rule applies to. A Filter must have exactly one of Prefix, Tag, or And specified.

', 'refs' => [ 'LifecycleRule$Filter' => NULL, ], ], 'LifecycleRules' => [ 'base' => NULL, 'refs' => [ 'BucketLifecycleConfiguration$Rules' => NULL, 'GetBucketLifecycleConfigurationOutput$Rules' => NULL, ], ], 'ListBucketAnalyticsConfigurationsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketAnalyticsConfigurationsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketInventoryConfigurationsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketInventoryConfigurationsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketMetricsConfigurationsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketMetricsConfigurationsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListMultipartUploadsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListMultipartUploadsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectVersionsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectVersionsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsV2Output' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsV2Request' => [ 'base' => NULL, 'refs' => [], ], 'ListPartsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListPartsRequest' => [ 'base' => NULL, 'refs' => [], ], 'Location' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$Location' => NULL, 'CreateBucketOutput$Location' => NULL, ], ], 'LocationPrefix' => [ 'base' => NULL, 'refs' => [ 'S3Location$Prefix' => '

The prefix that is prepended to the restore results for this request.

', ], ], 'LoggingEnabled' => [ 'base' => '

Container for logging information. Presence of this element indicates that logging is enabled. Parameters TargetBucket and TargetPrefix are required in this case.

', 'refs' => [ 'BucketLoggingStatus$LoggingEnabled' => NULL, 'GetBucketLoggingOutput$LoggingEnabled' => NULL, ], ], 'MFA' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectRequest$MFA' => '

The concatenation of the authentication device\'s serial number, a space, and the value that is displayed on your authentication device.

', 'DeleteObjectsRequest$MFA' => '

The concatenation of the authentication device\'s serial number, a space, and the value that is displayed on your authentication device.

', 'PutBucketVersioningRequest$MFA' => '

The concatenation of the authentication device\'s serial number, a space, and the value that is displayed on your authentication device.

', ], ], 'MFADelete' => [ 'base' => NULL, 'refs' => [ 'VersioningConfiguration$MFADelete' => '

Specifies whether MFA delete is enabled in the bucket versioning configuration. This element is only returned if the bucket has been configured with MFA delete. If the bucket has never been so configured, this element is not returned.

', ], ], 'MFADeleteStatus' => [ 'base' => NULL, 'refs' => [ 'GetBucketVersioningOutput$MFADelete' => '

Specifies whether MFA delete is enabled in the bucket versioning configuration. This element is only returned if the bucket has been configured with MFA delete. If the bucket has never been so configured, this element is not returned.

', ], ], 'Marker' => [ 'base' => NULL, 'refs' => [ 'ListObjectsOutput$Marker' => NULL, 'ListObjectsRequest$Marker' => '

Specifies the key to start with when listing objects in a bucket.

', ], ], 'MaxAgeSeconds' => [ 'base' => NULL, 'refs' => [ 'CORSRule$MaxAgeSeconds' => '

The time in seconds that your browser is to cache the preflight response for the specified resource.

', ], ], 'MaxKeys' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$MaxKeys' => NULL, 'ListObjectVersionsRequest$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', 'ListObjectsOutput$MaxKeys' => NULL, 'ListObjectsRequest$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', 'ListObjectsV2Output$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', 'ListObjectsV2Request$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', ], ], 'MaxParts' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$MaxParts' => '

Maximum number of parts that were allowed in the response.

', 'ListPartsRequest$MaxParts' => '

Sets the maximum number of parts to return.

', ], ], 'MaxUploads' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$MaxUploads' => '

Maximum number of multipart uploads that could have been included in the response.

', 'ListMultipartUploadsRequest$MaxUploads' => '

Sets the maximum number of multipart uploads, from 1 to 1,000, to return in the response body. 1,000 is the maximum number of uploads that can be returned in a response.

', ], ], 'Message' => [ 'base' => NULL, 'refs' => [ 'Error$Message' => NULL, ], ], 'Metadata' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$Metadata' => '

A map of metadata to store with the object in S3.

', 'CreateMultipartUploadRequest$Metadata' => '

A map of metadata to store with the object in S3.

', 'GetObjectOutput$Metadata' => '

A map of metadata to store with the object in S3.

', 'HeadObjectOutput$Metadata' => '

A map of metadata to store with the object in S3.

', 'PutObjectRequest$Metadata' => '

A map of metadata to store with the object in S3.

', ], ], 'MetadataDirective' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$MetadataDirective' => '

Specifies whether the metadata is copied from the source object or replaced with metadata provided in the request.

', ], ], 'MetadataEntry' => [ 'base' => '

A metadata key-value pair to store with an object.

', 'refs' => [ 'UserMetadata$member' => NULL, ], ], 'MetadataKey' => [ 'base' => NULL, 'refs' => [ 'Metadata$key' => NULL, 'MetadataEntry$Name' => NULL, ], ], 'MetadataValue' => [ 'base' => NULL, 'refs' => [ 'Metadata$value' => NULL, 'MetadataEntry$Value' => NULL, ], ], 'MetricsAndOperator' => [ 'base' => NULL, 'refs' => [ 'MetricsFilter$And' => '

A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter. The operator must have at least two predicates, and an object must match all of the predicates in order for the filter to apply.

', ], ], 'MetricsConfiguration' => [ 'base' => NULL, 'refs' => [ 'GetBucketMetricsConfigurationOutput$MetricsConfiguration' => '

Specifies the metrics configuration.

', 'MetricsConfigurationList$member' => NULL, 'PutBucketMetricsConfigurationRequest$MetricsConfiguration' => '

Specifies the metrics configuration.

', ], ], 'MetricsConfigurationList' => [ 'base' => NULL, 'refs' => [ 'ListBucketMetricsConfigurationsOutput$MetricsConfigurationList' => '

The list of metrics configurations for a bucket.

', ], ], 'MetricsFilter' => [ 'base' => NULL, 'refs' => [ 'MetricsConfiguration$Filter' => '

Specifies a metrics configuration filter. The metrics configuration will only include objects that meet the filter\'s criteria. A filter must be a prefix, a tag, or a conjunction (MetricsAndOperator).

', ], ], 'MetricsId' => [ 'base' => NULL, 'refs' => [ 'DeleteBucketMetricsConfigurationRequest$Id' => '

The ID used to identify the metrics configuration.

', 'GetBucketMetricsConfigurationRequest$Id' => '

The ID used to identify the metrics configuration.

', 'MetricsConfiguration$Id' => '

The ID used to identify the metrics configuration.

', 'PutBucketMetricsConfigurationRequest$Id' => '

The ID used to identify the metrics configuration.

', ], ], 'MissingMeta' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$MissingMeta' => '

This is set to the number of metadata entries not returned in x-amz-meta headers. This can happen if you create metadata using an API like SOAP that supports more flexible metadata than the REST API. For example, using SOAP, you can create metadata whose values are not legal HTTP headers.

', 'HeadObjectOutput$MissingMeta' => '

This is set to the number of metadata entries not returned in x-amz-meta headers. This can happen if you create metadata using an API like SOAP that supports more flexible metadata than the REST API. For example, using SOAP, you can create metadata whose values are not legal HTTP headers.

', ], ], 'MultipartUpload' => [ 'base' => NULL, 'refs' => [ 'MultipartUploadList$member' => NULL, ], ], 'MultipartUploadId' => [ 'base' => NULL, 'refs' => [ 'AbortMultipartUploadRequest$UploadId' => NULL, 'CompleteMultipartUploadRequest$UploadId' => NULL, 'CreateMultipartUploadOutput$UploadId' => '

ID for the initiated multipart upload.

', 'ListPartsOutput$UploadId' => '

Upload ID identifying the multipart upload whose parts are being listed.

', 'ListPartsRequest$UploadId' => '

Upload ID identifying the multipart upload whose parts are being listed.

', 'MultipartUpload$UploadId' => '

Upload ID that identifies the multipart upload.

', 'UploadPartCopyRequest$UploadId' => '

Upload ID identifying the multipart upload whose part is being copied.

', 'UploadPartRequest$UploadId' => '

Upload ID identifying the multipart upload whose part is being uploaded.

', ], ], 'MultipartUploadList' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$Uploads' => NULL, ], ], 'NextKeyMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$NextKeyMarker' => '

When a list is truncated, this element specifies the value that should be used for the key-marker request parameter in a subsequent request.

', 'ListObjectVersionsOutput$NextKeyMarker' => '

Use this value for the key marker request parameter in a subsequent request.

', ], ], 'NextMarker' => [ 'base' => NULL, 'refs' => [ 'ListObjectsOutput$NextMarker' => '

When the response is truncated (the IsTruncated element value in the response is true), you can use the key name in this field as a marker in the subsequent request to get the next set of objects. Amazon S3 lists objects in alphabetical order. Note: This element is returned only if you have the delimiter request parameter specified. If the response does not include the NextMarker and it is truncated, you can use the value of the last Key in the response as the marker in the subsequent request to get the next set of object keys.

', ], ], 'NextPartNumberMarker' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$NextPartNumberMarker' => '

When a list is truncated, this element specifies the last part in the list, as well as the value to use for the part-number-marker request parameter in a subsequent request.

', ], ], 'NextToken' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$NextContinuationToken' => '

NextContinuationToken is sent when isTruncated is true, which indicates that there are more analytics configurations to list. The next request must include this NextContinuationToken. The token is obfuscated and is not a usable value.

', 'ListBucketInventoryConfigurationsOutput$NextContinuationToken' => '

The marker used to continue this inventory configuration listing. Use the NextContinuationToken from this response to continue the listing in a subsequent request. The continuation token is an opaque value that Amazon S3 understands.

', 'ListBucketMetricsConfigurationsOutput$NextContinuationToken' => '

The marker used to continue a metrics configuration listing that has been truncated. Use the NextContinuationToken from a previously truncated list response to continue the listing. The continuation token is an opaque value that Amazon S3 understands.

', 'ListObjectsV2Output$NextContinuationToken' => '

NextContinuationToken is sent when isTruncated is true, which means there are more keys in the bucket that can be listed. The next list request to Amazon S3 can be continued with this NextContinuationToken. NextContinuationToken is obfuscated and is not a real key.

', ], ], 'NextUploadIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$NextUploadIdMarker' => '

When a list is truncated, this element specifies the value that should be used for the upload-id-marker request parameter in a subsequent request.

', ], ], 'NextVersionIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$NextVersionIdMarker' => '

Use this value for the next version id marker parameter in a subsequent request.

', ], ], 'NoSuchBucket' => [ 'base' => '

The specified bucket does not exist.

', 'refs' => [], ], 'NoSuchKey' => [ 'base' => '

The specified key does not exist.

', 'refs' => [], ], 'NoSuchUpload' => [ 'base' => '

The specified multipart upload does not exist.

', 'refs' => [], ], 'NoncurrentVersionExpiration' => [ 'base' => '

Specifies when noncurrent object versions expire. Upon expiration, Amazon S3 permanently deletes the noncurrent object versions. You set this lifecycle configuration action on a bucket that has versioning enabled (or suspended) to request that Amazon S3 delete noncurrent object versions at a specific period in the object\'s lifetime.

', 'refs' => [ 'LifecycleRule$NoncurrentVersionExpiration' => NULL, 'Rule$NoncurrentVersionExpiration' => NULL, ], ], 'NoncurrentVersionTransition' => [ 'base' => '

Container for the transition rule that describes when noncurrent objects transition to the STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING or GLACIER storage class. If your bucket is versioning-enabled (or versioning is suspended), you can set this action to request that Amazon S3 transition noncurrent object versions to the STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING or GLACIER storage class at a specific period in the object\'s lifetime.

', 'refs' => [ 'NoncurrentVersionTransitionList$member' => NULL, 'Rule$NoncurrentVersionTransition' => NULL, ], ], 'NoncurrentVersionTransitionList' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$NoncurrentVersionTransitions' => NULL, ], ], 'NotificationConfiguration' => [ 'base' => '

A container for specifying the notification configuration of the bucket. If this element is empty, notifications are turned off for the bucket.

', 'refs' => [ 'PutBucketNotificationConfigurationRequest$NotificationConfiguration' => NULL, ], ], 'NotificationConfigurationDeprecated' => [ 'base' => NULL, 'refs' => [ 'PutBucketNotificationRequest$NotificationConfiguration' => NULL, ], ], 'NotificationConfigurationFilter' => [ 'base' => '

A container for object key name filtering rules. For information about key name filtering, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

', 'refs' => [ 'LambdaFunctionConfiguration$Filter' => NULL, 'QueueConfiguration$Filter' => NULL, 'TopicConfiguration$Filter' => NULL, ], ], 'NotificationId' => [ 'base' => '

An optional unique identifier for configurations in a notification configuration. If you don\'t provide one, Amazon S3 will assign an ID.

', 'refs' => [ 'CloudFunctionConfiguration$Id' => NULL, 'LambdaFunctionConfiguration$Id' => NULL, 'QueueConfiguration$Id' => NULL, 'QueueConfigurationDeprecated$Id' => NULL, 'TopicConfiguration$Id' => NULL, 'TopicConfigurationDeprecated$Id' => NULL, ], ], 'Object' => [ 'base' => NULL, 'refs' => [ 'ObjectList$member' => NULL, ], ], 'ObjectAlreadyInActiveTierError' => [ 'base' => '

This operation is not allowed against this storage tier.

', 'refs' => [], ], 'ObjectCannedACL' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ACL' => '

The canned ACL to apply to the object.

', 'CreateMultipartUploadRequest$ACL' => '

The canned ACL to apply to the object.

', 'PutObjectAclRequest$ACL' => '

The canned ACL to apply to the object.

', 'PutObjectRequest$ACL' => '

The canned ACL to apply to the object.

', 'S3Location$CannedACL' => '

The canned ACL to apply to the restore results.

', ], ], 'ObjectIdentifier' => [ 'base' => NULL, 'refs' => [ 'ObjectIdentifierList$member' => NULL, ], ], 'ObjectIdentifierList' => [ 'base' => NULL, 'refs' => [ 'Delete$Objects' => NULL, ], ], 'ObjectKey' => [ 'base' => NULL, 'refs' => [ 'AbortMultipartUploadRequest$Key' => NULL, 'CompleteMultipartUploadOutput$Key' => NULL, 'CompleteMultipartUploadRequest$Key' => NULL, 'CopyObjectRequest$Key' => NULL, 'CreateMultipartUploadOutput$Key' => '

Object key for which the multipart upload was initiated.

', 'CreateMultipartUploadRequest$Key' => NULL, 'DeleteMarkerEntry$Key' => '

The object key.

', 'DeleteObjectRequest$Key' => NULL, 'DeleteObjectTaggingRequest$Key' => NULL, 'DeletedObject$Key' => NULL, 'Error$Key' => NULL, 'ErrorDocument$Key' => '

The object key name to use when a 4XX class error occurs.

', 'GetObjectAclRequest$Key' => NULL, 'GetObjectLegalHoldRequest$Key' => '

The key name for the object whose Legal Hold status you want to retrieve.

', 'GetObjectRequest$Key' => NULL, 'GetObjectRetentionRequest$Key' => '

The key name for the object whose retention settings you want to retrieve.

', 'GetObjectTaggingRequest$Key' => NULL, 'GetObjectTorrentRequest$Key' => NULL, 'HeadObjectRequest$Key' => NULL, 'ListPartsOutput$Key' => '

Object key for which the multipart upload was initiated.

', 'ListPartsRequest$Key' => NULL, 'MultipartUpload$Key' => '

Key of the object for which the multipart upload was initiated.

', 'Object$Key' => NULL, 'ObjectIdentifier$Key' => '

Key name of the object to delete.

', 'ObjectVersion$Key' => '

The object key.

', 'PutObjectAclRequest$Key' => NULL, 'PutObjectLegalHoldRequest$Key' => '

The key name for the object that you want to place a Legal Hold on.

', 'PutObjectRequest$Key' => '

Object key for which the PUT operation was initiated.

', 'PutObjectRetentionRequest$Key' => '

The key name for the object that you want to apply this Object Retention configuration to.

', 'PutObjectTaggingRequest$Key' => NULL, 'RestoreObjectRequest$Key' => NULL, 'SelectObjectContentRequest$Key' => '

The object key.

', 'Tag$Key' => '

Name of the tag.

', 'UploadPartCopyRequest$Key' => NULL, 'UploadPartRequest$Key' => '

Object key for which the multipart upload was initiated.

', ], ], 'ObjectList' => [ 'base' => NULL, 'refs' => [ 'ListObjectsOutput$Contents' => NULL, 'ListObjectsV2Output$Contents' => '

Metadata about each object returned.

', ], ], 'ObjectLockConfiguration' => [ 'base' => '

The container element for Object Lock configuration parameters.

', 'refs' => [ 'GetObjectLockConfigurationOutput$ObjectLockConfiguration' => '

The specified bucket\'s Object Lock configuration.

', 'PutObjectLockConfigurationRequest$ObjectLockConfiguration' => '

The Object Lock configuration that you want to apply to the specified bucket.

', ], ], 'ObjectLockEnabled' => [ 'base' => NULL, 'refs' => [ 'ObjectLockConfiguration$ObjectLockEnabled' => '

Indicates whether this bucket has an Object Lock configuration enabled.

', ], ], 'ObjectLockEnabledForBucket' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$ObjectLockEnabledForBucket' => '

Specifies whether you want S3 Object Lock to be enabled for the new bucket.

', ], ], 'ObjectLockLegalHold' => [ 'base' => '

A Legal Hold configuration for an object.

', 'refs' => [ 'GetObjectLegalHoldOutput$LegalHold' => '

The current Legal Hold status for the specified object.

', 'PutObjectLegalHoldRequest$LegalHold' => '

Container element for the Legal Hold configuration you want to apply to the specified object.
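For example, a minimal sketch with the AWS SDK for PHP (assuming a configured Aws\S3\S3Client in $s3; bucket and key names are hypothetical):

    // Place a Legal Hold on an object in an Object Lock-enabled bucket.
    $s3->putObjectLegalHold([
        'Bucket'    => 'my-bucket',
        'Key'       => 'contracts/q1.pdf',
        'LegalHold' => ['Status' => 'ON'],
    ]);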

', ], ], 'ObjectLockLegalHoldStatus' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ObjectLockLegalHoldStatus' => '

Specifies whether you want to apply a Legal Hold to the copied object.

', 'CreateMultipartUploadRequest$ObjectLockLegalHoldStatus' => '

Specifies whether you want to apply a Legal Hold to the uploaded object.

', 'GetObjectOutput$ObjectLockLegalHoldStatus' => '

Indicates whether this object has an active legal hold. This field is only returned if you have permission to view an object\'s legal hold status.

', 'HeadObjectOutput$ObjectLockLegalHoldStatus' => '

The Legal Hold status for the specified object.

', 'ObjectLockLegalHold$Status' => '

Indicates whether the specified object has a Legal Hold in place.

', 'PutObjectRequest$ObjectLockLegalHoldStatus' => '

The Legal Hold status that you want to apply to the specified object.

', ], ], 'ObjectLockMode' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ObjectLockMode' => '

The Object Lock mode that you want to apply to the copied object.

', 'CreateMultipartUploadRequest$ObjectLockMode' => '

Specifies the Object Lock mode that you want to apply to the uploaded object.

', 'GetObjectOutput$ObjectLockMode' => '

The Object Lock mode currently in place for this object.

', 'HeadObjectOutput$ObjectLockMode' => '

The Object Lock mode currently in place for this object.

', 'PutObjectRequest$ObjectLockMode' => '

The Object Lock mode that you want to apply to this object.

', ], ], 'ObjectLockRetainUntilDate' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ObjectLockRetainUntilDate' => '

The date and time when you want the copied object\'s Object Lock to expire.

', 'CreateMultipartUploadRequest$ObjectLockRetainUntilDate' => '

Specifies the date and time when you want the Object Lock to expire.

', 'GetObjectOutput$ObjectLockRetainUntilDate' => '

The date and time when this object\'s Object Lock will expire.

', 'HeadObjectOutput$ObjectLockRetainUntilDate' => '

The date and time when this object\'s Object Lock will expire.

', 'PutObjectRequest$ObjectLockRetainUntilDate' => '

The date and time when you want this object\'s Object Lock to expire.

', ], ], 'ObjectLockRetention' => [ 'base' => '

A Retention configuration for an object.

', 'refs' => [ 'GetObjectRetentionOutput$Retention' => '

The container element for an object\'s retention settings.

', 'PutObjectRetentionRequest$Retention' => '

The container element for the Object Retention configuration.
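A minimal sketch with the AWS SDK for PHP (assuming a configured Aws\S3\S3Client in $s3; names are hypothetical):

    // Keep the object immutable until the given date under GOVERNANCE mode.
    $s3->putObjectRetention([
        'Bucket'    => 'my-bucket',
        'Key'       => 'contracts/q1.pdf',
        'Retention' => [
            'Mode'            => 'GOVERNANCE',
            'RetainUntilDate' => new DateTime('2020-01-01T00:00:00Z'),
        ],
    ]);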

', ], ], 'ObjectLockRetentionMode' => [ 'base' => NULL, 'refs' => [ 'DefaultRetention$Mode' => '

The default Object Lock retention mode you want to apply to new objects placed in the specified bucket.

', 'ObjectLockRetention$Mode' => '

Indicates the Retention mode for the specified object.

', ], ], 'ObjectLockRule' => [ 'base' => '

The container element for an Object Lock rule.

', 'refs' => [ 'ObjectLockConfiguration$Rule' => '

The Object Lock rule in place for the specified object.

', ], ], 'ObjectLockToken' => [ 'base' => NULL, 'refs' => [ 'PutObjectLockConfigurationRequest$Token' => NULL, ], ], 'ObjectNotInActiveTierError' => [ 'base' => '

The source object of the COPY operation is not in the active tier and is only stored in Amazon Glacier.

', 'refs' => [], ], 'ObjectStorageClass' => [ 'base' => NULL, 'refs' => [ 'Object$StorageClass' => '

The class of storage used to store the object.

', ], ], 'ObjectVersion' => [ 'base' => NULL, 'refs' => [ 'ObjectVersionList$member' => NULL, ], ], 'ObjectVersionId' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$VersionId' => '

Version of the object.

', 'CopyObjectOutput$VersionId' => '

Version ID of the newly created copy.

', 'DeleteMarkerEntry$VersionId' => '

Version ID of an object.

', 'DeleteObjectOutput$VersionId' => '

Returns the version ID of the delete marker created as a result of the DELETE operation.

', 'DeleteObjectRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'DeleteObjectTaggingOutput$VersionId' => '

The versionId of the object the tag-set was removed from.

', 'DeleteObjectTaggingRequest$VersionId' => '

The versionId of the object that the tag-set will be removed from.

', 'DeletedObject$VersionId' => NULL, 'Error$VersionId' => NULL, 'GetObjectAclRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'GetObjectLegalHoldRequest$VersionId' => '

The version ID of the object whose Legal Hold status you want to retrieve.

', 'GetObjectOutput$VersionId' => '

Version of the object.

', 'GetObjectRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'GetObjectRetentionRequest$VersionId' => '

The version ID for the object whose retention settings you want to retrieve.

', 'GetObjectTaggingOutput$VersionId' => NULL, 'GetObjectTaggingRequest$VersionId' => NULL, 'HeadObjectOutput$VersionId' => '

Version of the object.

', 'HeadObjectRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'ObjectIdentifier$VersionId' => '

VersionId for the specific version of the object to delete.

', 'ObjectVersion$VersionId' => '

Version ID of an object.

', 'PutObjectAclRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'PutObjectLegalHoldRequest$VersionId' => '

The version ID of the object that you want to place a Legal Hold on.

', 'PutObjectOutput$VersionId' => '

Version of the object.

', 'PutObjectRetentionRequest$VersionId' => '

The version ID for the object that you want to apply this Object Retention configuration to.

', 'PutObjectTaggingOutput$VersionId' => NULL, 'PutObjectTaggingRequest$VersionId' => NULL, 'RestoreObjectRequest$VersionId' => NULL, ], ], 'ObjectVersionList' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$Versions' => NULL, ], ], 'ObjectVersionStorageClass' => [ 'base' => NULL, 'refs' => [ 'ObjectVersion$StorageClass' => '

The class of storage used to store the object.

', ], ], 'OutputLocation' => [ 'base' => '

Describes the location where the restore job\'s output is stored.

', 'refs' => [ 'RestoreRequest$OutputLocation' => '

Describes the location where the restore job\'s output is stored.

', ], ], 'OutputSerialization' => [ 'base' => '

Describes how results of the Select job are serialized.

', 'refs' => [ 'SelectObjectContentRequest$OutputSerialization' => '

Describes the format of the data that you want Amazon S3 to return in response.

', 'SelectParameters$OutputSerialization' => '

Describes how the results of the Select job are serialized.

', ], ], 'Owner' => [ 'base' => NULL, 'refs' => [ 'AccessControlPolicy$Owner' => NULL, 'DeleteMarkerEntry$Owner' => NULL, 'GetBucketAclOutput$Owner' => NULL, 'GetObjectAclOutput$Owner' => NULL, 'ListBucketsOutput$Owner' => NULL, 'ListPartsOutput$Owner' => NULL, 'MultipartUpload$Owner' => NULL, 'Object$Owner' => NULL, 'ObjectVersion$Owner' => NULL, ], ], 'OwnerOverride' => [ 'base' => NULL, 'refs' => [ 'AccessControlTranslation$Owner' => '

The override value for the owner of the replica object.

', ], ], 'ParquetInput' => [ 'base' => NULL, 'refs' => [ 'InputSerialization$Parquet' => '

Specifies Parquet as object\'s input serialization format.

', ], ], 'Part' => [ 'base' => NULL, 'refs' => [ 'Parts$member' => NULL, ], ], 'PartNumber' => [ 'base' => NULL, 'refs' => [ 'CompletedPart$PartNumber' => '

Part number that identifies the part. This is a positive integer between 1 and 10,000.

', 'GetObjectRequest$PartNumber' => '

Part number of the object being read. This is a positive integer between 1 and 10,000. Effectively performs a \'ranged\' GET request for the part specified. Useful for downloading just a part of an object.
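A hedged sketch with the AWS SDK for PHP (assumed client $s3; the object is assumed to have been uploaded via multipart upload, names hypothetical):

    // Fetch only part 1 of a multipart-uploaded object.
    $result = $s3->getObject([
        'Bucket'     => 'my-bucket',
        'Key'        => 'big-file.bin',
        'PartNumber' => 1,
    ]);
    echo $result['ContentLength'];   // size of this part, not the whole object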

', 'HeadObjectRequest$PartNumber' => '

Part number of the object being read. This is a positive integer between 1 and 10,000. Effectively performs a \'ranged\' HEAD request for the part specified. Useful for querying the size of the part and the number of parts in this object.

', 'Part$PartNumber' => '

Part number identifying the part. This is a positive integer between 1 and 10,000.

', 'UploadPartCopyRequest$PartNumber' => '

Part number of part being copied. This is a positive integer between 1 and 10,000.

', 'UploadPartRequest$PartNumber' => '

Part number of part being uploaded. This is a positive integer between 1 and 10,000.

', ], ], 'PartNumberMarker' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$PartNumberMarker' => '

Part number after which listing begins.

', 'ListPartsRequest$PartNumberMarker' => '

Specifies the part after which listing should begin. Only parts with higher part numbers will be listed.

', ], ], 'Parts' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$Parts' => NULL, ], ], 'PartsCount' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$PartsCount' => '

The count of parts this object has.

', 'HeadObjectOutput$PartsCount' => '

The count of parts this object has.

', ], ], 'Payer' => [ 'base' => NULL, 'refs' => [ 'GetBucketRequestPaymentOutput$Payer' => '

Specifies who pays for the download and request fees.

', 'RequestPaymentConfiguration$Payer' => '

Specifies who pays for the download and request fees.

', ], ], 'Permission' => [ 'base' => NULL, 'refs' => [ 'Grant$Permission' => '

Specifies the permission given to the grantee.

', ], ], 'Policy' => [ 'base' => NULL, 'refs' => [ 'GetBucketPolicyOutput$Policy' => '

The bucket policy as a JSON document.

', 'PutBucketPolicyRequest$Policy' => '

The bucket policy as a JSON document.

', ], ], 'PolicyStatus' => [ 'base' => '

The container element for a bucket\'s policy status.

', 'refs' => [ 'GetBucketPolicyStatusOutput$PolicyStatus' => '

The policy status for the specified bucket.

', ], ], 'Prefix' => [ 'base' => NULL, 'refs' => [ 'AnalyticsAndOperator$Prefix' => '

The prefix to use when evaluating an AND predicate.

', 'AnalyticsFilter$Prefix' => '

The prefix to use when evaluating an analytics filter.

', 'AnalyticsS3BucketDestination$Prefix' => '

The prefix to use when exporting data. The exported data begins with this prefix.

', 'CommonPrefix$Prefix' => NULL, 'InventoryFilter$Prefix' => '

The prefix that an object must have to be included in the inventory results.

', 'InventoryS3BucketDestination$Prefix' => '

The prefix that is prepended to all inventory results.

', 'LifecycleRule$Prefix' => '

Prefix identifying one or more objects to which the rule applies. This is deprecated; use Filter instead.

', 'LifecycleRuleAndOperator$Prefix' => NULL, 'LifecycleRuleFilter$Prefix' => '

Prefix identifying one or more objects to which the rule applies.

', 'ListMultipartUploadsOutput$Prefix' => '

When a prefix is provided in the request, this field contains the specified prefix. The result contains only keys starting with the specified prefix.

', 'ListMultipartUploadsRequest$Prefix' => '

Lists in-progress uploads only for those keys that begin with the specified prefix.

', 'ListObjectVersionsOutput$Prefix' => NULL, 'ListObjectVersionsRequest$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'ListObjectsOutput$Prefix' => NULL, 'ListObjectsRequest$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'ListObjectsV2Output$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'ListObjectsV2Request$Prefix' => '

Limits the response to keys that begin with the specified prefix.
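For example (AWS SDK for PHP sketch; $s3 is an assumed, already-configured client and the bucket name is hypothetical):

    // List only keys under the photos/ prefix.
    $result = $s3->listObjectsV2([
        'Bucket' => 'my-bucket',
        'Prefix' => 'photos/',
    ]);
    foreach ($result['Contents'] ?? [] as $object) {
        echo $object['Key'], PHP_EOL;
    }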

', 'MetricsAndOperator$Prefix' => '

The prefix used when evaluating an AND predicate.

', 'MetricsFilter$Prefix' => '

The prefix used when evaluating a metrics filter.

', 'ReplicationRule$Prefix' => '

An object key name prefix that identifies the object or objects to which the rule applies. The maximum prefix length is 1,024 characters.

', 'ReplicationRuleAndOperator$Prefix' => NULL, 'ReplicationRuleFilter$Prefix' => '

An object key name prefix that identifies the subset of objects to which the rule applies.

', 'Rule$Prefix' => '

Prefix identifying one or more objects to which the rule applies.

', ], ], 'Priority' => [ 'base' => NULL, 'refs' => [ 'ReplicationRule$Priority' => '

The priority associated with the rule. If you specify multiple rules in a replication configuration, Amazon S3 prioritizes the rules to prevent conflicts when filtering. If two or more rules identify the same object based on a specified filter, the rule with the higher priority takes precedence.

For more information, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

', ], ], 'Progress' => [ 'base' => NULL, 'refs' => [ 'ProgressEvent$Details' => '

The Progress event details.

', ], ], 'ProgressEvent' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentEventStream$Progress' => '

The Progress Event.

', ], ], 'Protocol' => [ 'base' => NULL, 'refs' => [ 'Redirect$Protocol' => '

Protocol to use (http, https) when redirecting requests. The default is the protocol that is used in the original request.

', 'RedirectAllRequestsTo$Protocol' => '

Protocol to use (http, https) when redirecting requests. The default is the protocol that is used in the original request.

', ], ], 'PublicAccessBlockConfiguration' => [ 'base' => NULL, 'refs' => [ 'GetPublicAccessBlockOutput$PublicAccessBlockConfiguration' => '

The PublicAccessBlock configuration currently in effect for this Amazon S3 bucket.

', 'PutPublicAccessBlockRequest$PublicAccessBlockConfiguration' => '

The PublicAccessBlock configuration that you want to apply to this Amazon S3 bucket. You can enable the configuration options in any combination. For more information about when Amazon S3 considers a bucket or object public, see The Meaning of "Public" in the Amazon Simple Storage Service Developer Guide.
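A minimal sketch (AWS SDK for PHP; assumed client $s3, hypothetical bucket) that turns on all four settings:

    $s3->putPublicAccessBlock([
        'Bucket' => 'my-bucket',
        'PublicAccessBlockConfiguration' => [
            'BlockPublicAcls'       => true,
            'IgnorePublicAcls'      => true,
            'BlockPublicPolicy'     => true,
            'RestrictPublicBuckets' => true,
        ],
    ]);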

', ], ], 'PutBucketAccelerateConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketAnalyticsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketCorsRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketEncryptionRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketInventoryConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketLifecycleConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketLifecycleRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketLoggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketMetricsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketNotificationConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketNotificationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketPolicyRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketReplicationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketRequestPaymentRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketVersioningRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketWebsiteRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectAclOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLegalHoldOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLegalHoldRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLockConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLockConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectRetentionOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectRetentionRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutPublicAccessBlockRequest' => [ 'base' => NULL, 'refs' => [], ], 'QueueArn' => [ 'base' => NULL, 'refs' => [ 'QueueConfiguration$QueueArn' => '

The Amazon Resource Name (ARN) of the Amazon SQS queue to which Amazon S3 will publish a message when it detects events of the specified type.

', 'QueueConfigurationDeprecated$Queue' => NULL, ], ], 'QueueConfiguration' => [ 'base' => '

A container for specifying the configuration for publication of messages to an Amazon Simple Queue Service (Amazon SQS) queue when Amazon S3 detects specified events.

', 'refs' => [ 'QueueConfigurationList$member' => NULL, ], ], 'QueueConfigurationDeprecated' => [ 'base' => NULL, 'refs' => [ 'NotificationConfigurationDeprecated$QueueConfiguration' => NULL, ], ], 'QueueConfigurationList' => [ 'base' => NULL, 'refs' => [ 'NotificationConfiguration$QueueConfigurations' => NULL, ], ], 'Quiet' => [ 'base' => NULL, 'refs' => [ 'Delete$Quiet' => '

Element to enable quiet mode for the request. When you add this element, you must set its value to true.

', ], ], 'QuoteCharacter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$QuoteCharacter' => '

Value used for escaping where the field delimiter is part of the value.

', 'CSVOutput$QuoteCharacter' => '

The value used for escaping where the field delimiter is part of the value.

', ], ], 'QuoteEscapeCharacter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$QuoteEscapeCharacter' => '

The single character used for escaping the quote character inside an already escaped value.

', 'CSVOutput$QuoteEscapeCharacter' => '

The single character used for escaping the quote character inside an already escaped value.

', ], ], 'QuoteFields' => [ 'base' => NULL, 'refs' => [ 'CSVOutput$QuoteFields' => '

Indicates whether or not all output fields should be quoted.

', ], ], 'Range' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$Range' => '

Downloads the specified range of bytes of an object. For more information about the HTTP Range header, go to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.
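For example (sketch, AWS SDK for PHP; $s3 and names assumed):

    // Download only the first kilobyte of an object.
    $result = $s3->getObject([
        'Bucket' => 'my-bucket',
        'Key'    => 'big-file.bin',
        'Range'  => 'bytes=0-1023',
    ]);
    $firstKb = (string) $result['Body'];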

', 'HeadObjectRequest$Range' => '

Downloads the specified range of bytes of an object. For more information about the HTTP Range header, go to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.

', ], ], 'RecordDelimiter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$RecordDelimiter' => '

The value used to separate individual records.

', 'CSVOutput$RecordDelimiter' => '

The value used to separate individual records.

', 'JSONOutput$RecordDelimiter' => '

The value used to separate individual records in the output.

', ], ], 'RecordsEvent' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentEventStream$Records' => '

The Records Event.

', ], ], 'Redirect' => [ 'base' => NULL, 'refs' => [ 'RoutingRule$Redirect' => '

Container for redirect information. You can redirect requests to another host, to another page, or with another protocol. In the event of an error, you can specify a different error code to return.

', ], ], 'RedirectAllRequestsTo' => [ 'base' => NULL, 'refs' => [ 'GetBucketWebsiteOutput$RedirectAllRequestsTo' => NULL, 'WebsiteConfiguration$RedirectAllRequestsTo' => NULL, ], ], 'ReplaceKeyPrefixWith' => [ 'base' => NULL, 'refs' => [ 'Redirect$ReplaceKeyPrefixWith' => '

The object key prefix to use in the redirect request. For example, to redirect requests for all pages with prefix docs/ (objects in the docs/ folder) to documents/, you can set a condition block with KeyPrefixEquals set to docs/ and in the Redirect set ReplaceKeyPrefixWith to /documents. Not required if one of the siblings is present. Can be present only if ReplaceKeyWith is not provided.

', ], ], 'ReplaceKeyWith' => [ 'base' => NULL, 'refs' => [ 'Redirect$ReplaceKeyWith' => '

The specific object key to use in the redirect request. For example, redirect the request to error.html. Not required if one of the siblings is present. Can be present only if ReplaceKeyPrefixWith is not provided.

', ], ], 'ReplicaKmsKeyID' => [ 'base' => NULL, 'refs' => [ 'EncryptionConfiguration$ReplicaKmsKeyID' => '

The ID of the AWS KMS key for the AWS Region where the destination bucket resides. Amazon S3 uses this key to encrypt the replica object.

', ], ], 'ReplicationConfiguration' => [ 'base' => '

A container for replication rules. You can add up to 1,000 rules. The maximum size of a replication configuration is 2 MB.

', 'refs' => [ 'GetBucketReplicationOutput$ReplicationConfiguration' => NULL, 'PutBucketReplicationRequest$ReplicationConfiguration' => NULL, ], ], 'ReplicationRule' => [ 'base' => '

A container for information about a specific replication rule.

', 'refs' => [ 'ReplicationRules$member' => NULL, ], ], 'ReplicationRuleAndOperator' => [ 'base' => NULL, 'refs' => [ 'ReplicationRuleFilter$And' => '

A container for specifying rule filters. The filters determine the subset of objects to which the rule applies. This element is required only if you specify more than one filter.

', ], ], 'ReplicationRuleFilter' => [ 'base' => '

A filter that identifies the subset of objects to which the replication rule applies. A Filter must specify exactly one Prefix, Tag, or an And child element.

', 'refs' => [ 'ReplicationRule$Filter' => NULL, ], ], 'ReplicationRuleStatus' => [ 'base' => NULL, 'refs' => [ 'ReplicationRule$Status' => '

If status isn\'t enabled, the rule is ignored.

', ], ], 'ReplicationRules' => [ 'base' => NULL, 'refs' => [ 'ReplicationConfiguration$Rules' => '

A container for one or more replication rules. A replication configuration must have at least one rule and can contain a maximum of 1,000 rules.

', ], ], 'ReplicationStatus' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$ReplicationStatus' => NULL, 'HeadObjectOutput$ReplicationStatus' => NULL, ], ], 'RequestCharged' => [ 'base' => '

If present, indicates that the requester was successfully charged for the request.

', 'refs' => [ 'AbortMultipartUploadOutput$RequestCharged' => NULL, 'CompleteMultipartUploadOutput$RequestCharged' => NULL, 'CopyObjectOutput$RequestCharged' => NULL, 'CreateMultipartUploadOutput$RequestCharged' => NULL, 'DeleteObjectOutput$RequestCharged' => NULL, 'DeleteObjectsOutput$RequestCharged' => NULL, 'GetObjectAclOutput$RequestCharged' => NULL, 'GetObjectOutput$RequestCharged' => NULL, 'GetObjectTorrentOutput$RequestCharged' => NULL, 'HeadObjectOutput$RequestCharged' => NULL, 'ListPartsOutput$RequestCharged' => NULL, 'PutObjectAclOutput$RequestCharged' => NULL, 'PutObjectLegalHoldOutput$RequestCharged' => NULL, 'PutObjectLockConfigurationOutput$RequestCharged' => NULL, 'PutObjectOutput$RequestCharged' => NULL, 'PutObjectRetentionOutput$RequestCharged' => NULL, 'RestoreObjectOutput$RequestCharged' => NULL, 'UploadPartCopyOutput$RequestCharged' => NULL, 'UploadPartOutput$RequestCharged' => NULL, ], ], 'RequestPayer' => [ 'base' => '

Confirms that the requester knows that she or he will be charged for the request. Bucket owners need not specify this parameter in their requests. Documentation on downloading objects from requester pays buckets can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
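A sketch of a requester-pays download with the AWS SDK for PHP (assumed client $s3; bucket and key names hypothetical):

    $result = $s3->getObject([
        'Bucket'       => 'shared-dataset-bucket',
        'Key'          => 'data.csv',
        'RequestPayer' => 'requester',  // caller accepts the request and transfer charges
    ]);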

', 'refs' => [ 'AbortMultipartUploadRequest$RequestPayer' => NULL, 'CompleteMultipartUploadRequest$RequestPayer' => NULL, 'CopyObjectRequest$RequestPayer' => NULL, 'CreateMultipartUploadRequest$RequestPayer' => NULL, 'DeleteObjectRequest$RequestPayer' => NULL, 'DeleteObjectsRequest$RequestPayer' => NULL, 'GetObjectAclRequest$RequestPayer' => NULL, 'GetObjectLegalHoldRequest$RequestPayer' => NULL, 'GetObjectRequest$RequestPayer' => NULL, 'GetObjectRetentionRequest$RequestPayer' => NULL, 'GetObjectTorrentRequest$RequestPayer' => NULL, 'HeadObjectRequest$RequestPayer' => NULL, 'ListObjectsRequest$RequestPayer' => '

Confirms that the requester knows that she or he will be charged for the list objects request. Bucket owners need not specify this parameter in their requests.

', 'ListObjectsV2Request$RequestPayer' => '

Confirms that the requester knows that she or he will be charged for the list objects request in V2 style. Bucket owners need not specify this parameter in their requests.

', 'ListPartsRequest$RequestPayer' => NULL, 'PutObjectAclRequest$RequestPayer' => NULL, 'PutObjectLegalHoldRequest$RequestPayer' => NULL, 'PutObjectLockConfigurationRequest$RequestPayer' => NULL, 'PutObjectRequest$RequestPayer' => NULL, 'PutObjectRetentionRequest$RequestPayer' => NULL, 'RestoreObjectRequest$RequestPayer' => NULL, 'UploadPartCopyRequest$RequestPayer' => NULL, 'UploadPartRequest$RequestPayer' => NULL, ], ], 'RequestPaymentConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketRequestPaymentRequest$RequestPaymentConfiguration' => NULL, ], ], 'RequestProgress' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentRequest$RequestProgress' => '

Specifies if periodic request progress information should be enabled.

', ], ], 'ResponseCacheControl' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseCacheControl' => '

Sets the Cache-Control header of the response.

', ], ], 'ResponseContentDisposition' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentDisposition' => '

Sets the Content-Disposition header of the response.

', ], ], 'ResponseContentEncoding' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentEncoding' => '

Sets the Content-Encoding header of the response.

', ], ], 'ResponseContentLanguage' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentLanguage' => '

Sets the Content-Language header of the response.

', ], ], 'ResponseContentType' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentType' => '

Sets the Content-Type header of the response.

', ], ], 'ResponseExpires' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseExpires' => '

Sets the Expires header of the response.

', ], ], 'Restore' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$Restore' => '

Provides information about the object restoration operation and the expiration time of the restored object copy.

', 'HeadObjectOutput$Restore' => '

Provides information about the object restoration operation and the expiration time of the restored object copy.

', ], ], 'RestoreObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'RestoreObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'RestoreOutputPath' => [ 'base' => NULL, 'refs' => [ 'RestoreObjectOutput$RestoreOutputPath' => '

Indicates the path in the provided S3 output location where Select results will be restored to.

', ], ], 'RestoreRequest' => [ 'base' => '

Container for restore job parameters.
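A minimal restore sketch (AWS SDK for PHP; $s3 and names assumed):

    // Restore an archived object for 7 days using the Standard retrieval tier.
    $s3->restoreObject([
        'Bucket'         => 'my-bucket',
        'Key'            => 'archive/2018.tar',
        'RestoreRequest' => [
            'Days'                 => 7,
            'GlacierJobParameters' => ['Tier' => 'Standard'],
        ],
    ]);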

', 'refs' => [ 'RestoreObjectRequest$RestoreRequest' => NULL, ], ], 'RestoreRequestType' => [ 'base' => NULL, 'refs' => [ 'RestoreRequest$Type' => '

Type of restore request.

', ], ], 'Role' => [ 'base' => NULL, 'refs' => [ 'ReplicationConfiguration$Role' => '

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that Amazon S3 can assume when replicating the objects.

', ], ], 'RoutingRule' => [ 'base' => NULL, 'refs' => [ 'RoutingRules$member' => NULL, ], ], 'RoutingRules' => [ 'base' => NULL, 'refs' => [ 'GetBucketWebsiteOutput$RoutingRules' => NULL, 'WebsiteConfiguration$RoutingRules' => NULL, ], ], 'Rule' => [ 'base' => NULL, 'refs' => [ 'Rules$member' => NULL, ], ], 'Rules' => [ 'base' => NULL, 'refs' => [ 'GetBucketLifecycleOutput$Rules' => NULL, 'LifecycleConfiguration$Rules' => NULL, ], ], 'S3KeyFilter' => [ 'base' => '

A container for object key name prefix and suffix filtering rules.

', 'refs' => [ 'NotificationConfigurationFilter$Key' => NULL, ], ], 'S3Location' => [ 'base' => '

Describes an S3 location that will receive the results of the restore request.

', 'refs' => [ 'OutputLocation$S3' => '

Describes an S3 location that will receive the results of the restore request.

', ], ], 'SSECustomerAlgorithm' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'CopyObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'CreateMultipartUploadOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'CreateMultipartUploadRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'GetObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'GetObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'HeadObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'HeadObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'PutObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'PutObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'SelectObjectContentRequest$SSECustomerAlgorithm' => '

The SSE Algorithm used to encrypt the object. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

', 'UploadPartCopyOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'UploadPartCopyRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'UploadPartOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'UploadPartRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', ], ], 'SSECustomerKey' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

', 'CreateMultipartUploadRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

', 'GetObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

', 'HeadObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.

', 'PutObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header.
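A hedged SSE-C sketch with the AWS SDK for PHP ($s3 assumed; the client is expected to base64-encode the key and add the MD5 digest via its SSE-C middleware, but verify this for your SDK version; names hypothetical):

    // Upload with a customer-provided 256-bit key; S3 never stores the key.
    $key = random_bytes(32);
    $s3->putObject([
        'Bucket'               => 'my-bucket',
        'Key'                  => 'secret.txt',
        'Body'                 => 'hello',
        'SSECustomerAlgorithm' => 'AES256',
        'SSECustomerKey'       => $key,
    ]);
    // The same algorithm and key must accompany every later GET or HEAD.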

', 'SelectObjectContentRequest$SSECustomerKey' => '

The SSE Customer Key. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

', 'UploadPartCopyRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header. This must be the same encryption key specified in the initiate multipart upload request.

', 'UploadPartRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side​-encryption​-customer-algorithm header. This must be the same encryption key specified in the initiate multipart upload request.

', ], ], 'SSECustomerKeyMD5' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'CopyObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'CreateMultipartUploadOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'CreateMultipartUploadRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'GetObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'GetObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'HeadObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'HeadObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'PutObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'PutObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'SelectObjectContentRequest$SSECustomerKeyMD5' => '

The SSE Customer Key MD5. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

', 'UploadPartCopyOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'UploadPartCopyRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'UploadPartOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'UploadPartRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', ], ], 'SSEKMS' => [ 'base' => '

Specifies the use of SSE-KMS to encrypt delivered Inventory reports.

', 'refs' => [ 'InventoryEncryption$SSEKMS' => '

Specifies the use of SSE-KMS to encrypt delivered Inventory reports.

', ], ], 'SSEKMSKeyId' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'CopyObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'CopyObjectRequest$SSEKMSKeyId' => '

Specifies the AWS KMS key ID to use for object encryption. All GET and PUT requests for an object protected by AWS KMS will fail if not made via SSL or using SigV4. Documentation on configuring any of the officially supported AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version

', 'CreateMultipartUploadOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'CreateMultipartUploadRequest$SSEKMSKeyId' => '

Specifies the AWS KMS key ID to use for object encryption. All GET and PUT requests for an object protected by AWS KMS will fail if not made via SSL or using SigV4. Documentation on configuring any of the officially supported AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version

', 'Encryption$KMSKeyId' => '

If the encryption type is aws:kms, this optional value specifies the AWS KMS key ID to use for encryption of job results.

', 'GetObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'HeadObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'PutObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'PutObjectRequest$SSEKMSKeyId' => '

Specifies the AWS KMS key ID to use for object encryption. All GET and PUT requests for an object protected by AWS KMS will fail if not made via SSL or using SigV4. Documentation on configuring any of the officially supported AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
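For example (sketch; $s3 assumed, bucket and key ARN hypothetical; the request must be made over SSL with SigV4):

    $s3->putObject([
        'Bucket'               => 'my-bucket',
        'Key'                  => 'report.csv',
        'Body'                 => "col1,col2\n1,2\n",
        'ServerSideEncryption' => 'aws:kms',
        'SSEKMSKeyId'          => 'arn:aws:kms:us-east-1:111122223333:key/example-id',
    ]);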

', 'SSEKMS$KeyId' => '

Specifies the ID of the AWS Key Management Service (KMS) master encryption key to use for encrypting Inventory reports.

', 'ServerSideEncryptionByDefault$KMSMasterKeyID' => '

KMS master key ID to use for the default encryption. This parameter is allowed if SSEAlgorithm is aws:kms.

', 'UploadPartCopyOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'UploadPartOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', ], ], 'SSES3' => [ 'base' => '

Specifies the use of SSE-S3 to encrypt delivered Inventory reports.

', 'refs' => [ 'InventoryEncryption$SSES3' => '

Specifies the use of SSE-S3 to encrypt delivered Inventory reports.

', ], ], 'SelectObjectContentEventStream' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentOutput$Payload' => NULL, ], ], 'SelectObjectContentOutput' => [ 'base' => NULL, 'refs' => [], ], 'SelectObjectContentRequest' => [ 'base' => '

Request to filter the contents of an Amazon S3 object based on a simple Structured Query Language (SQL) statement. In the request, along with the SQL expression, you must specify a data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data into records. It returns only records that match the specified SQL expression. You must also specify the data serialization format for the response. For more information, see S3Select API Documentation.
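A minimal S3 Select sketch with the AWS SDK for PHP (assumed client $s3; names hypothetical; event-stream handling shown as commonly documented for SDK v3):

    $result = $s3->selectObjectContent([
        'Bucket'              => 'my-bucket',
        'Key'                 => 'data.csv',
        'Expression'          => 'SELECT s._1 FROM S3Object s',
        'ExpressionType'      => 'SQL',
        'InputSerialization'  => ['CSV' => ['FileHeaderInfo' => 'NONE']],
        'OutputSerialization' => ['CSV' => []],
    ]);
    foreach ($result['Payload'] as $event) {
        if (isset($event['Records'])) {
            echo $event['Records']['Payload'];   // matching records, streamed
        }
    }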

', 'refs' => [], ], 'SelectParameters' => [ 'base' => '

Describes the parameters for Select job types.

', 'refs' => [ 'RestoreRequest$SelectParameters' => '

Describes the parameters for Select job types.

', ], ], 'ServerSideEncryption' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CopyObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CopyObjectRequest$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CreateMultipartUploadOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CreateMultipartUploadRequest$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'Encryption$EncryptionType' => '

The server-side encryption algorithm used when storing job results in Amazon S3 (e.g., AES256, aws:kms).

', 'GetObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'HeadObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'PutObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'PutObjectRequest$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'ServerSideEncryptionByDefault$SSEAlgorithm' => '

Server-side encryption algorithm to use for the default encryption.

', 'UploadPartCopyOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'UploadPartOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', ], ], 'ServerSideEncryptionByDefault' => [ 'base' => '

Describes the default server-side encryption to apply to new objects in the bucket. If a PUT Object request does not specify any server-side encryption, this default encryption is applied.
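For example, a sketch that makes SSE-S3 (AES256) the bucket default (AWS SDK for PHP; $s3 assumed, bucket hypothetical):

    $s3->putBucketEncryption([
        'Bucket' => 'my-bucket',
        'ServerSideEncryptionConfiguration' => [
            'Rules' => [
                ['ApplyServerSideEncryptionByDefault' => ['SSEAlgorithm' => 'AES256']],
            ],
        ],
    ]);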

', 'refs' => [ 'ServerSideEncryptionRule$ApplyServerSideEncryptionByDefault' => '

Describes the default server-side encryption to apply to new objects in the bucket. If a PUT Object request does not specify any server-side encryption, this default encryption is applied.

', ], ], 'ServerSideEncryptionConfiguration' => [ 'base' => '

Container for server-side encryption configuration rules. Currently S3 supports one rule only.

', 'refs' => [ 'GetBucketEncryptionOutput$ServerSideEncryptionConfiguration' => NULL, 'PutBucketEncryptionRequest$ServerSideEncryptionConfiguration' => NULL, ], ], 'ServerSideEncryptionRule' => [ 'base' => '

Container for information about a particular server-side encryption configuration rule.

', 'refs' => [ 'ServerSideEncryptionRules$member' => NULL, ], ], 'ServerSideEncryptionRules' => [ 'base' => NULL, 'refs' => [ 'ServerSideEncryptionConfiguration$Rules' => '

Container for information about a particular server-side encryption configuration rule.

', ], ], 'Setting' => [ 'base' => NULL, 'refs' => [ 'PublicAccessBlockConfiguration$BlockPublicAcls' => '

Specifies whether Amazon S3 should block public access control lists (ACLs) for this bucket and objects in this bucket. Setting this element to TRUE causes PUT Bucket acl and PUT Object acl calls to fail if the specified ACL is public, and PUT Object calls to fail if the request includes a public ACL.

Enabling this setting doesn\'t affect existing policies or ACLs.

', 'PublicAccessBlockConfiguration$IgnorePublicAcls' => '

Specifies whether Amazon S3 should ignore public ACLs for this bucket and objects in this bucket. Setting this element to TRUE causes Amazon S3 to ignore all public ACLs on this bucket and objects in this bucket.

Enabling this setting doesn\'t affect the persistence of any existing ACLs and doesn\'t prevent new public ACLs from being set.

', 'PublicAccessBlockConfiguration$BlockPublicPolicy' => '

Specifies whether Amazon S3 should block public bucket policies for this bucket. Setting this element to TRUE causes Amazon S3 to reject calls to PUT Bucket policy if the specified bucket policy allows public access.

Enabling this setting doesn\'t affect existing bucket policies.

', 'PublicAccessBlockConfiguration$RestrictPublicBuckets' => '

Specifies whether Amazon S3 should restrict public bucket policies for this bucket. Setting this element to TRUE restricts access to this bucket to only AWS services and authorized users within this account if the bucket has a public policy.

Enabling this setting doesn\'t affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked.

', ], ], 'Size' => [ 'base' => NULL, 'refs' => [ 'Object$Size' => NULL, 'ObjectVersion$Size' => '

Size in bytes of the object.

', 'Part$Size' => '

Size in bytes of the uploaded part data.

', ], ], 'SourceSelectionCriteria' => [ 'base' => '

A container for filters that define which source objects should be replicated.

', 'refs' => [ 'ReplicationRule$SourceSelectionCriteria' => '

A container that describes additional filters for identifying the source objects that you want to replicate. You can choose to enable or disable the replication of these objects. Currently, Amazon S3 supports only the filter that you can specify for objects created with server-side encryption using an AWS KMS-Managed Key (SSE-KMS).

If you want Amazon S3 to replicate objects created with server-side encryption using AWS KMS-Managed Keys, include the SseKmsEncryptedObjects element.

', ], ], 'SseKmsEncryptedObjects' => [ 'base' => '

A container for filter information for the selection of S3 objects encrypted with AWS KMS.

', 'refs' => [ 'SourceSelectionCriteria$SseKmsEncryptedObjects' => '

A container for filter information for the selection of S3 objects encrypted with AWS KMS. If you include SourceSelectionCriteria in the replication configuration, this element is required.

', ], ], 'SseKmsEncryptedObjectsStatus' => [ 'base' => NULL, 'refs' => [ 'SseKmsEncryptedObjects$Status' => '

If the status is not Enabled, replication for S3 objects encrypted with AWS KMS is disabled.

', ], ], 'StartAfter' => [ 'base' => NULL, 'refs' => [ 'ListObjectsV2Output$StartAfter' => '

StartAfter is where you want Amazon S3 to start listing from. Amazon S3 starts listing after this specified key. StartAfter can be any key in the bucket.

', 'ListObjectsV2Request$StartAfter' => '

StartAfter is where you want Amazon S3 to start listing from. Amazon S3 starts listing after this specified key. StartAfter can be any key in the bucket.

', ], ], 'Stats' => [ 'base' => NULL, 'refs' => [ 'StatsEvent$Details' => '

The Stats event details.

', ], ], 'StatsEvent' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentEventStream$Stats' => '

The Stats Event.

', ], ], 'StorageClass' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$StorageClass' => '

The type of storage to use for the object. Defaults to \'STANDARD\'.

', 'CreateMultipartUploadRequest$StorageClass' => '

The type of storage to use for the object. Defaults to \'STANDARD\'.

', 'Destination$StorageClass' => '

The class of storage used to store the object. By default Amazon S3 uses storage class of the source object when creating a replica.

', 'GetObjectOutput$StorageClass' => NULL, 'HeadObjectOutput$StorageClass' => NULL, 'ListPartsOutput$StorageClass' => '

The class of storage used to store the object.

', 'MultipartUpload$StorageClass' => '

The class of storage used to store the object.

', 'PutObjectRequest$StorageClass' => '

The type of storage to use for the object. Defaults to \'STANDARD\'.

', 'S3Location$StorageClass' => '

The class of storage used to store the restore results.

', ], ], 'StorageClassAnalysis' => [ 'base' => NULL, 'refs' => [ 'AnalyticsConfiguration$StorageClassAnalysis' => '

If present, it indicates that data related to access patterns will be collected and made available to analyze the tradeoffs between different storage classes.

', ], ], 'StorageClassAnalysisDataExport' => [ 'base' => NULL, 'refs' => [ 'StorageClassAnalysis$DataExport' => '

A container used to describe how data related to the storage class analysis should be exported.

', ], ], 'StorageClassAnalysisSchemaVersion' => [ 'base' => NULL, 'refs' => [ 'StorageClassAnalysisDataExport$OutputSchemaVersion' => '

The version of the output schema to use when exporting data. Must be V_1.

', ], ], 'Suffix' => [ 'base' => NULL, 'refs' => [ 'IndexDocument$Suffix' => '

A suffix that is appended to a request that is for a directory on the website endpoint (e.g., if the suffix is index.html and you make a request to samplebucket/images/, the data that is returned will be for the object with the key name images/index.html). The suffix must not be empty and must not include a slash character.

', ], ], 'Tag' => [ 'base' => NULL, 'refs' => [ 'AnalyticsFilter$Tag' => '

The tag to use when evaluating an analytics filter.

', 'LifecycleRuleFilter$Tag' => '

This tag must exist in the object\'s tag set in order for the rule to apply.

', 'MetricsFilter$Tag' => '

The tag used when evaluating a metrics filter.

', 'ReplicationRuleFilter$Tag' => '

A container for specifying a tag key and value.

The rule applies only to objects that have the tag in their tag set.

', 'TagSet$member' => NULL, ], ], 'TagCount' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$TagCount' => '

The number of tags, if any, on the object.

', ], ], 'TagSet' => [ 'base' => NULL, 'refs' => [ 'AnalyticsAndOperator$Tags' => '

The list of tags to use when evaluating an AND predicate.

', 'GetBucketTaggingOutput$TagSet' => NULL, 'GetObjectTaggingOutput$TagSet' => NULL, 'LifecycleRuleAndOperator$Tags' => '

All of these tags must exist in the object\'s tag set in order for the rule to apply.

', 'MetricsAndOperator$Tags' => '

The list of tags used when evaluating an AND predicate.

', 'ReplicationRuleAndOperator$Tags' => NULL, 'Tagging$TagSet' => NULL, ], ], 'Tagging' => [ 'base' => NULL, 'refs' => [ 'PutBucketTaggingRequest$Tagging' => NULL, 'PutObjectTaggingRequest$Tagging' => NULL, 'S3Location$Tagging' => '

The tag-set that is applied to the restore results.

', ], ], 'TaggingDirective' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$TaggingDirective' => '

Specifies whether the object tag-set is copied from the source object or replaced with the tag-set provided in the request.

', ], ], 'TaggingHeader' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$Tagging' => '

The tag-set for the destination object; this value must be used in conjunction with the TaggingDirective. The tag-set must be encoded as URL query parameters.

', 'CreateMultipartUploadRequest$Tagging' => '

The tag-set for the object. The tag-set must be encoded as URL query parameters.

', 'PutObjectRequest$Tagging' => '

The tag-set for the object. The tag-set must be encoded as URL query parameters. (For example, "Key1=Value1")
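For example (sketch; $s3 assumed, names and path hypothetical):

    // Tag the object at upload time; the tag-set is URL-encoded key=value pairs.
    $s3->putObject([
        'Bucket'  => 'my-bucket',
        'Key'     => 'photo.jpg',
        'Body'    => file_get_contents('/tmp/photo.jpg'),  // illustrative local path
        'Tagging' => 'project=alpha&confidential=true',
    ]);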

', ], ], 'TargetBucket' => [ 'base' => NULL, 'refs' => [ 'LoggingEnabled$TargetBucket' => '

Specifies the bucket where you want Amazon S3 to store server access logs. You can have your logs delivered to any bucket that you own, including the same bucket that is being logged. You can also configure multiple buckets to deliver their logs to the same target bucket. In this case you should choose a different TargetPrefix for each source bucket so that the delivered log files can be distinguished by key.

', ], ], 'TargetGrant' => [ 'base' => NULL, 'refs' => [ 'TargetGrants$member' => NULL, ], ], 'TargetGrants' => [ 'base' => NULL, 'refs' => [ 'LoggingEnabled$TargetGrants' => NULL, ], ], 'TargetPrefix' => [ 'base' => NULL, 'refs' => [ 'LoggingEnabled$TargetPrefix' => '

This element lets you specify a prefix for the keys that the log files will be stored under.

', ], ], 'Tier' => [ 'base' => NULL, 'refs' => [ 'GlacierJobParameters$Tier' => '

Glacier retrieval tier at which the restore will be processed.

', 'RestoreRequest$Tier' => '

Glacier retrieval tier at which the restore will be processed.

', ], ], 'Token' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$ContinuationToken' => '

The ContinuationToken that represents where this request began.

', 'ListBucketAnalyticsConfigurationsRequest$ContinuationToken' => '

The ContinuationToken that represents a placeholder from where this request should begin.

', 'ListBucketInventoryConfigurationsOutput$ContinuationToken' => '

If sent in the request, the marker that is used as a starting point for this inventory configuration list response.

', 'ListBucketInventoryConfigurationsRequest$ContinuationToken' => '

The marker used to continue an inventory configuration listing that has been truncated. Use the NextContinuationToken from a previously truncated list response to continue the listing. The continuation token is an opaque value that Amazon S3 understands.

', 'ListBucketMetricsConfigurationsOutput$ContinuationToken' => '

The marker that is used as a starting point for this metrics configuration list response. This value is present if it was sent in the request.

', 'ListBucketMetricsConfigurationsRequest$ContinuationToken' => '

The marker that is used to continue a metrics configuration listing that has been truncated. Use the NextContinuationToken from a previously truncated list response to continue the listing. The continuation token is an opaque value that Amazon S3 understands.

', 'ListObjectsV2Output$ContinuationToken' => '

ContinuationToken indicates to Amazon S3 that the list is being continued on this bucket with a token. ContinuationToken is obfuscated and is not a real key.

', 'ListObjectsV2Request$ContinuationToken' => '

ContinuationToken indicates to Amazon S3 that the list is being continued on this bucket with a token. ContinuationToken is obfuscated and is not a real key.
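A pagination sketch (AWS SDK for PHP; $s3 assumed, bucket hypothetical). The SDK also offers $s3->getPaginator('ListObjectsV2', [...]) to wrap this loop:

    $params = ['Bucket' => 'my-bucket'];
    do {
        $result = $s3->listObjectsV2($params);
        foreach ($result['Contents'] ?? [] as $object) {
            echo $object['Key'], PHP_EOL;
        }
        // Replay the opaque token to resume where the last page stopped.
        $params['ContinuationToken'] = $result['NextContinuationToken'];
    } while ($result['IsTruncated']);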

', ], ], 'TopicArn' => [ 'base' => NULL, 'refs' => [ 'TopicConfiguration$TopicArn' => '

The Amazon Resource Name (ARN) of the Amazon SNS topic to which Amazon S3 will publish a message when it detects events of the specified type.

', 'TopicConfigurationDeprecated$Topic' => '

Amazon SNS topic to which Amazon S3 will publish a message to report the specified events for the bucket.

', ], ], 'TopicConfiguration' => [ 'base' => '

A container for specifying the configuration for publication of messages to an Amazon Simple Notification Service (Amazon SNS) topic when Amazon S3 detects specified events.

', 'refs' => [ 'TopicConfigurationList$member' => NULL, ], ], 'TopicConfigurationDeprecated' => [ 'base' => NULL, 'refs' => [ 'NotificationConfigurationDeprecated$TopicConfiguration' => NULL, ], ], 'TopicConfigurationList' => [ 'base' => NULL, 'refs' => [ 'NotificationConfiguration$TopicConfigurations' => NULL, ], ], 'Transition' => [ 'base' => NULL, 'refs' => [ 'Rule$Transition' => NULL, 'TransitionList$member' => NULL, ], ], 'TransitionList' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$Transitions' => NULL, ], ], 'TransitionStorageClass' => [ 'base' => NULL, 'refs' => [ 'NoncurrentVersionTransition$StorageClass' => '

The class of storage used to store the object.

', 'Transition$StorageClass' => '

The class of storage used to store the object.

', ], ], 'Type' => [ 'base' => NULL, 'refs' => [ 'Grantee$Type' => '

Type of grantee

', ], ], 'URI' => [ 'base' => NULL, 'refs' => [ 'Grantee$URI' => '

URI of the grantee group.

', ], ], 'UploadIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$UploadIdMarker' => '

Upload ID after which listing began.

', 'ListMultipartUploadsRequest$UploadIdMarker' => '

Together with key-marker, specifies the multipart upload after which listing should begin. If key-marker is not specified, the upload-id-marker parameter is ignored.

', ], ], 'UploadPartCopyOutput' => [ 'base' => NULL, 'refs' => [], ], 'UploadPartCopyRequest' => [ 'base' => NULL, 'refs' => [], ], 'UploadPartOutput' => [ 'base' => NULL, 'refs' => [], ], 'UploadPartRequest' => [ 'base' => NULL, 'refs' => [], ], 'UserMetadata' => [ 'base' => NULL, 'refs' => [ 'S3Location$UserMetadata' => '

A list of metadata to store with the restore results in S3.

', ], ], 'Value' => [ 'base' => NULL, 'refs' => [ 'Tag$Value' => '

Value of the tag.

', ], ], 'VersionIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$VersionIdMarker' => NULL, 'ListObjectVersionsRequest$VersionIdMarker' => '

Specifies the object version you want to start listing from.

', ], ], 'VersioningConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketVersioningRequest$VersioningConfiguration' => NULL, ], ], 'WebsiteConfiguration' => [ 'base' => NULL, 'refs' => [ 'PutBucketWebsiteRequest$WebsiteConfiguration' => NULL, ], ], 'WebsiteRedirectLocation' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'CreateMultipartUploadRequest$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'GetObjectOutput$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'HeadObjectOutput$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'PutObjectRequest$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', ], ], 'Years' => [ 'base' => NULL, 'refs' => [ 'DefaultRetention$Years' => '

The number of years that you want to specify for the default retention period.

', ], ], ],]; +return [ 'version' => '2.0', 'service' => '

', 'operations' => [ 'AbortMultipartUpload' => '

Aborts a multipart upload.

To verify that all parts have been removed, so you don\'t get charged for the part storage, you should call the List Parts operation and ensure the parts list is empty.
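A sketch of the abort-then-verify pattern described above, with hypothetical names; note that calling ListParts after the upload is fully gone may fail with NoSuchUpload rather than return an empty list:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;
use Aws\S3\Exception\S3Exception;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$uploadId = 'EXAMPLE_UPLOAD_ID'; // hypothetical ID from CreateMultipartUpload
$args = ['Bucket' => 'my-bucket', 'Key' => 'big-file', 'UploadId' => $uploadId];

$s3->abortMultipartUpload($args);

// Verify no parts remain; an already-removed upload raises NoSuchUpload.
try {
    $parts = $s3->listParts($args);
    assert(empty($parts['Parts']));
} catch (S3Exception $e) {
    assert($e->getAwsErrorCode() === 'NoSuchUpload');
}
```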

', 'CompleteMultipartUpload' => '

Completes a multipart upload by assembling previously uploaded parts.

', 'CopyObject' => '

Creates a copy of an object that is already stored in Amazon S3.

', 'CreateBucket' => '

Creates a new bucket.

', 'CreateMultipartUpload' => '

Initiates a multipart upload and returns an upload ID.

Note: After you initiate a multipart upload and upload one or more parts, you must either complete or abort the multipart upload in order to stop getting charged for storage of the uploaded parts. Only after you complete or abort the multipart upload does Amazon S3 free up the parts storage and stop charging you for it.
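A compact sketch of the full initiate → upload → complete sequence; the names, local file path, and single-part layout are hypothetical, and error handling plus an abort path are omitted for brevity:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$init = $s3->createMultipartUpload(['Bucket' => 'my-bucket', 'Key' => 'big-file']);
$uploadId = $init['UploadId'];

// Every part except the last must be at least 5 MB.
$part = $s3->uploadPart([
    'Bucket'     => 'my-bucket',
    'Key'        => 'big-file',
    'UploadId'   => $uploadId,
    'PartNumber' => 1,
    'Body'       => fopen('/tmp/big-file', 'r'),
]);

// Complete (or abort) so Amazon S3 stops charging for the stored parts.
$s3->completeMultipartUpload([
    'Bucket'   => 'my-bucket',
    'Key'      => 'big-file',
    'UploadId' => $uploadId,
    'MultipartUpload' => [
        'Parts' => [['PartNumber' => 1, 'ETag' => $part['ETag']]],
    ],
]);
```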

', 'DeleteBucket' => '

Deletes the bucket. All objects (including all object versions and Delete Markers) in the bucket must be deleted before the bucket itself can be deleted.

', 'DeleteBucketAnalyticsConfiguration' => '

Deletes an analytics configuration for the bucket (specified by the analytics configuration ID).

', 'DeleteBucketCors' => '

Deletes the CORS configuration information set for the bucket.

', 'DeleteBucketEncryption' => '

Deletes the server-side encryption configuration from the bucket.

', 'DeleteBucketInventoryConfiguration' => '

Deletes an inventory configuration (identified by the inventory ID) from the bucket.

', 'DeleteBucketLifecycle' => '

Deletes the lifecycle configuration from the bucket.

', 'DeleteBucketMetricsConfiguration' => '

Deletes a metrics configuration (specified by the metrics configuration ID) from the bucket.

', 'DeleteBucketPolicy' => '

Deletes the policy from the bucket.

', 'DeleteBucketReplication' => '

Deletes the replication configuration from the bucket. For information about replication configuration, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

', 'DeleteBucketTagging' => '

Deletes the tags from the bucket.

', 'DeleteBucketWebsite' => '

This operation removes the website configuration from the bucket.

', 'DeleteObject' => '

Removes the null version (if there is one) of an object and inserts a delete marker, which becomes the latest version of the object. If there isn\'t a null version, Amazon S3 does not remove any objects.

', 'DeleteObjectTagging' => '

Removes the tag-set from an existing object.

', 'DeleteObjects' => '

This operation enables you to delete multiple objects from a bucket using a single HTTP request. You may specify up to 1000 keys.
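A sketch of batching keys into one request (names hypothetical; up to 1000 entries may go in the Objects list per call):

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$result = $s3->deleteObjects([
    'Bucket' => 'my-bucket',
    'Delete' => [
        'Objects' => [
            ['Key' => 'logs/2019-01-01.log'],
            ['Key' => 'logs/2019-01-02.log'],
        ],
        'Quiet' => true, // report only errors, not every deleted key
    ],
]);
```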

', 'DeletePublicAccessBlock' => '

Removes the PublicAccessBlock configuration from an Amazon S3 bucket.

', 'GetBucketAccelerateConfiguration' => '

Returns the accelerate configuration of a bucket.

', 'GetBucketAcl' => '

Gets the access control policy for the bucket.

', 'GetBucketAnalyticsConfiguration' => '

Gets an analytics configuration for the bucket (specified by the analytics configuration ID).

', 'GetBucketCors' => '

Returns the CORS configuration for the bucket.

', 'GetBucketEncryption' => '

Returns the server-side encryption configuration of a bucket.

', 'GetBucketInventoryConfiguration' => '

Returns an inventory configuration (identified by the inventory ID) from the bucket.

', 'GetBucketLifecycle' => '

No longer used, see the GetBucketLifecycleConfiguration operation.

', 'GetBucketLifecycleConfiguration' => '

Returns the lifecycle configuration information set on the bucket.

', 'GetBucketLocation' => '

Returns the region the bucket resides in.

', 'GetBucketLogging' => '

Returns the logging status of a bucket and the permissions users have to view and modify that status. To use GET, you must be the bucket owner.

', 'GetBucketMetricsConfiguration' => '

Gets a metrics configuration (specified by the metrics configuration ID) from the bucket.

', 'GetBucketNotification' => '

No longer used, see the GetBucketNotificationConfiguration operation.

', 'GetBucketNotificationConfiguration' => '

Returns the notification configuration of a bucket.

', 'GetBucketPolicy' => '

Returns the policy of a specified bucket.

', 'GetBucketPolicyStatus' => '

Retrieves the policy status for an Amazon S3 bucket, indicating whether the bucket is public.

', 'GetBucketReplication' => '

Returns the replication configuration of a bucket.

It can take a while to propagate the put or delete of a replication configuration to all Amazon S3 systems. Therefore, a get request soon after a put or delete can return a wrong result.

', 'GetBucketRequestPayment' => '

Returns the request payment configuration of a bucket.

', 'GetBucketTagging' => '

Returns the tag set associated with the bucket.

', 'GetBucketVersioning' => '

Returns the versioning state of a bucket.

', 'GetBucketWebsite' => '

Returns the website configuration for a bucket.

', 'GetObject' => '

Retrieves objects from Amazon S3.

', 'GetObjectAcl' => '

Returns the access control list (ACL) of an object.

', 'GetObjectLegalHold' => '

Gets an object\'s current Legal Hold status.

', 'GetObjectLockConfiguration' => '

Gets the Object Lock configuration for a bucket. The rule specified in the Object Lock configuration will be applied by default to every new object placed in the specified bucket.

', 'GetObjectRetention' => '

Retrieves an object\'s retention settings.

', 'GetObjectTagging' => '

Returns the tag-set of an object.

', 'GetObjectTorrent' => '

Returns torrent files from a bucket.

', 'GetPublicAccessBlock' => '

Retrieves the PublicAccessBlock configuration for an Amazon S3 bucket.

', 'HeadBucket' => '

This operation is useful to determine if a bucket exists and you have permission to access it.

', 'HeadObject' => '

The HEAD operation retrieves metadata from an object without returning the object itself. This operation is useful if you\'re only interested in an object\'s metadata. To use HEAD, you must have READ access to the object.

', 'ListBucketAnalyticsConfigurations' => '

Lists the analytics configurations for the bucket.

', 'ListBucketInventoryConfigurations' => '

Returns a list of inventory configurations for the bucket.

', 'ListBucketMetricsConfigurations' => '

Lists the metrics configurations for the bucket.

', 'ListBuckets' => '

Returns a list of all buckets owned by the authenticated sender of the request.

', 'ListMultipartUploads' => '

This operation lists in-progress multipart uploads.

', 'ListObjectVersions' => '

Returns metadata about all of the versions of objects in a bucket.

', 'ListObjects' => '

Returns some or all (up to 1000) of the objects in a bucket. You can use the request parameters as selection criteria to return a subset of the objects in a bucket.

', 'ListObjectsV2' => '

Returns some or all (up to 1000) of the objects in a bucket. You can use the request parameters as selection criteria to return a subset of the objects in a bucket. Note: ListObjectsV2 is the revised List Objects API and we recommend you use this revised API for new application development.
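A sketch of paging through a bucket with ContinuationToken, assuming hypothetical names and the same client setup as the other examples:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$token = null;
do {
    $params = ['Bucket' => 'my-bucket', 'MaxKeys' => 1000];
    if ($token !== null) {
        $params['ContinuationToken'] = $token;
    }
    $page = $s3->listObjectsV2($params);
    foreach ($page['Contents'] ?? [] as $object) {
        echo $object['Key'], "\n";
    }
    // NextContinuationToken is set only while the listing is truncated.
    $token = $page['NextContinuationToken'] ?? null;
} while ($token !== null);
```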

', 'ListParts' => '

Lists the parts that have been uploaded for a specific multipart upload.

', 'PutBucketAccelerateConfiguration' => '

Sets the accelerate configuration of an existing bucket.

', 'PutBucketAcl' => '

Sets the permissions on a bucket using access control lists (ACL).

', 'PutBucketAnalyticsConfiguration' => '

Sets an analytics configuration for the bucket (specified by the analytics configuration ID).

', 'PutBucketCors' => '

Sets the CORS configuration for a bucket.

', 'PutBucketEncryption' => '

Creates a new server-side encryption configuration (or replaces an existing one, if present).

', 'PutBucketInventoryConfiguration' => '

Adds an inventory configuration (identified by the inventory ID) to the bucket.

', 'PutBucketLifecycle' => '

No longer used, see the PutBucketLifecycleConfiguration operation.

', 'PutBucketLifecycleConfiguration' => '

Sets lifecycle configuration for your bucket. If a lifecycle configuration exists, it replaces it.

', 'PutBucketLogging' => '

Sets the logging parameters for a bucket and specifies permissions for who can view and modify the logging parameters. To set the logging status of a bucket, you must be the bucket owner.

', 'PutBucketMetricsConfiguration' => '

Sets a metrics configuration (specified by the metrics configuration ID) for the bucket.

', 'PutBucketNotification' => '

No longer used, see the PutBucketNotificationConfiguration operation.

', 'PutBucketNotificationConfiguration' => '

Enables notifications of specified events for a bucket.

', 'PutBucketPolicy' => '

Replaces a policy on a bucket. If the bucket already has a policy, the one in this request completely replaces it.
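For instance, replacing a bucket policy wholesale might look like this sketch; the policy statement and names are illustrative only:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// This document fully replaces any policy already on the bucket.
$policy = json_encode([
    'Version'   => '2012-10-17',
    'Statement' => [[
        'Sid'       => 'AllowPublicRead',
        'Effect'    => 'Allow',
        'Principal' => '*',
        'Action'    => 's3:GetObject',
        'Resource'  => 'arn:aws:s3:::my-bucket/*',
    ]],
]);

$s3->putBucketPolicy(['Bucket' => 'my-bucket', 'Policy' => $policy]);
```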

', 'PutBucketReplication' => '

Creates a replication configuration or replaces an existing one. For more information, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

', 'PutBucketRequestPayment' => '

Sets the request payment configuration for a bucket. By default, the bucket owner pays for downloads from the bucket. This configuration parameter enables the bucket owner (only) to specify that the person requesting the download will be charged for the download. Documentation on requester pays buckets can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/RequesterPaysBuckets.html

', 'PutBucketTagging' => '

Sets the tags for a bucket.

', 'PutBucketVersioning' => '

Sets the versioning state of an existing bucket. To set the versioning state, you must be the bucket owner.

', 'PutBucketWebsite' => '

Sets the website configuration for a bucket.

', 'PutObject' => '

Adds an object to a bucket.

', 'PutObjectAcl' => '

Uses the acl subresource to set the access control list (ACL) permissions for an object that already exists in a bucket.

', 'PutObjectLegalHold' => '

Applies a Legal Hold configuration to the specified object.

', 'PutObjectLockConfiguration' => '

Places an Object Lock configuration on the specified bucket. The rule specified in the Object Lock configuration will be applied by default to every new object placed in the specified bucket.
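A sketch of setting a default retention rule; the bucket name and retention period are hypothetical, and the bucket must have been created with Object Lock enabled:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// New objects in this bucket default to 30 days of GOVERNANCE retention.
$s3->putObjectLockConfiguration([
    'Bucket' => 'my-locked-bucket',
    'ObjectLockConfiguration' => [
        'ObjectLockEnabled' => 'Enabled',
        'Rule' => [
            'DefaultRetention' => ['Mode' => 'GOVERNANCE', 'Days' => 30],
        ],
    ],
]);
```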

', 'PutObjectRetention' => '

Places an Object Retention configuration on an object.

', 'PutObjectTagging' => '

Sets the supplied tag-set to an object that already exists in a bucket.

', 'PutPublicAccessBlock' => '

Creates or modifies the PublicAccessBlock configuration for an Amazon S3 bucket.
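A sketch that blocks all four categories of public access for a hypothetical bucket:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$s3->putPublicAccessBlock([
    'Bucket' => 'my-bucket',
    'PublicAccessBlockConfiguration' => [
        'BlockPublicAcls'       => true,
        'IgnorePublicAcls'      => true,
        'BlockPublicPolicy'     => true,
        'RestrictPublicBuckets' => true,
    ],
]);
```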

', 'RestoreObject' => '

Restores an archived copy of an object back into Amazon S3.

', 'SelectObjectContent' => '

This operation filters the contents of an Amazon S3 object based on a simple Structured Query Language (SQL) statement. In the request, along with the SQL expression, you must also specify a data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data into records, and returns only records that match the specified SQL expression. You must also specify the data serialization format for the response.
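A sketch of an S3 Select call over a CSV object, streaming matching records from the event stream; the object name and SQL expression are illustrative:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$result = $s3->selectObjectContent([
    'Bucket'         => 'my-bucket',
    'Key'            => 'data.csv',
    'ExpressionType' => 'SQL',
    'Expression'     => 'SELECT s._1, s._2 FROM S3Object s',
    'InputSerialization'  => [
        'CSV'             => ['FileHeaderInfo' => 'NONE'],
        'CompressionType' => 'NONE',
    ],
    'OutputSerialization' => ['CSV' => []],
]);

// The Payload is an event stream; Records events carry the result bytes.
foreach ($result['Payload'] as $event) {
    if (isset($event['Records'])) {
        echo $event['Records']['Payload'];
    }
}
```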

', 'UploadPart' => '

Uploads a part in a multipart upload.

Note: After you initiate a multipart upload and upload one or more parts, you must either complete or abort the multipart upload in order to stop getting charged for storage of the uploaded parts. Only after you complete or abort the multipart upload does Amazon S3 free up the parts storage and stop charging you for it.

', 'UploadPartCopy' => '

Uploads a part by copying data from an existing object as data source.

', ], 'shapes' => [ 'AbortDate' => [ 'base' => NULL, 'refs' => [ 'CreateMultipartUploadOutput$AbortDate' => '

Date when multipart upload will become eligible for abort operation by lifecycle.

', 'ListPartsOutput$AbortDate' => '

Date when multipart upload will become eligible for abort operation by lifecycle.

', ], ], 'AbortIncompleteMultipartUpload' => [ 'base' => '

Specifies the days since the initiation of an Incomplete Multipart Upload that Lifecycle will wait before permanently removing all parts of the upload.

', 'refs' => [ 'LifecycleRule$AbortIncompleteMultipartUpload' => NULL, 'Rule$AbortIncompleteMultipartUpload' => NULL, ], ], 'AbortMultipartUploadOutput' => [ 'base' => NULL, 'refs' => [], ], 'AbortMultipartUploadRequest' => [ 'base' => NULL, 'refs' => [], ], 'AbortRuleId' => [ 'base' => NULL, 'refs' => [ 'CreateMultipartUploadOutput$AbortRuleId' => '

Id of the lifecycle rule that makes a multipart upload eligible for abort operation.

', 'ListPartsOutput$AbortRuleId' => '

Id of the lifecycle rule that makes a multipart upload eligible for abort operation.

', ], ], 'AccelerateConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketAccelerateConfigurationRequest$AccelerateConfiguration' => '

Specifies the Accelerate Configuration you want to set for the bucket.

', ], ], 'AcceptRanges' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$AcceptRanges' => '

', 'HeadObjectOutput$AcceptRanges' => '

', ], ], 'AccessControlPolicy' => [ 'base' => '

', 'refs' => [ 'PutBucketAclRequest$AccessControlPolicy' => '

', 'PutObjectAclRequest$AccessControlPolicy' => '

', ], ], 'AccessControlTranslation' => [ 'base' => '

A container for information about access control for replicas.

', 'refs' => [ 'Destination$AccessControlTranslation' => '

A container for information about access control for replicas.

Use this element only in a cross-account scenario where source and destination bucket owners are not the same to change replica ownership to the AWS account that owns the destination bucket. If you don\'t add this element to the replication configuration, the replicas are owned by the same AWS account that owns the source object.

', ], ], 'AccountId' => [ 'base' => NULL, 'refs' => [ 'AnalyticsS3BucketDestination$BucketAccountId' => '

The account ID that owns the destination bucket. If no account ID is provided, the owner will not be validated prior to exporting data.

', 'Destination$Account' => '

The account ID of the destination bucket. Currently, Amazon S3 verifies this value only if Access Control Translation is enabled.

In a cross-account scenario, if you change replica ownership to the AWS account that owns the destination bucket by adding the AccessControlTranslation element, this is the account ID of the owner of the destination bucket.

', 'InventoryS3BucketDestination$AccountId' => '

The ID of the account that owns the destination bucket.

', ], ], 'AllowQuotedRecordDelimiter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$AllowQuotedRecordDelimiter' => '

Specifies that CSV field values may contain quoted record delimiters and such records should be allowed. Default value is FALSE. Setting this value to TRUE may lower performance.

', ], ], 'AllowedHeader' => [ 'base' => NULL, 'refs' => [ 'AllowedHeaders$member' => NULL, ], ], 'AllowedHeaders' => [ 'base' => NULL, 'refs' => [ 'CORSRule$AllowedHeaders' => '

Specifies which headers are allowed in a pre-flight OPTIONS request.

', ], ], 'AllowedMethod' => [ 'base' => NULL, 'refs' => [ 'AllowedMethods$member' => NULL, ], ], 'AllowedMethods' => [ 'base' => NULL, 'refs' => [ 'CORSRule$AllowedMethods' => '

Identifies HTTP methods that the domain/origin specified in the rule is allowed to execute.

', ], ], 'AllowedOrigin' => [ 'base' => NULL, 'refs' => [ 'AllowedOrigins$member' => NULL, ], ], 'AllowedOrigins' => [ 'base' => NULL, 'refs' => [ 'CORSRule$AllowedOrigins' => '

One or more origins you want customers to be able to access the bucket from.

', ], ], 'AnalyticsAndOperator' => [ 'base' => '

', 'refs' => [ 'AnalyticsFilter$And' => '

A conjunction (logical AND) of predicates, which is used in evaluating an analytics filter. The operator must have at least two predicates.

', ], ], 'AnalyticsConfiguration' => [ 'base' => '

', 'refs' => [ 'AnalyticsConfigurationList$member' => NULL, 'GetBucketAnalyticsConfigurationOutput$AnalyticsConfiguration' => '

The configuration and any analyses for the analytics filter.

', 'PutBucketAnalyticsConfigurationRequest$AnalyticsConfiguration' => '

The configuration and any analyses for the analytics filter.

', ], ], 'AnalyticsConfigurationList' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$AnalyticsConfigurationList' => '

The list of analytics configurations for a bucket.

', ], ], 'AnalyticsExportDestination' => [ 'base' => '

', 'refs' => [ 'StorageClassAnalysisDataExport$Destination' => '

The place to store the data for an analysis.

', ], ], 'AnalyticsFilter' => [ 'base' => '

', 'refs' => [ 'AnalyticsConfiguration$Filter' => '

The filter used to describe a set of objects for analyses. A filter must have exactly one prefix, one tag, or one conjunction (AnalyticsAndOperator). If no filter is provided, all objects will be considered in any analysis.

', ], ], 'AnalyticsId' => [ 'base' => NULL, 'refs' => [ 'AnalyticsConfiguration$Id' => '

The identifier used to represent an analytics configuration.

', 'DeleteBucketAnalyticsConfigurationRequest$Id' => '

The identifier used to represent an analytics configuration.

', 'GetBucketAnalyticsConfigurationRequest$Id' => '

The identifier used to represent an analytics configuration.

', 'PutBucketAnalyticsConfigurationRequest$Id' => '

The identifier used to represent an analytics configuration.

', ], ], 'AnalyticsS3BucketDestination' => [ 'base' => '

', 'refs' => [ 'AnalyticsExportDestination$S3BucketDestination' => '

A destination signifying output to an S3 bucket.

', ], ], 'AnalyticsS3ExportFileFormat' => [ 'base' => NULL, 'refs' => [ 'AnalyticsS3BucketDestination$Format' => '

The file format used when exporting data to Amazon S3.

', ], ], 'Body' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$Body' => '

Object data.

', 'GetObjectTorrentOutput$Body' => '

', 'PutObjectRequest$Body' => '

Object data.

', 'RecordsEvent$Payload' => '

The byte array of partial, one or more result records.

', 'UploadPartRequest$Body' => '

Object data.

', ], ], 'Bucket' => [ 'base' => '

', 'refs' => [ 'Buckets$member' => NULL, ], ], 'BucketAccelerateStatus' => [ 'base' => NULL, 'refs' => [ 'AccelerateConfiguration$Status' => '

The accelerate configuration of the bucket.

', 'GetBucketAccelerateConfigurationOutput$Status' => '

The accelerate configuration of the bucket.

', ], ], 'BucketAlreadyExists' => [ 'base' => '

The requested bucket name is not available. The bucket namespace is shared by all users of the system. Please select a different name and try again.

', 'refs' => [], ], 'BucketAlreadyOwnedByYou' => [ 'base' => '

', 'refs' => [], ], 'BucketCannedACL' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$ACL' => '

The canned ACL to apply to the bucket.

', 'PutBucketAclRequest$ACL' => '

The canned ACL to apply to the bucket.

', ], ], 'BucketLifecycleConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketLifecycleConfigurationRequest$LifecycleConfiguration' => '

', ], ], 'BucketLocationConstraint' => [ 'base' => NULL, 'refs' => [ 'CreateBucketConfiguration$LocationConstraint' => '

Specifies the region where the bucket will be created. If you don\'t specify a region, the bucket is created in US East (N. Virginia) Region (us-east-1).

', 'GetBucketLocationOutput$LocationConstraint' => '

', ], ], 'BucketLoggingStatus' => [ 'base' => '

', 'refs' => [ 'PutBucketLoggingRequest$BucketLoggingStatus' => '

', ], ], 'BucketLogsPermission' => [ 'base' => NULL, 'refs' => [ 'TargetGrant$Permission' => '

Logging permissions assigned to the Grantee for the bucket.

', ], ], 'BucketName' => [ 'base' => NULL, 'refs' => [ 'AbortMultipartUploadRequest$Bucket' => '

', 'AnalyticsS3BucketDestination$Bucket' => '

The Amazon Resource Name (ARN) of the bucket to which data is exported.

', 'Bucket$Name' => '

The name of the bucket.

', 'CompleteMultipartUploadOutput$Bucket' => '

', 'CompleteMultipartUploadRequest$Bucket' => '

', 'CopyObjectRequest$Bucket' => '

', 'CreateBucketRequest$Bucket' => '

', 'CreateMultipartUploadOutput$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', 'CreateMultipartUploadRequest$Bucket' => '

', 'DeleteBucketAnalyticsConfigurationRequest$Bucket' => '

The name of the bucket from which an analytics configuration is deleted.

', 'DeleteBucketCorsRequest$Bucket' => '

', 'DeleteBucketEncryptionRequest$Bucket' => '

The name of the bucket containing the server-side encryption configuration to delete.

', 'DeleteBucketInventoryConfigurationRequest$Bucket' => '

The name of the bucket containing the inventory configuration to delete.

', 'DeleteBucketLifecycleRequest$Bucket' => '

', 'DeleteBucketMetricsConfigurationRequest$Bucket' => '

The name of the bucket containing the metrics configuration to delete.

', 'DeleteBucketPolicyRequest$Bucket' => '

', 'DeleteBucketReplicationRequest$Bucket' => '

The bucket name.

It can take a while to propagate the deletion of a replication configuration to all Amazon S3 systems.

', 'DeleteBucketRequest$Bucket' => '

', 'DeleteBucketTaggingRequest$Bucket' => '

', 'DeleteBucketWebsiteRequest$Bucket' => '

', 'DeleteObjectRequest$Bucket' => '

', 'DeleteObjectTaggingRequest$Bucket' => '

', 'DeleteObjectsRequest$Bucket' => '

', 'DeletePublicAccessBlockRequest$Bucket' => '

The Amazon S3 bucket whose PublicAccessBlock configuration you want to delete.

', 'Destination$Bucket' => '

The Amazon Resource Name (ARN) of the bucket where you want Amazon S3 to store replicas of the object identified by the rule.

If there are multiple rules in your replication configuration, all rules must specify the same bucket as the destination. A replication configuration can replicate objects to only one destination bucket.

', 'GetBucketAccelerateConfigurationRequest$Bucket' => '

Name of the bucket for which the accelerate configuration is retrieved.

', 'GetBucketAclRequest$Bucket' => '

', 'GetBucketAnalyticsConfigurationRequest$Bucket' => '

The name of the bucket from which an analytics configuration is retrieved.

', 'GetBucketCorsRequest$Bucket' => '

', 'GetBucketEncryptionRequest$Bucket' => '

The name of the bucket from which the server-side encryption configuration is retrieved.

', 'GetBucketInventoryConfigurationRequest$Bucket' => '

The name of the bucket containing the inventory configuration to retrieve.

', 'GetBucketLifecycleConfigurationRequest$Bucket' => '

', 'GetBucketLifecycleRequest$Bucket' => '

', 'GetBucketLocationRequest$Bucket' => '

', 'GetBucketLoggingRequest$Bucket' => '

', 'GetBucketMetricsConfigurationRequest$Bucket' => '

The name of the bucket containing the metrics configuration to retrieve.

', 'GetBucketNotificationConfigurationRequest$Bucket' => '

Name of the bucket to get the notification configuration for.

', 'GetBucketPolicyRequest$Bucket' => '

', 'GetBucketPolicyStatusRequest$Bucket' => '

The name of the Amazon S3 bucket whose policy status you want to retrieve.

', 'GetBucketReplicationRequest$Bucket' => '

', 'GetBucketRequestPaymentRequest$Bucket' => '

', 'GetBucketTaggingRequest$Bucket' => '

', 'GetBucketVersioningRequest$Bucket' => '

', 'GetBucketWebsiteRequest$Bucket' => '

', 'GetObjectAclRequest$Bucket' => '

', 'GetObjectLegalHoldRequest$Bucket' => '

The bucket containing the object whose Legal Hold status you want to retrieve.

', 'GetObjectLockConfigurationRequest$Bucket' => '

The bucket whose Object Lock configuration you want to retrieve.

', 'GetObjectRequest$Bucket' => '

', 'GetObjectRetentionRequest$Bucket' => '

The bucket containing the object whose retention settings you want to retrieve.

', 'GetObjectTaggingRequest$Bucket' => '

', 'GetObjectTorrentRequest$Bucket' => '

', 'GetPublicAccessBlockRequest$Bucket' => '

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want to retrieve.

', 'HeadBucketRequest$Bucket' => '

', 'HeadObjectRequest$Bucket' => '

', 'InventoryS3BucketDestination$Bucket' => '

The Amazon Resource Name (ARN) of the bucket where inventory results will be published.

', 'ListBucketAnalyticsConfigurationsRequest$Bucket' => '

The name of the bucket from which analytics configurations are retrieved.

', 'ListBucketInventoryConfigurationsRequest$Bucket' => '

The name of the bucket containing the inventory configurations to retrieve.

', 'ListBucketMetricsConfigurationsRequest$Bucket' => '

The name of the bucket containing the metrics configurations to retrieve.

', 'ListMultipartUploadsOutput$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', 'ListMultipartUploadsRequest$Bucket' => '

', 'ListObjectVersionsOutput$Name' => '

', 'ListObjectVersionsRequest$Bucket' => '

', 'ListObjectsOutput$Name' => '

', 'ListObjectsRequest$Bucket' => '

', 'ListObjectsV2Output$Name' => '

Name of the bucket to list.

', 'ListObjectsV2Request$Bucket' => '

Name of the bucket to list.

', 'ListPartsOutput$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', 'ListPartsRequest$Bucket' => '

', 'PutBucketAccelerateConfigurationRequest$Bucket' => '

Name of the bucket for which the accelerate configuration is set.

', 'PutBucketAclRequest$Bucket' => '

', 'PutBucketAnalyticsConfigurationRequest$Bucket' => '

The name of the bucket to which an analytics configuration is stored.

', 'PutBucketCorsRequest$Bucket' => '

', 'PutBucketEncryptionRequest$Bucket' => '

The name of the bucket for which the server-side encryption configuration is set.

', 'PutBucketInventoryConfigurationRequest$Bucket' => '

The name of the bucket where the inventory configuration will be stored.

', 'PutBucketLifecycleConfigurationRequest$Bucket' => '

', 'PutBucketLifecycleRequest$Bucket' => '

', 'PutBucketLoggingRequest$Bucket' => '

', 'PutBucketMetricsConfigurationRequest$Bucket' => '

The name of the bucket for which the metrics configuration is set.

', 'PutBucketNotificationConfigurationRequest$Bucket' => '

', 'PutBucketNotificationRequest$Bucket' => '

', 'PutBucketPolicyRequest$Bucket' => '

', 'PutBucketReplicationRequest$Bucket' => '

', 'PutBucketRequestPaymentRequest$Bucket' => '

', 'PutBucketTaggingRequest$Bucket' => '

', 'PutBucketVersioningRequest$Bucket' => '

', 'PutBucketWebsiteRequest$Bucket' => '

', 'PutObjectAclRequest$Bucket' => '

', 'PutObjectLegalHoldRequest$Bucket' => '

The bucket containing the object that you want to place a Legal Hold on.

', 'PutObjectLockConfigurationRequest$Bucket' => '

The bucket whose Object Lock configuration you want to create or replace.

', 'PutObjectRequest$Bucket' => '

Name of the bucket to which the PUT operation was initiated.

', 'PutObjectRetentionRequest$Bucket' => '

The bucket that contains the object you want to apply this Object Retention configuration to.

', 'PutObjectTaggingRequest$Bucket' => '

', 'PutPublicAccessBlockRequest$Bucket' => '

The name of the Amazon S3 bucket whose PublicAccessBlock configuration you want to set.

', 'RestoreObjectRequest$Bucket' => '

', 'S3Location$BucketName' => '

The name of the bucket where the restore results will be placed.

', 'SelectObjectContentRequest$Bucket' => '

The S3 bucket.

', 'UploadPartCopyRequest$Bucket' => '

', 'UploadPartRequest$Bucket' => '

Name of the bucket to which the multipart upload was initiated.

', ], ], 'BucketVersioningStatus' => [ 'base' => NULL, 'refs' => [ 'GetBucketVersioningOutput$Status' => '

The versioning state of the bucket.

', 'VersioningConfiguration$Status' => '

The versioning state of the bucket.

', ], ], 'Buckets' => [ 'base' => NULL, 'refs' => [ 'ListBucketsOutput$Buckets' => '

', ], ], 'BypassGovernanceRetention' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectRequest$BypassGovernanceRetention' => '

Indicates whether S3 Object Lock should bypass Governance-mode restrictions to process this operation.

', 'DeleteObjectsRequest$BypassGovernanceRetention' => '

Specifies whether you want to delete this object even if it has a Governance-type Object Lock in place. You must have sufficient permissions to perform this operation.

', 'PutObjectRetentionRequest$BypassGovernanceRetention' => '

Indicates whether this operation should bypass Governance-mode restrictions.
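For example, loosening an existing GOVERNANCE-mode lock might look like this sketch; the names and date are hypothetical, and the caller needs the s3:BypassGovernanceRetention permission:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$s3->putObjectRetention([
    'Bucket'    => 'my-locked-bucket',
    'Key'       => 'report.pdf',
    'Retention' => [
        'Mode'            => 'GOVERNANCE',
        'RetainUntilDate' => '2019-12-31T00:00:00Z',
    ],
    // Required to shorten or override an existing GOVERNANCE-mode lock.
    'BypassGovernanceRetention' => true,
]);
```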

', ], ], 'BytesProcessed' => [ 'base' => NULL, 'refs' => [ 'Progress$BytesProcessed' => '

The current number of uncompressed object bytes processed.

', 'Stats$BytesProcessed' => '

The total number of uncompressed object bytes processed.

', ], ], 'BytesReturned' => [ 'base' => NULL, 'refs' => [ 'Progress$BytesReturned' => '

The current number of bytes of records payload data returned.

', 'Stats$BytesReturned' => '

The total number of bytes of records payload data returned.

', ], ], 'BytesScanned' => [ 'base' => NULL, 'refs' => [ 'Progress$BytesScanned' => '

The current number of object bytes scanned.

', 'Stats$BytesScanned' => '

The total number of object bytes scanned.

', ], ], 'CORSConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketCorsRequest$CORSConfiguration' => '

', ], ], 'CORSRule' => [ 'base' => '

', 'refs' => [ 'CORSRules$member' => NULL, ], ], 'CORSRules' => [ 'base' => NULL, 'refs' => [ 'CORSConfiguration$CORSRules' => '

', 'GetBucketCorsOutput$CORSRules' => '

', ], ], 'CSVInput' => [ 'base' => '

Describes how a CSV-formatted input object is formatted.

', 'refs' => [ 'InputSerialization$CSV' => '

Describes the serialization of a CSV-encoded object.

', ], ], 'CSVOutput' => [ 'base' => '

Describes how CSV-formatted results are formatted.

', 'refs' => [ 'OutputSerialization$CSV' => '

Describes the serialization of CSV-encoded Select results.

', ], ], 'CacheControl' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'CreateMultipartUploadRequest$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'GetObjectOutput$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'HeadObjectOutput$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', 'PutObjectRequest$CacheControl' => '

Specifies caching behavior along the request/reply chain.

', ], ], 'CloudFunction' => [ 'base' => NULL, 'refs' => [ 'CloudFunctionConfiguration$CloudFunction' => '

', ], ], 'CloudFunctionConfiguration' => [ 'base' => '

', 'refs' => [ 'NotificationConfigurationDeprecated$CloudFunctionConfiguration' => '

', ], ], 'CloudFunctionInvocationRole' => [ 'base' => NULL, 'refs' => [ 'CloudFunctionConfiguration$InvocationRole' => '

', ], ], 'Code' => [ 'base' => NULL, 'refs' => [ 'Error$Code' => '

', ], ], 'Comments' => [ 'base' => NULL, 'refs' => [ 'CSVInput$Comments' => '

The single character used to indicate a row should be ignored when present at the start of a row.

', ], ], 'CommonPrefix' => [ 'base' => '

', 'refs' => [ 'CommonPrefixList$member' => NULL, ], ], 'CommonPrefixList' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$CommonPrefixes' => '

', 'ListObjectVersionsOutput$CommonPrefixes' => '

', 'ListObjectsOutput$CommonPrefixes' => '

', 'ListObjectsV2Output$CommonPrefixes' => '

CommonPrefixes contains all (if there are any) keys between Prefix and the next occurrence of the string specified by the delimiter.

', ], ], 'CompleteMultipartUploadOutput' => [ 'base' => NULL, 'refs' => [], ], 'CompleteMultipartUploadRequest' => [ 'base' => NULL, 'refs' => [], ], 'CompletedMultipartUpload' => [ 'base' => '

', 'refs' => [ 'CompleteMultipartUploadRequest$MultipartUpload' => '

', ], ], 'CompletedPart' => [ 'base' => '

', 'refs' => [ 'CompletedPartList$member' => NULL, ], ], 'CompletedPartList' => [ 'base' => NULL, 'refs' => [ 'CompletedMultipartUpload$Parts' => '

', ], ], 'CompressionType' => [ 'base' => NULL, 'refs' => [ 'InputSerialization$CompressionType' => '

Specifies the object\'s compression format. Valid values: NONE, GZIP, BZIP2. Default value: NONE.

', ], ], 'Condition' => [ 'base' => '

', 'refs' => [ 'RoutingRule$Condition' => '

A container for describing a condition that must be met for the specified redirect to apply. For example: 1. If the request is for pages in the /docs folder, redirect to the /documents folder. 2. If the request results in an HTTP 4xx error, redirect the request to another host where you might process the error.

', ], ], 'ConfirmRemoveSelfBucketAccess' => [ 'base' => NULL, 'refs' => [ 'PutBucketPolicyRequest$ConfirmRemoveSelfBucketAccess' => '

Set this parameter to true to confirm that you want to remove your permissions to change this bucket policy in the future.

', ], ], 'ContentDisposition' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentDisposition' => '

Specifies presentational information for the object.

', 'CreateMultipartUploadRequest$ContentDisposition' => '

Specifies presentational information for the object.

', 'GetObjectOutput$ContentDisposition' => '

Specifies presentational information for the object.

', 'HeadObjectOutput$ContentDisposition' => '

Specifies presentational information for the object.

', 'PutObjectRequest$ContentDisposition' => '

Specifies presentational information for the object.

', ], ], 'ContentEncoding' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'CreateMultipartUploadRequest$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'GetObjectOutput$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'HeadObjectOutput$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', 'PutObjectRequest$ContentEncoding' => '

Specifies what content encodings have been applied to the object and thus what decoding mechanisms must be applied to obtain the media-type referenced by the Content-Type header field.

', ], ], 'ContentLanguage' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentLanguage' => '

The language the content is in.

', 'CreateMultipartUploadRequest$ContentLanguage' => '

The language the content is in.

', 'GetObjectOutput$ContentLanguage' => '

The language the content is in.

', 'HeadObjectOutput$ContentLanguage' => '

The language the content is in.

', 'PutObjectRequest$ContentLanguage' => '

The language the content is in.

', ], ], 'ContentLength' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$ContentLength' => '

Size of the body in bytes.

', 'HeadObjectOutput$ContentLength' => '

Size of the body in bytes.

', 'PutObjectRequest$ContentLength' => '

Size of the body in bytes. This parameter is useful when the size of the body cannot be determined automatically.

', 'UploadPartRequest$ContentLength' => '

Size of the body in bytes. This parameter is useful when the size of the body cannot be determined automatically.

', ], ], 'ContentMD5' => [ 'base' => NULL, 'refs' => [ 'PutBucketAclRequest$ContentMD5' => '

', 'PutBucketCorsRequest$ContentMD5' => '

', 'PutBucketEncryptionRequest$ContentMD5' => '

The base64-encoded 128-bit MD5 digest of the server-side encryption configuration. This parameter is auto-populated when using the command from the CLI.

', 'PutBucketLifecycleRequest$ContentMD5' => '

', 'PutBucketLoggingRequest$ContentMD5' => '

', 'PutBucketNotificationRequest$ContentMD5' => '

', 'PutBucketPolicyRequest$ContentMD5' => '

', 'PutBucketReplicationRequest$ContentMD5' => '

', 'PutBucketRequestPaymentRequest$ContentMD5' => '

', 'PutBucketTaggingRequest$ContentMD5' => '

', 'PutBucketVersioningRequest$ContentMD5' => '

', 'PutBucketWebsiteRequest$ContentMD5' => '

', 'PutObjectAclRequest$ContentMD5' => '

', 'PutObjectLegalHoldRequest$ContentMD5' => '

The MD5 hash for the request body.

', 'PutObjectLockConfigurationRequest$ContentMD5' => '

The MD5 hash for the request body.

', 'PutObjectRequest$ContentMD5' => '

The base64-encoded 128-bit MD5 digest of the part data. This parameter is auto-populated when using the command from the CLI.

', 'PutObjectRetentionRequest$ContentMD5' => '

The MD5 hash for the request body.

', 'PutObjectTaggingRequest$ContentMD5' => '

', 'PutPublicAccessBlockRequest$ContentMD5' => '

The MD5 hash of the PutPublicAccessBlock request body.

', 'UploadPartRequest$ContentMD5' => '

The base64-encoded 128-bit MD5 digest of the part data.

', ], ], 'ContentRange' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$ContentRange' => '

The portion of the object returned in the response.

', ], ], 'ContentType' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ContentType' => '

A standard MIME type describing the format of the object data.

', 'CreateMultipartUploadRequest$ContentType' => '

A standard MIME type describing the format of the object data.

', 'GetObjectOutput$ContentType' => '

A standard MIME type describing the format of the object data.

', 'HeadObjectOutput$ContentType' => '

A standard MIME type describing the format of the object data.

', 'PutObjectRequest$ContentType' => '

A standard MIME type describing the format of the object data.

', ], ], 'ContinuationEvent' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentEventStream$Cont' => '

The Continuation Event.

', ], ], 'CopyObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'CopyObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'CopyObjectResult' => [ 'base' => '

', 'refs' => [ 'CopyObjectOutput$CopyObjectResult' => '

', ], ], 'CopyPartResult' => [ 'base' => '

', 'refs' => [ 'UploadPartCopyOutput$CopyPartResult' => '

', ], ], 'CopySource' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySource' => '

The name of the source bucket and key name of the source object, separated by a slash (/). Must be URL-encoded.
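A sketch showing the bucket/key form and the URL-encoding of the key portion; the bucket and key names are hypothetical:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// "source-bucket/key", with the key URL-encoded segment by segment so the
// path separators themselves stay as literal slashes.
$sourceKey = implode('/', array_map('rawurlencode', explode('/', 'reports/q4 summary.csv')));

$s3->copyObject([
    'Bucket'     => 'destination-bucket',
    'Key'        => 'reports/q4-summary.csv',
    'CopySource' => 'source-bucket/' . $sourceKey,
]);
```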

', 'UploadPartCopyRequest$CopySource' => '

The name of the source bucket and key name of the source object, separated by a slash (/). Must be URL-encoded.

', ], ], 'CopySourceIfMatch' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfMatch' => '

Copies the object if its entity tag (ETag) matches the specified tag.

', 'UploadPartCopyRequest$CopySourceIfMatch' => '

Copies the object if its entity tag (ETag) matches the specified tag.

', ], ], 'CopySourceIfModifiedSince' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfModifiedSince' => '

Copies the object if it has been modified since the specified time.

', 'UploadPartCopyRequest$CopySourceIfModifiedSince' => '

Copies the object if it has been modified since the specified time.

', ], ], 'CopySourceIfNoneMatch' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfNoneMatch' => '

Copies the object if its entity tag (ETag) is different than the specified ETag.

', 'UploadPartCopyRequest$CopySourceIfNoneMatch' => '

Copies the object if its entity tag (ETag) is different than the specified ETag.

', ], ], 'CopySourceIfUnmodifiedSince' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceIfUnmodifiedSince' => '

Copies the object if it hasn\'t been modified since the specified time.

', 'UploadPartCopyRequest$CopySourceIfUnmodifiedSince' => '

Copies the object if it hasn\'t been modified since the specified time.

', ], ], 'CopySourceRange' => [ 'base' => NULL, 'refs' => [ 'UploadPartCopyRequest$CopySourceRange' => '

The range of bytes to copy from the source object. The range value must use the form bytes=first-last, where first and last are the zero-based byte offsets to copy. For example, bytes=0-9 indicates that you want to copy the first ten bytes of the source. You can copy a range only if the source object is greater than 5 MB.
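For example, copying the first 5 MB of a large source object as part 1 of a multipart upload might look like this sketch; the names and upload ID are hypothetical:

```php
<?php
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$uploadId = 'EXAMPLE_UPLOAD_ID'; // from a prior CreateMultipartUpload call

// bytes=0-5242879 is the first 5 MB (zero-based, inclusive offsets).
$s3->uploadPartCopy([
    'Bucket'          => 'destination-bucket',
    'Key'             => 'large-copy',
    'UploadId'        => $uploadId,
    'PartNumber'      => 1,
    'CopySource'      => 'source-bucket/large-object',
    'CopySourceRange' => 'bytes=0-5242879',
]);
```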

', ], ], 'CopySourceSSECustomerAlgorithm' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceSSECustomerAlgorithm' => '

Specifies the algorithm to use when decrypting the source object (e.g., AES256).

', 'UploadPartCopyRequest$CopySourceSSECustomerAlgorithm' => '

Specifies the algorithm to use when decrypting the source object (e.g., AES256).

', ], ], 'CopySourceSSECustomerKey' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceSSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be one that was used when the source object was created.

', 'UploadPartCopyRequest$CopySourceSSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use to decrypt the source object. The encryption key provided in this header must be one that was used when the source object was created.

', ], ], 'CopySourceSSECustomerKeyMD5' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$CopySourceSSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'UploadPartCopyRequest$CopySourceSSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', ], ], 'CopySourceVersionId' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$CopySourceVersionId' => '

', 'UploadPartCopyOutput$CopySourceVersionId' => '

The version of the source object that was copied, if you have enabled versioning on the source bucket.

', ], ], 'CreateBucketConfiguration' => [ 'base' => '

', 'refs' => [ 'CreateBucketRequest$CreateBucketConfiguration' => '

', ], ], 'CreateBucketOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateBucketRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateMultipartUploadOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateMultipartUploadRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreationDate' => [ 'base' => NULL, 'refs' => [ 'Bucket$CreationDate' => '

Date the bucket was created.

', ], ], 'Date' => [ 'base' => NULL, 'refs' => [ 'LifecycleExpiration$Date' => '

Indicates at what date the object is to be moved or deleted. Should be in GMT ISO 8601 format.

', 'ObjectLockRetention$RetainUntilDate' => '

The date on which this Object Lock Retention will expire.

', 'Transition$Date' => '

Indicates at what date the object is to be moved or deleted. Should be in GMT ISO 8601 format.

', ], ], 'Days' => [ 'base' => NULL, 'refs' => [ 'DefaultRetention$Days' => '

The number of days that you want to specify for the default retention period.

', 'LifecycleExpiration$Days' => '

Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.

', 'NoncurrentVersionExpiration$NoncurrentDays' => '

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

', 'NoncurrentVersionTransition$NoncurrentDays' => '

Specifies the number of days an object is noncurrent before Amazon S3 can perform the associated action. For information about the noncurrent days calculations, see How Amazon S3 Calculates When an Object Became Noncurrent in the Amazon Simple Storage Service Developer Guide.

', 'RestoreRequest$Days' => '

Lifetime of the active copy in days. Do not use with restores that specify OutputLocation.

', 'Transition$Days' => '

Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer.

', ], ], 'DaysAfterInitiation' => [ 'base' => NULL, 'refs' => [ 'AbortIncompleteMultipartUpload$DaysAfterInitiation' => '

Indicates the number of days that must pass since initiation for Lifecycle to abort an Incomplete Multipart Upload.

', ], ], 'DefaultRetention' => [ 'base' => '

The container element for specifying the default Object Lock retention settings for new objects placed in the specified bucket.

', 'refs' => [ 'ObjectLockRule$DefaultRetention' => '

The default retention period that you want to apply to new objects placed in the specified bucket.

', ], ], 'Delete' => [ 'base' => '

', 'refs' => [ 'DeleteObjectsRequest$Delete' => '

', ], ], 'DeleteBucketAnalyticsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketCorsRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketEncryptionRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketInventoryConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketLifecycleRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketMetricsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketPolicyRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketReplicationRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteBucketWebsiteRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteMarker' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectOutput$DeleteMarker' => '

Specifies whether the versioned object that was permanently deleted was (true) or was not (false) a delete marker.

', 'DeletedObject$DeleteMarker' => '

', 'GetObjectOutput$DeleteMarker' => '

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If false, this response header does not appear in the response.

', 'HeadObjectOutput$DeleteMarker' => '

Specifies whether the object retrieved was (true) or was not (false) a Delete Marker. If false, this response header does not appear in the response.

', ], ], 'DeleteMarkerEntry' => [ 'base' => '

', 'refs' => [ 'DeleteMarkers$member' => NULL, ], ], 'DeleteMarkerReplication' => [ 'base' => '

Specifies whether Amazon S3 should replicate delete makers.

', 'refs' => [ 'ReplicationRule$DeleteMarkerReplication' => NULL, ], ], 'DeleteMarkerReplicationStatus' => [ 'base' => NULL, 'refs' => [ 'DeleteMarkerReplication$Status' => '

The status of the delete marker replication.

In the current implementation, Amazon S3 doesn\'t replicate the delete markers. The status must be Disabled.

', ], ], 'DeleteMarkerVersionId' => [ 'base' => NULL, 'refs' => [ 'DeletedObject$DeleteMarkerVersionId' => '

', ], ], 'DeleteMarkers' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$DeleteMarkers' => '

', ], ], 'DeleteObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteObjectsRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeletePublicAccessBlockRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeletedObject' => [ 'base' => '

', 'refs' => [ 'DeletedObjects$member' => NULL, ], ], 'DeletedObjects' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectsOutput$Deleted' => '

', ], ], 'Delimiter' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$Delimiter' => '

', 'ListMultipartUploadsRequest$Delimiter' => '

Character you use to group keys.

', 'ListObjectVersionsOutput$Delimiter' => '

', 'ListObjectVersionsRequest$Delimiter' => '

A delimiter is a character you use to group keys.

', 'ListObjectsOutput$Delimiter' => '

', 'ListObjectsRequest$Delimiter' => '

A delimiter is a character you use to group keys.

', 'ListObjectsV2Output$Delimiter' => '

A delimiter is a character you use to group keys.

', 'ListObjectsV2Request$Delimiter' => '

A delimiter is a character you use to group keys.

', ], ], 'Description' => [ 'base' => NULL, 'refs' => [ 'RestoreRequest$Description' => '

The optional description for the job.

', ], ], 'Destination' => [ 'base' => '

A container for information about the replication destination.

', 'refs' => [ 'ReplicationRule$Destination' => '

A container for information about the replication destination.

', ], ], 'DisplayName' => [ 'base' => NULL, 'refs' => [ 'Grantee$DisplayName' => '

Screen name of the grantee.

', 'Initiator$DisplayName' => '

Name of the Principal.

', 'Owner$DisplayName' => '

', ], ], 'ETag' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$ETag' => '

Entity tag of the object.

', 'CompletedPart$ETag' => '

Entity tag returned when the part was uploaded.

', 'CopyObjectResult$ETag' => '

', 'CopyPartResult$ETag' => '

Entity tag of the object.

', 'GetObjectOutput$ETag' => '

An ETag is an opaque identifier assigned by a web server to a specific version of a resource found at a URL.

', 'HeadObjectOutput$ETag' => '

An ETag is an opaque identifier assigned by a web server to a specific version of a resource found at a URL.

', 'Object$ETag' => '

', 'ObjectVersion$ETag' => '

', 'Part$ETag' => '

Entity tag returned when the part was uploaded.

', 'PutObjectOutput$ETag' => '

Entity tag for the uploaded object.

', 'UploadPartOutput$ETag' => '

Entity tag for the uploaded object.

', ], ], 'EmailAddress' => [ 'base' => NULL, 'refs' => [ 'Grantee$EmailAddress' => '

Email address of the grantee.

', ], ], 'EnableRequestProgress' => [ 'base' => NULL, 'refs' => [ 'RequestProgress$Enabled' => '

Specifies whether periodic QueryProgress frames should be sent. Valid values: TRUE, FALSE. Default value: FALSE.

', ], ], 'EncodingType' => [ 'base' => '

Requests Amazon S3 to encode the object keys in the response and specifies the encoding method to use. An object key may contain any Unicode character; however, the XML 1.0 parser cannot parse some characters, such as characters with an ASCII value from 0 to 10. For characters that are not supported in XML 1.0, you can add this parameter to request that Amazon S3 encode the keys in the response.

', 'refs' => [ 'ListMultipartUploadsOutput$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListMultipartUploadsRequest$EncodingType' => NULL, 'ListObjectVersionsOutput$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListObjectVersionsRequest$EncodingType' => NULL, 'ListObjectsOutput$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListObjectsRequest$EncodingType' => NULL, 'ListObjectsV2Output$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', 'ListObjectsV2Request$EncodingType' => '

Encoding type used by Amazon S3 to encode object keys in the response.

', ], ], 'Encryption' => [ 'base' => '

Describes the server-side encryption that will be applied to the restore results.

', 'refs' => [ 'S3Location$Encryption' => NULL, ], ], 'EncryptionConfiguration' => [ 'base' => '

A container for information about the encryption-based configuration for replicas.

', 'refs' => [ 'Destination$EncryptionConfiguration' => '

A container that provides information about encryption. If SourceSelectionCriteria is specified, you must specify this element.

', ], ], 'EndEvent' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentEventStream$End' => '

The End Event.

', ], ], 'Error' => [ 'base' => '

', 'refs' => [ 'Errors$member' => NULL, ], ], 'ErrorDocument' => [ 'base' => '

', 'refs' => [ 'GetBucketWebsiteOutput$ErrorDocument' => '

', 'WebsiteConfiguration$ErrorDocument' => '

', ], ], 'Errors' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectsOutput$Errors' => '

', ], ], 'Event' => [ 'base' => '

The bucket event for which to send notifications.

', 'refs' => [ 'CloudFunctionConfiguration$Event' => NULL, 'EventList$member' => NULL, 'QueueConfigurationDeprecated$Event' => NULL, 'TopicConfigurationDeprecated$Event' => '

Bucket event for which to send notifications.

', ], ], 'EventList' => [ 'base' => NULL, 'refs' => [ 'CloudFunctionConfiguration$Events' => '

', 'LambdaFunctionConfiguration$Events' => '

', 'QueueConfiguration$Events' => '

', 'QueueConfigurationDeprecated$Events' => '

', 'TopicConfiguration$Events' => '

', 'TopicConfigurationDeprecated$Events' => '

', ], ], 'Expiration' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$Expiration' => '

If the object expiration is configured, this will contain the expiration date (expiry-date) and rule ID (rule-id). The value of rule-id is URL encoded.

', 'CopyObjectOutput$Expiration' => '

If the object expiration is configured, the response includes this header.

', 'GetObjectOutput$Expiration' => '

If the object expiration is configured (see PUT Bucket lifecycle), the response includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded.

', 'HeadObjectOutput$Expiration' => '

If the object expiration is configured (see PUT Bucket lifecycle), the response includes this header. It includes the expiry-date and rule-id key value pairs providing object expiration information. The value of the rule-id is URL encoded.

', 'PutObjectOutput$Expiration' => '

If the object expiration is configured, this will contain the expiration date (expiry-date) and rule ID (rule-id). The value of rule-id is URL encoded.

', ], ], 'ExpirationStatus' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$Status' => '

If \'Enabled\', the rule is currently being applied. If \'Disabled\', the rule is not currently being applied.

', 'Rule$Status' => '

If \'Enabled\', the rule is currently being applied. If \'Disabled\', the rule is not currently being applied.

', ], ], 'ExpiredObjectDeleteMarker' => [ 'base' => NULL, 'refs' => [ 'LifecycleExpiration$ExpiredObjectDeleteMarker' => '

Indicates whether Amazon S3 will remove a delete marker with no noncurrent versions. If set to true, the delete marker will be expired; if set to false, the policy takes no action. This cannot be specified with Days or Date in a Lifecycle Expiration Policy.

', ], ], 'Expires' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$Expires' => '

The date and time at which the object is no longer cacheable.

', 'CreateMultipartUploadRequest$Expires' => '

The date and time at which the object is no longer cacheable.

', 'GetObjectOutput$Expires' => '

The date and time at which the object is no longer cacheable.

', 'HeadObjectOutput$Expires' => '

The date and time at which the object is no longer cacheable.

', 'PutObjectRequest$Expires' => '

The date and time at which the object is no longer cacheable.

', ], ], 'ExposeHeader' => [ 'base' => NULL, 'refs' => [ 'ExposeHeaders$member' => NULL, ], ], 'ExposeHeaders' => [ 'base' => NULL, 'refs' => [ 'CORSRule$ExposeHeaders' => '

One or more headers in the response that you want customers to be able to access from their applications (for example, from a JavaScript XMLHttpRequest object).

', ], ], 'Expression' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentRequest$Expression' => '

The expression that is used to query the object.
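
As a hedged illustration of how the expression is supplied (the bucket, key, and CSV layout are assumptions), the PHP SDK's selectObjectContent call takes the expression together with its serialization settings and streams result events back:

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$result = $s3->selectObjectContent([
    'Bucket'              => 'example-bucket',                      // placeholder
    'Key'                 => 'records.csv',                         // assumed CSV object
    'Expression'          => 'SELECT s.* FROM S3Object s LIMIT 10', // the query expression
    'ExpressionType'      => 'SQL',
    'InputSerialization'  => ['CSV' => ['FileHeaderInfo' => 'USE']],
    'OutputSerialization' => ['CSV' => []],
]);
foreach ($result['Payload'] as $event) {   // iterate the event stream
    if (isset($event['Records'])) {
        echo $event['Records']['Payload']; // raw result bytes
    }
}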

', 'SelectParameters$Expression' => '

The expression that is used to query the object.

', ], ], 'ExpressionType' => [ 'base' => NULL, 'refs' => [ 'SelectObjectContentRequest$ExpressionType' => '

The type of the provided expression (for example, SQL).

', 'SelectParameters$ExpressionType' => '

The type of the provided expression (e.g., SQL).

', ], ], 'FetchOwner' => [ 'base' => NULL, 'refs' => [ 'ListObjectsV2Request$FetchOwner' => '

The owner field is not present in ListObjectsV2 results by default. If you want the owner field returned with each key in the result, set FetchOwner to true.
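
For instance, in the PHP SDK (the bucket name is a placeholder):

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$result = $s3->listObjectsV2([
    'Bucket'     => 'example-bucket', // placeholder
    'FetchOwner' => true,             // include an Owner element with each key
]);
foreach ($result['Contents'] ?? [] as $object) {
    echo $object['Key'], ' => ', $object['Owner']['ID'] ?? '', PHP_EOL;
}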

', ], ], 'FieldDelimiter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$FieldDelimiter' => '

The value used to separate individual fields in a record.

', 'CSVOutput$FieldDelimiter' => '

The value used to separate individual fields in a record.

', ], ], 'FileHeaderInfo' => [ 'base' => NULL, 'refs' => [ 'CSVInput$FileHeaderInfo' => '

Describes the first line of input. Valid values: None, Ignore, Use.

', ], ], 'FilterRule' => [ 'base' => '

A container for a key value pair that defines the criteria for the filter rule.

', 'refs' => [ 'FilterRuleList$member' => NULL, ], ], 'FilterRuleList' => [ 'base' => '

A list of containers for the key value pair that defines the criteria for the filter rule.

', 'refs' => [ 'S3KeyFilter$FilterRules' => NULL, ], ], 'FilterRuleName' => [ 'base' => NULL, 'refs' => [ 'FilterRule$Name' => '

The object key name prefix or suffix identifying one or more objects to which the filtering rule applies. The maximum prefix length is 1,024 characters. Overlapping prefixes and suffixes are not supported. For more information, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

', ], ], 'FilterRuleValue' => [ 'base' => NULL, 'refs' => [ 'FilterRule$Value' => '

', ], ], 'GetBucketAccelerateConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAccelerateConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAclOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAnalyticsConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketAnalyticsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketCorsOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketCorsRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketEncryptionOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketEncryptionRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketInventoryConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketInventoryConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLifecycleRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLocationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLocationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLoggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketLoggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketMetricsConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketMetricsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketNotificationConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyStatusOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketPolicyStatusRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketReplicationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketReplicationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketRequestPaymentOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketRequestPaymentRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketVersioningOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketVersioningRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketWebsiteOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetBucketWebsiteRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectAclOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLegalHoldOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLegalHoldRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLockConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectLockConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectRetentionOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectRetentionRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTorrentOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetObjectTorrentRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetPublicAccessBlockOutput' => [ 'base' => NULL, 'refs' => [], ], 'GetPublicAccessBlockRequest' => 
[ 'base' => NULL, 'refs' => [], ], 'GlacierJobParameters' => [ 'base' => '

', 'refs' => [ 'RestoreRequest$GlacierJobParameters' => '

Glacier-related parameters pertaining to this job. Do not use with restores that specify OutputLocation.
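
A hedged sketch of a restore that sets these parameters (bucket and key are placeholders; GlacierJobParameters is not combined with OutputLocation here, per the note above):

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$s3->restoreObject([
    'Bucket'         => 'example-bucket',  // placeholder
    'Key'            => 'archived-object', // placeholder, stored in GLACIER
    'RestoreRequest' => [
        'Days'                 => 7,                      // keep the restored copy for 7 days
        'GlacierJobParameters' => ['Tier' => 'Standard'], // retrieval tier for the job
    ],
]);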

', ], ], 'Grant' => [ 'base' => '

', 'refs' => [ 'Grants$member' => NULL, ], ], 'GrantFullControl' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantFullControl' => '

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

', 'CreateBucketRequest$GrantFullControl' => '

Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.

', 'CreateMultipartUploadRequest$GrantFullControl' => '

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

', 'PutBucketAclRequest$GrantFullControl' => '

Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.

', 'PutObjectAclRequest$GrantFullControl' => '

Allows grantee the read, write, read ACP, and write ACP permissions on the bucket.

', 'PutObjectRequest$GrantFullControl' => '

Gives the grantee READ, READ_ACP, and WRITE_ACP permissions on the object.

', ], ], 'GrantRead' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantRead' => '

Allows grantee to read the object data and its metadata.

', 'CreateBucketRequest$GrantRead' => '

Allows grantee to list the objects in the bucket.

', 'CreateMultipartUploadRequest$GrantRead' => '

Allows grantee to read the object data and its metadata.

', 'PutBucketAclRequest$GrantRead' => '

Allows grantee to list the objects in the bucket.

', 'PutObjectAclRequest$GrantRead' => '

Allows grantee to list the objects in the bucket.

', 'PutObjectRequest$GrantRead' => '

Allows grantee to read the object data and its metadata.

', ], ], 'GrantReadACP' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantReadACP' => '

Allows grantee to read the object ACL.

', 'CreateBucketRequest$GrantReadACP' => '

Allows grantee to read the bucket ACL.

', 'CreateMultipartUploadRequest$GrantReadACP' => '

Allows grantee to read the object ACL.

', 'PutBucketAclRequest$GrantReadACP' => '

Allows grantee to read the bucket ACL.

', 'PutObjectAclRequest$GrantReadACP' => '

Allows grantee to read the bucket ACL.

', 'PutObjectRequest$GrantReadACP' => '

Allows grantee to read the object ACL.

', ], ], 'GrantWrite' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$GrantWrite' => '

Allows grantee to create, overwrite, and delete any object in the bucket.

', 'PutBucketAclRequest$GrantWrite' => '

Allows grantee to create, overwrite, and delete any object in the bucket.

', 'PutObjectAclRequest$GrantWrite' => '

Allows grantee to create, overwrite, and delete any object in the bucket.

', ], ], 'GrantWriteACP' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable object.

', 'CreateBucketRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable bucket.

', 'CreateMultipartUploadRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable object.

', 'PutBucketAclRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable bucket.

', 'PutObjectAclRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable bucket.

', 'PutObjectRequest$GrantWriteACP' => '

Allows grantee to write the ACL for the applicable object.

', ], ], 'Grantee' => [ 'base' => '

', 'refs' => [ 'Grant$Grantee' => '

', 'TargetGrant$Grantee' => '

', ], ], 'Grants' => [ 'base' => NULL, 'refs' => [ 'AccessControlPolicy$Grants' => '

A list of grants.

', 'GetBucketAclOutput$Grants' => '

A list of grants.

', 'GetObjectAclOutput$Grants' => '

A list of grants.

', 'S3Location$AccessControlList' => '

A list of grants that control access to the staged results.

', ], ], 'HeadBucketRequest' => [ 'base' => NULL, 'refs' => [], ], 'HeadObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'HeadObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'HostName' => [ 'base' => NULL, 'refs' => [ 'Redirect$HostName' => '

The host name to use in the redirect request.

', 'RedirectAllRequestsTo$HostName' => '

Name of the host where requests will be redirected.

', ], ], 'HttpErrorCodeReturnedEquals' => [ 'base' => NULL, 'refs' => [ 'Condition$HttpErrorCodeReturnedEquals' => '

The HTTP error code when the redirect is applied. In the event of an error, if the error code equals this value, then the specified redirect is applied. Required when parent element Condition is specified and sibling KeyPrefixEquals is not specified. If both are specified, then both must be true for the redirect to be applied.

', ], ], 'HttpRedirectCode' => [ 'base' => NULL, 'refs' => [ 'Redirect$HttpRedirectCode' => '

The HTTP redirect code to use on the response. Not required if one of the siblings is present.

', ], ], 'ID' => [ 'base' => NULL, 'refs' => [ 'Grantee$ID' => '

The canonical user ID of the grantee.

', 'Initiator$ID' => '

If the principal is an AWS account, it provides the Canonical User ID. If the principal is an IAM User, it provides a user ARN value.

', 'LifecycleRule$ID' => '

Unique identifier for the rule. The value cannot be longer than 255 characters.

', 'Owner$ID' => '

', 'ReplicationRule$ID' => '

A unique identifier for the rule. The maximum value is 255 characters.

', 'Rule$ID' => '

Unique identifier for the rule. The value cannot be longer than 255 characters.

', ], ], 'IfMatch' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfMatch' => '

Return the object only if its entity tag (ETag) is the same as the one specified, otherwise return a 412 (precondition failed).

', 'HeadObjectRequest$IfMatch' => '

Return the object only if its entity tag (ETag) is the same as the one specified, otherwise return a 412 (precondition failed).

', ], ], 'IfModifiedSince' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfModifiedSince' => '

Return the object only if it has been modified since the specified time, otherwise return a 304 (not modified).

', 'HeadObjectRequest$IfModifiedSince' => '

Return the object only if it has been modified since the specified time, otherwise return a 304 (not modified).

', ], ], 'IfNoneMatch' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfNoneMatch' => '

Return the object only if its entity tag (ETag) is different from the one specified, otherwise return a 304 (not modified).

', 'HeadObjectRequest$IfNoneMatch' => '

Return the object only if its entity tag (ETag) is different from the one specified, otherwise return a 304 (not modified).

', ], ], 'IfUnmodifiedSince' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$IfUnmodifiedSince' => '

Return the object only if it has not been modified since the specified time, otherwise return a 412 (precondition failed).

', 'HeadObjectRequest$IfUnmodifiedSince' => '

Return the object only if it has not been modified since the specified time, otherwise return a 412 (precondition failed).
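
These If* preconditions map directly onto request parameters in the PHP SDK; a minimal sketch using IfMatch on GetObject, assuming an ETag recorded from an earlier request (bucket, key, and the ETag value are placeholders):

require 'vendor/autoload.php';

use Aws\S3\S3Client;
use Aws\S3\Exception\S3Exception;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
try {
    $result = $s3->getObject([
        'Bucket'  => 'example-bucket',       // placeholder
        'Key'     => 'config.json',          // placeholder
        'IfMatch' => '"686897696a7c876b7e"', // assumed ETag from an earlier request
    ]);
} catch (S3Exception $e) {
    if ($e->getStatusCode() === 412) {
        // Precondition failed: the object changed since the ETag was recorded.
    }
}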

', ], ], 'IndexDocument' => [ 'base' => '

', 'refs' => [ 'GetBucketWebsiteOutput$IndexDocument' => '

', 'WebsiteConfiguration$IndexDocument' => '

', ], ], 'Initiated' => [ 'base' => NULL, 'refs' => [ 'MultipartUpload$Initiated' => '

Date and time at which the multipart upload was initiated.

', ], ], 'Initiator' => [ 'base' => '

', 'refs' => [ 'ListPartsOutput$Initiator' => '

Identifies who initiated the multipart upload.

', 'MultipartUpload$Initiator' => '

Identifies who initiated the multipart upload.

', ], ], 'InputSerialization' => [ 'base' => '

Describes the serialization format of the object.

', 'refs' => [ 'SelectObjectContentRequest$InputSerialization' => '

Describes the format of the data in the object that is being queried.

', 'SelectParameters$InputSerialization' => '

Describes the serialization format of the object.

', ], ], 'InventoryConfiguration' => [ 'base' => '

', 'refs' => [ 'GetBucketInventoryConfigurationOutput$InventoryConfiguration' => '

Specifies the inventory configuration.

', 'InventoryConfigurationList$member' => NULL, 'PutBucketInventoryConfigurationRequest$InventoryConfiguration' => '

Specifies the inventory configuration.

', ], ], 'InventoryConfigurationList' => [ 'base' => NULL, 'refs' => [ 'ListBucketInventoryConfigurationsOutput$InventoryConfigurationList' => '

The list of inventory configurations for a bucket.

', ], ], 'InventoryDestination' => [ 'base' => '

', 'refs' => [ 'InventoryConfiguration$Destination' => '

Contains information about where to publish the inventory results.

', ], ], 'InventoryEncryption' => [ 'base' => '

Contains the type of server-side encryption used to encrypt the inventory results.

', 'refs' => [ 'InventoryS3BucketDestination$Encryption' => '

Contains the type of server-side encryption used to encrypt the inventory results.

', ], ], 'InventoryFilter' => [ 'base' => '

', 'refs' => [ 'InventoryConfiguration$Filter' => '

Specifies an inventory filter. The inventory only includes objects that meet the filter\'s criteria.

', ], ], 'InventoryFormat' => [ 'base' => NULL, 'refs' => [ 'InventoryS3BucketDestination$Format' => '

Specifies the output format of the inventory results.

', ], ], 'InventoryFrequency' => [ 'base' => NULL, 'refs' => [ 'InventorySchedule$Frequency' => '

Specifies how frequently inventory results are produced.

', ], ], 'InventoryId' => [ 'base' => NULL, 'refs' => [ 'DeleteBucketInventoryConfigurationRequest$Id' => '

The ID used to identify the inventory configuration.

', 'GetBucketInventoryConfigurationRequest$Id' => '

The ID used to identify the inventory configuration.

', 'InventoryConfiguration$Id' => '

The ID used to identify the inventory configuration.

', 'PutBucketInventoryConfigurationRequest$Id' => '

The ID used to identify the inventory configuration.

', ], ], 'InventoryIncludedObjectVersions' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$IncludedObjectVersions' => '

Specifies which object version(s) to include in the inventory results.

', ], ], 'InventoryOptionalField' => [ 'base' => NULL, 'refs' => [ 'InventoryOptionalFields$member' => NULL, ], ], 'InventoryOptionalFields' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$OptionalFields' => '

Contains the optional fields that are included in the inventory results.

', ], ], 'InventoryS3BucketDestination' => [ 'base' => '

', 'refs' => [ 'InventoryDestination$S3BucketDestination' => '

Contains the bucket name, file format, bucket owner (optional), and prefix (optional) where inventory results are published.

', ], ], 'InventorySchedule' => [ 'base' => '

', 'refs' => [ 'InventoryConfiguration$Schedule' => '

Specifies the schedule for generating inventory results.

', ], ], 'IsEnabled' => [ 'base' => NULL, 'refs' => [ 'InventoryConfiguration$IsEnabled' => '

Specifies whether the inventory is enabled or disabled.

', ], ], 'IsLatest' => [ 'base' => NULL, 'refs' => [ 'DeleteMarkerEntry$IsLatest' => '

Specifies whether the object is (true) or is not (false) the latest version of an object.

', 'ObjectVersion$IsLatest' => '

Specifies whether the object is (true) or is not (false) the latest version of an object.

', ], ], 'IsPublic' => [ 'base' => NULL, 'refs' => [ 'PolicyStatus$IsPublic' => '

The policy status for this bucket. TRUE indicates that this bucket is public. FALSE indicates that the bucket is not public.

', ], ], 'IsTruncated' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$IsTruncated' => '

Indicates whether the returned list of analytics configurations is complete. A value of true indicates that the list is not complete and the NextContinuationToken will be provided for a subsequent request.

', 'ListBucketInventoryConfigurationsOutput$IsTruncated' => '

Indicates whether the returned list of inventory configurations is truncated in this response. A value of true indicates that the list is truncated.

', 'ListBucketMetricsConfigurationsOutput$IsTruncated' => '

Indicates whether the returned list of metrics configurations is complete. A value of true indicates that the list is not complete and the NextContinuationToken will be provided for a subsequent request.

', 'ListMultipartUploadsOutput$IsTruncated' => '

Indicates whether the returned list of multipart uploads is truncated. A value of true indicates that the list was truncated. The list can be truncated if the number of multipart uploads exceeds the limit allowed or specified by max uploads.

', 'ListObjectVersionsOutput$IsTruncated' => '

A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria. If your results were truncated, you can make a follow-up paginated request using the NextKeyMarker and NextVersionIdMarker response parameters as a starting place in another request to return the rest of the results.

', 'ListObjectsOutput$IsTruncated' => '

A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria.

', 'ListObjectsV2Output$IsTruncated' => '

A flag that indicates whether or not Amazon S3 returned all of the results that satisfied the search criteria.

', 'ListPartsOutput$IsTruncated' => '

Indicates whether the returned list of parts is truncated.

', ], ], 'JSONInput' => [ 'base' => '

', 'refs' => [ 'InputSerialization$JSON' => '

Specifies JSON as object\'s input serialization format.

', ], ], 'JSONOutput' => [ 'base' => '

', 'refs' => [ 'OutputSerialization$JSON' => '

Specifies JSON as request\'s output serialization format.

', ], ], 'JSONType' => [ 'base' => NULL, 'refs' => [ 'JSONInput$Type' => '

The type of JSON. Valid values: Document, Lines.

', ], ], 'KMSContext' => [ 'base' => NULL, 'refs' => [ 'Encryption$KMSContext' => '

If the encryption type is aws:kms, this optional value can be used to specify the encryption context for the restore results.

', ], ], 'KeyCount' => [ 'base' => NULL, 'refs' => [ 'ListObjectsV2Output$KeyCount' => '

KeyCount is the number of keys returned with this request. KeyCount will always be less than or equal to the MaxKeys field. For example, if you ask for 50 keys, your result will include no more than 50 keys.
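
A short sketch in the PHP SDK (the bucket name is a placeholder):

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$result = $s3->listObjectsV2([
    'Bucket'  => 'example-bucket', // placeholder
    'MaxKeys' => 50,               // upper bound on keys per response
]);
echo $result['KeyCount'], PHP_EOL; // always <= 50 here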

', ], ], 'KeyMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$KeyMarker' => '

The key at or after which the listing began.

', 'ListMultipartUploadsRequest$KeyMarker' => '

Together with upload-id-marker, this parameter specifies the multipart upload after which listing should begin.

', 'ListObjectVersionsOutput$KeyMarker' => '

Marks the last Key returned in a truncated response.

', 'ListObjectVersionsRequest$KeyMarker' => '

Specifies the key to start with when listing objects in a bucket.

', ], ], 'KeyPrefixEquals' => [ 'base' => NULL, 'refs' => [ 'Condition$KeyPrefixEquals' => '

The object key name prefix when the redirect is applied. For example, to redirect requests for ExamplePage.html, the key prefix will be ExamplePage.html. To redirect requests for all pages with the prefix docs/, the key prefix will be docs/, which identifies all objects in the docs/ folder. Required when the parent element Condition is specified and sibling HttpErrorCodeReturnedEquals is not specified. If both conditions are specified, both must be true for the redirect to be applied.

', ], ], 'LambdaFunctionArn' => [ 'base' => NULL, 'refs' => [ 'LambdaFunctionConfiguration$LambdaFunctionArn' => '

The Amazon Resource Name (ARN) of the Lambda cloud function that Amazon S3 can invoke when it detects events of the specified type.

', ], ], 'LambdaFunctionConfiguration' => [ 'base' => '

A container for specifying the configuration for AWS Lambda notifications.

', 'refs' => [ 'LambdaFunctionConfigurationList$member' => NULL, ], ], 'LambdaFunctionConfigurationList' => [ 'base' => NULL, 'refs' => [ 'NotificationConfiguration$LambdaFunctionConfigurations' => '

', ], ], 'LastModified' => [ 'base' => NULL, 'refs' => [ 'CopyObjectResult$LastModified' => '

', 'CopyPartResult$LastModified' => '

Date and time at which the object was uploaded.

', 'DeleteMarkerEntry$LastModified' => '

Date and time the object was last modified.

', 'GetObjectOutput$LastModified' => '

Last modified date of the object.

', 'HeadObjectOutput$LastModified' => '

Last modified date of the object.

', 'Object$LastModified' => '

', 'ObjectVersion$LastModified' => '

Date and time the object was last modified.

', 'Part$LastModified' => '

Date and time at which the part was uploaded.

', ], ], 'LifecycleConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketLifecycleRequest$LifecycleConfiguration' => '

', ], ], 'LifecycleExpiration' => [ 'base' => '

', 'refs' => [ 'LifecycleRule$Expiration' => '

', 'Rule$Expiration' => '

', ], ], 'LifecycleRule' => [ 'base' => '

', 'refs' => [ 'LifecycleRules$member' => NULL, ], ], 'LifecycleRuleAndOperator' => [ 'base' => '

This is used in a Lifecycle Rule Filter to apply a logical AND to two or more predicates. The Lifecycle Rule will apply to any object matching all of the predicates configured inside the And operator.

', 'refs' => [ 'LifecycleRuleFilter$And' => NULL, ], ], 'LifecycleRuleFilter' => [ 'base' => '

The Filter is used to identify objects that a Lifecycle Rule applies to. A Filter must have exactly one of Prefix, Tag, or And specified.

', 'refs' => [ 'LifecycleRule$Filter' => NULL, ], ], 'LifecycleRules' => [ 'base' => NULL, 'refs' => [ 'BucketLifecycleConfiguration$Rules' => '

', 'GetBucketLifecycleConfigurationOutput$Rules' => '

', ], ], 'ListBucketAnalyticsConfigurationsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketAnalyticsConfigurationsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketInventoryConfigurationsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketInventoryConfigurationsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketMetricsConfigurationsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketMetricsConfigurationsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListBucketsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListMultipartUploadsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListMultipartUploadsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectVersionsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectVersionsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsV2Output' => [ 'base' => NULL, 'refs' => [], ], 'ListObjectsV2Request' => [ 'base' => NULL, 'refs' => [], ], 'ListPartsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListPartsRequest' => [ 'base' => NULL, 'refs' => [], ], 'Location' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$Location' => '

', 'CreateBucketOutput$Location' => '

', ], ], 'LocationPrefix' => [ 'base' => NULL, 'refs' => [ 'S3Location$Prefix' => '

The prefix that is prepended to the restore results for this request.

', ], ], 'LoggingEnabled' => [ 'base' => '

Container for logging information. Presence of this element indicates that logging is enabled. Parameters TargetBucket and TargetPrefix are required in this case.

', 'refs' => [ 'BucketLoggingStatus$LoggingEnabled' => NULL, 'GetBucketLoggingOutput$LoggingEnabled' => NULL, ], ], 'MFA' => [ 'base' => NULL, 'refs' => [ 'DeleteObjectRequest$MFA' => '

The concatenation of the authentication device\'s serial number, a space, and the value that is displayed on your authentication device.

', 'DeleteObjectsRequest$MFA' => '

The concatenation of the authentication device\'s serial number, a space, and the value that is displayed on your authentication device.

', 'PutBucketVersioningRequest$MFA' => '

The concatenation of the authentication device\'s serial number, a space, and the value that is displayed on your authentication device.

', ], ], 'MFADelete' => [ 'base' => NULL, 'refs' => [ 'VersioningConfiguration$MFADelete' => '

Specifies whether MFA delete is enabled in the bucket versioning configuration. This element is only returned if the bucket has been configured with MFA delete. If the bucket has never been so configured, this element is not returned.

', ], ], 'MFADeleteStatus' => [ 'base' => NULL, 'refs' => [ 'GetBucketVersioningOutput$MFADelete' => '

Specifies whether MFA delete is enabled in the bucket versioning configuration. This element is only returned if the bucket has been configured with MFA delete. If the bucket has never been so configured, this element is not returned.

', ], ], 'Marker' => [ 'base' => NULL, 'refs' => [ 'ListObjectsOutput$Marker' => '

', 'ListObjectsRequest$Marker' => '

Specifies the key to start with when listing objects in a bucket.

', ], ], 'MaxAgeSeconds' => [ 'base' => NULL, 'refs' => [ 'CORSRule$MaxAgeSeconds' => '

The time in seconds that your browser is to cache the preflight response for the specified resource.

', ], ], 'MaxKeys' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$MaxKeys' => '

', 'ListObjectVersionsRequest$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', 'ListObjectsOutput$MaxKeys' => '

', 'ListObjectsRequest$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', 'ListObjectsV2Output$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', 'ListObjectsV2Request$MaxKeys' => '

Sets the maximum number of keys returned in the response. The response might contain fewer keys but will never contain more.

', ], ], 'MaxParts' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$MaxParts' => '

Maximum number of parts that were allowed in the response.

', 'ListPartsRequest$MaxParts' => '

Sets the maximum number of parts to return.

', ], ], 'MaxUploads' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$MaxUploads' => '

Maximum number of multipart uploads that could have been included in the response.

', 'ListMultipartUploadsRequest$MaxUploads' => '

Sets the maximum number of multipart uploads, from 1 to 1,000, to return in the response body. 1,000 is the maximum number of uploads that can be returned in a response.

', ], ], 'Message' => [ 'base' => NULL, 'refs' => [ 'Error$Message' => '

', ], ], 'Metadata' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$Metadata' => '

A map of metadata to store with the object in S3.

', 'CreateMultipartUploadRequest$Metadata' => '

A map of metadata to store with the object in S3.

', 'GetObjectOutput$Metadata' => '

A map of metadata to store with the object in S3.

', 'HeadObjectOutput$Metadata' => '

A map of metadata to store with the object in S3.

', 'PutObjectRequest$Metadata' => '

A map of metadata to store with the object in S3.

', ], ], 'MetadataDirective' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$MetadataDirective' => '

Specifies whether the metadata is copied from the source object or replaced with metadata provided in the request.

', ], ], 'MetadataEntry' => [ 'base' => '

A metadata key-value pair to store with an object.

', 'refs' => [ 'UserMetadata$member' => NULL, ], ], 'MetadataKey' => [ 'base' => NULL, 'refs' => [ 'Metadata$key' => NULL, 'MetadataEntry$Name' => '

', ], ], 'MetadataValue' => [ 'base' => NULL, 'refs' => [ 'Metadata$value' => NULL, 'MetadataEntry$Value' => '

', ], ], 'MetricsAndOperator' => [ 'base' => '

', 'refs' => [ 'MetricsFilter$And' => '

A conjunction (logical AND) of predicates, which is used in evaluating a metrics filter. The operator must have at least two predicates, and an object must match all of the predicates in order for the filter to apply.

', ], ], 'MetricsConfiguration' => [ 'base' => '

', 'refs' => [ 'GetBucketMetricsConfigurationOutput$MetricsConfiguration' => '

Specifies the metrics configuration.

', 'MetricsConfigurationList$member' => NULL, 'PutBucketMetricsConfigurationRequest$MetricsConfiguration' => '

Specifies the metrics configuration.

', ], ], 'MetricsConfigurationList' => [ 'base' => NULL, 'refs' => [ 'ListBucketMetricsConfigurationsOutput$MetricsConfigurationList' => '

The list of metrics configurations for a bucket.

', ], ], 'MetricsFilter' => [ 'base' => '

', 'refs' => [ 'MetricsConfiguration$Filter' => '

Specifies a metrics configuration filter. The metrics configuration will only include objects that meet the filter\'s criteria. A filter must be a prefix, a tag, or a conjunction (MetricsAndOperator).

', ], ], 'MetricsId' => [ 'base' => NULL, 'refs' => [ 'DeleteBucketMetricsConfigurationRequest$Id' => '

The ID used to identify the metrics configuration.

', 'GetBucketMetricsConfigurationRequest$Id' => '

The ID used to identify the metrics configuration.

', 'MetricsConfiguration$Id' => '

The ID used to identify the metrics configuration.

', 'PutBucketMetricsConfigurationRequest$Id' => '

The ID used to identify the metrics configuration.

', ], ], 'MissingMeta' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$MissingMeta' => '

This is set to the number of metadata entries not returned in x-amz-meta headers. This can happen if you create metadata using an API like SOAP that supports more flexible metadata than the REST API. For example, using SOAP, you can create metadata whose values are not legal HTTP headers.

', 'HeadObjectOutput$MissingMeta' => '

This is set to the number of metadata entries not returned in x-amz-meta headers. This can happen if you create metadata using an API like SOAP that supports more flexible metadata than the REST API. For example, using SOAP, you can create metadata whose values are not legal HTTP headers.

', ], ], 'MultipartUpload' => [ 'base' => '

', 'refs' => [ 'MultipartUploadList$member' => NULL, ], ], 'MultipartUploadId' => [ 'base' => NULL, 'refs' => [ 'AbortMultipartUploadRequest$UploadId' => '

', 'CompleteMultipartUploadRequest$UploadId' => '

', 'CreateMultipartUploadOutput$UploadId' => '

ID for the initiated multipart upload.

', 'ListPartsOutput$UploadId' => '

Upload ID identifying the multipart upload whose parts are being listed.

', 'ListPartsRequest$UploadId' => '

Upload ID identifying the multipart upload whose parts are being listed.

', 'MultipartUpload$UploadId' => '

Upload ID that identifies the multipart upload.

', 'UploadPartCopyRequest$UploadId' => '

Upload ID identifying the multipart upload whose part is being copied.

', 'UploadPartRequest$UploadId' => '

Upload ID identifying the multipart upload whose part is being uploaded.

', ], ], 'MultipartUploadList' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$Uploads' => '

', ], ], 'NextKeyMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$NextKeyMarker' => '

When a list is truncated, this element specifies the value that should be used for the key-marker request parameter in a subsequent request.

', 'ListObjectVersionsOutput$NextKeyMarker' => '

Use this value for the key marker request parameter in a subsequent request.

', ], ], 'NextMarker' => [ 'base' => NULL, 'refs' => [ 'ListObjectsOutput$NextMarker' => '

When the response is truncated (the IsTruncated element value in the response is true), you can use the key name in this field as the marker in the subsequent request to get the next set of objects. Amazon S3 lists objects in alphabetical order. Note: This element is returned only if you have the delimiter request parameter specified. If the response does not include the NextMarker and it is truncated, you can use the value of the last Key in the response as the marker in the subsequent request to get the next set of object keys.

', ], ], 'NextPartNumberMarker' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$NextPartNumberMarker' => '

When a list is truncated, this element specifies the last part in the list, as well as the value to use for the part-number-marker request parameter in a subsequent request.

', ], ], 'NextToken' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$NextContinuationToken' => '

NextContinuationToken is sent when IsTruncated is true, which indicates that there are more analytics configurations to list. The next request must include this NextContinuationToken. The token is obfuscated and is not a usable value.

', 'ListBucketInventoryConfigurationsOutput$NextContinuationToken' => '

The marker used to continue this inventory configuration listing. Use the NextContinuationToken from this response to continue the listing in a subsequent request. The continuation token is an opaque value that Amazon S3 understands.

', 'ListBucketMetricsConfigurationsOutput$NextContinuationToken' => '

The marker used to continue a metrics configuration listing that has been truncated. Use the NextContinuationToken from a previously truncated list response to continue the listing. The continuation token is an opaque value that Amazon S3 understands.

', 'ListObjectsV2Output$NextContinuationToken' => '

NextContinuationToken is sent when IsTruncated is true, which means there are more keys in the bucket that can be listed. Subsequent list requests to Amazon S3 can be continued with this NextContinuationToken. NextContinuationToken is obfuscated and is not a real key.
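
A minimal pagination sketch in the PHP SDK (the bucket name is a placeholder); the SDK's getPaginator('ListObjectsV2', ...) wraps the same loop:

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$params = ['Bucket' => 'example-bucket']; // placeholder
do {
    $result = $s3->listObjectsV2($params);
    foreach ($result['Contents'] ?? [] as $object) {
        echo $object['Key'], PHP_EOL;
    }
    // Feed the opaque token back verbatim as ContinuationToken for the next page.
    $params['ContinuationToken'] = $result['NextContinuationToken'] ?? null;
} while ($result['IsTruncated']);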

', ], ], 'NextUploadIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$NextUploadIdMarker' => '

When a list is truncated, this element specifies the value that should be used for the upload-id-marker request parameter in a subsequent request.

', ], ], 'NextVersionIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$NextVersionIdMarker' => '

Use this value for the next version id marker parameter in a subsequent request.

', ], ], 'NoSuchBucket' => [ 'base' => '

The specified bucket does not exist.

', 'refs' => [], ], 'NoSuchKey' => [ 'base' => '

The specified key does not exist.

', 'refs' => [], ], 'NoSuchUpload' => [ 'base' => '

The specified multipart upload does not exist.

', 'refs' => [], ], 'NoncurrentVersionExpiration' => [ 'base' => '

Specifies when noncurrent object versions expire. Upon expiration, Amazon S3 permanently deletes the noncurrent object versions. You set this lifecycle configuration action on a bucket that has versioning enabled (or suspended) to request that Amazon S3 delete noncurrent object versions at a specific period in the object\'s lifetime.

', 'refs' => [ 'LifecycleRule$NoncurrentVersionExpiration' => NULL, 'Rule$NoncurrentVersionExpiration' => NULL, ], ], 'NoncurrentVersionTransition' => [ 'base' => '

Container for the transition rule that describes when noncurrent objects transition to the STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING or GLACIER storage class. If your bucket is versioning-enabled (or versioning is suspended), you can set this action to request that Amazon S3 transition noncurrent object versions to the STANDARD_IA, ONEZONE_IA, INTELLIGENT_TIERING or GLACIER storage class at a specific period in the object\'s lifetime.

', 'refs' => [ 'NoncurrentVersionTransitionList$member' => NULL, 'Rule$NoncurrentVersionTransition' => NULL, ], ], 'NoncurrentVersionTransitionList' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$NoncurrentVersionTransitions' => '

', ], ], 'NotificationConfiguration' => [ 'base' => '

A container for specifying the notification configuration of the bucket. If this element is empty, notifications are turned off for the bucket.

', 'refs' => [ 'PutBucketNotificationConfigurationRequest$NotificationConfiguration' => NULL, ], ], 'NotificationConfigurationDeprecated' => [ 'base' => NULL, 'refs' => [ 'PutBucketNotificationRequest$NotificationConfiguration' => '

', ], ], 'NotificationConfigurationFilter' => [ 'base' => '

A container for object key name filtering rules. For information about key name filtering, see Configuring Event Notifications in the Amazon Simple Storage Service Developer Guide.

', 'refs' => [ 'LambdaFunctionConfiguration$Filter' => NULL, 'QueueConfiguration$Filter' => NULL, 'TopicConfiguration$Filter' => NULL, ], ], 'NotificationId' => [ 'base' => '

An optional unique identifier for configurations in a notification configuration. If you don\'t provide one, Amazon S3 will assign an ID.

', 'refs' => [ 'CloudFunctionConfiguration$Id' => NULL, 'LambdaFunctionConfiguration$Id' => NULL, 'QueueConfiguration$Id' => NULL, 'QueueConfigurationDeprecated$Id' => NULL, 'TopicConfiguration$Id' => NULL, 'TopicConfigurationDeprecated$Id' => NULL, ], ], 'Object' => [ 'base' => '

', 'refs' => [ 'ObjectList$member' => NULL, ], ], 'ObjectAlreadyInActiveTierError' => [ 'base' => '

This operation is not allowed against this storage tier.

', 'refs' => [], ], 'ObjectCannedACL' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ACL' => '

The canned ACL to apply to the object.

', 'CreateMultipartUploadRequest$ACL' => '

The canned ACL to apply to the object.

', 'PutObjectAclRequest$ACL' => '

The canned ACL to apply to the object.

', 'PutObjectRequest$ACL' => '

The canned ACL to apply to the object.

', 'S3Location$CannedACL' => '

The canned ACL to apply to the restore results.

', ], ], 'ObjectIdentifier' => [ 'base' => '

', 'refs' => [ 'ObjectIdentifierList$member' => NULL, ], ], 'ObjectIdentifierList' => [ 'base' => NULL, 'refs' => [ 'Delete$Objects' => '

', ], ], 'ObjectKey' => [ 'base' => NULL, 'refs' => [ 'AbortMultipartUploadRequest$Key' => '

', 'CompleteMultipartUploadOutput$Key' => '

', 'CompleteMultipartUploadRequest$Key' => '

', 'CopyObjectRequest$Key' => '

', 'CreateMultipartUploadOutput$Key' => '

Object key for which the multipart upload was initiated.

', 'CreateMultipartUploadRequest$Key' => '

', 'DeleteMarkerEntry$Key' => '

The object key.

', 'DeleteObjectRequest$Key' => '

', 'DeleteObjectTaggingRequest$Key' => '

', 'DeletedObject$Key' => '

', 'Error$Key' => '

', 'ErrorDocument$Key' => '

The object key name to use when a 4XX class error occurs.

', 'GetObjectAclRequest$Key' => '

', 'GetObjectLegalHoldRequest$Key' => '

The key name for the object whose Legal Hold status you want to retrieve.

', 'GetObjectRequest$Key' => '

', 'GetObjectRetentionRequest$Key' => '

The key name for the object whose retention settings you want to retrieve.

', 'GetObjectTaggingRequest$Key' => '

', 'GetObjectTorrentRequest$Key' => '

', 'HeadObjectRequest$Key' => '

', 'ListPartsOutput$Key' => '

Object key for which the multipart upload was initiated.

', 'ListPartsRequest$Key' => '

', 'MultipartUpload$Key' => '

Key of the object for which the multipart upload was initiated.

', 'Object$Key' => '

', 'ObjectIdentifier$Key' => '

Key name of the object to delete.

', 'ObjectVersion$Key' => '

The object key.

', 'PutObjectAclRequest$Key' => '

', 'PutObjectLegalHoldRequest$Key' => '

The key name for the object that you want to place a Legal Hold on.

', 'PutObjectRequest$Key' => '

Object key for which the PUT operation was initiated.

', 'PutObjectRetentionRequest$Key' => '

The key name for the object that you want to apply this Object Retention configuration to.

', 'PutObjectTaggingRequest$Key' => '

', 'RestoreObjectRequest$Key' => '

', 'SelectObjectContentRequest$Key' => '

The object key.

', 'Tag$Key' => '

Name of the tag.

', 'UploadPartCopyRequest$Key' => '

', 'UploadPartRequest$Key' => '

Object key for which the multipart upload was initiated.

', ], ], 'ObjectList' => [ 'base' => NULL, 'refs' => [ 'ListObjectsOutput$Contents' => '

', 'ListObjectsV2Output$Contents' => '

Metadata about each object returned.

', ], ], 'ObjectLockConfiguration' => [ 'base' => '

The container element for Object Lock configuration parameters.

', 'refs' => [ 'GetObjectLockConfigurationOutput$ObjectLockConfiguration' => '

The specified bucket\'s Object Lock configuration.

', 'PutObjectLockConfigurationRequest$ObjectLockConfiguration' => '

The Object Lock configuration that you want to apply to the specified bucket.

', ], ], 'ObjectLockEnabled' => [ 'base' => NULL, 'refs' => [ 'ObjectLockConfiguration$ObjectLockEnabled' => '

Indicates whether this bucket has an Object Lock configuration enabled.

', ], ], 'ObjectLockEnabledForBucket' => [ 'base' => NULL, 'refs' => [ 'CreateBucketRequest$ObjectLockEnabledForBucket' => '

Specifies whether you want S3 Object Lock to be enabled for the new bucket.

', ], ], 'ObjectLockLegalHold' => [ 'base' => '

A Legal Hold configuration for an object.

', 'refs' => [ 'GetObjectLegalHoldOutput$LegalHold' => '

The current Legal Hold status for the specified object.

', 'PutObjectLegalHoldRequest$LegalHold' => '

Container element for the Legal Hold configuration you want to apply to the specified object.
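
A hedged sketch of applying that container in the PHP SDK (bucket and key are placeholders; the bucket is assumed to have been created with Object Lock enabled):

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$s3->putObjectLegalHold([
    'Bucket'    => 'example-bucket',   // placeholder; needs Object Lock enabled
    'Key'       => 'evidence.pdf',     // placeholder
    'LegalHold' => ['Status' => 'ON'], // 'OFF' releases the hold
]);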

', ], ], 'ObjectLockLegalHoldStatus' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ObjectLockLegalHoldStatus' => '

Specifies whether you want to apply a Legal Hold to the copied object.

', 'CreateMultipartUploadRequest$ObjectLockLegalHoldStatus' => '

Specifies whether you want to apply a Legal Hold to the uploaded object.

', 'GetObjectOutput$ObjectLockLegalHoldStatus' => '

Indicates whether this object has an active legal hold. This field is only returned if you have permission to view an object\'s legal hold status.

', 'HeadObjectOutput$ObjectLockLegalHoldStatus' => '

The Legal Hold status for the specified object.

', 'ObjectLockLegalHold$Status' => '

Indicates whether the specified object has a Legal Hold in place.

', 'PutObjectRequest$ObjectLockLegalHoldStatus' => '

The Legal Hold status that you want to apply to the specified object.

', ], ], 'ObjectLockMode' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ObjectLockMode' => '

The Object Lock mode that you want to apply to the copied object.

', 'CreateMultipartUploadRequest$ObjectLockMode' => '

Specifies the Object Lock mode that you want to apply to the uploaded object.

', 'GetObjectOutput$ObjectLockMode' => '

The Object Lock mode currently in place for this object.

', 'HeadObjectOutput$ObjectLockMode' => '

The Object Lock mode currently in place for this object.

', 'PutObjectRequest$ObjectLockMode' => '

The Object Lock mode that you want to apply to this object.

', ], ], 'ObjectLockRetainUntilDate' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$ObjectLockRetainUntilDate' => '

The date and time when you want the copied object\'s Object Lock to expire.

', 'CreateMultipartUploadRequest$ObjectLockRetainUntilDate' => '

Specifies the date and time when you want the Object Lock to expire.

', 'GetObjectOutput$ObjectLockRetainUntilDate' => '

The date and time when this object\'s Object Lock will expire.

', 'HeadObjectOutput$ObjectLockRetainUntilDate' => '

The date and time when this object\'s Object Lock will expire.

', 'PutObjectRequest$ObjectLockRetainUntilDate' => '

The date and time when you want this object\'s Object Lock to expire.

', ], ], 'ObjectLockRetention' => [ 'base' => '

A Retention configuration for an object.

', 'refs' => [ 'GetObjectRetentionOutput$Retention' => '

The container element for an object\'s retention settings.

', 'PutObjectRetentionRequest$Retention' => '

The container element for the Object Retention configuration.

', ], ], 'ObjectLockRetentionMode' => [ 'base' => NULL, 'refs' => [ 'DefaultRetention$Mode' => '

The default Object Lock retention mode you want to apply to new objects placed in the specified bucket.

', 'ObjectLockRetention$Mode' => '

Indicates the Retention mode for the specified object.

', ], ], 'ObjectLockRule' => [ 'base' => '

The container element for an Object Lock rule.

', 'refs' => [ 'ObjectLockConfiguration$Rule' => '

The Object Lock rule in place for the specified object.

', ], ], 'ObjectLockToken' => [ 'base' => NULL, 'refs' => [ 'PutObjectLockConfigurationRequest$Token' => '

A token to allow Object Lock to be enabled for an existing bucket.

', ], ], 'ObjectNotInActiveTierError' => [ 'base' => '

The source object of the COPY operation is not in the active tier and is only stored in Amazon Glacier.

', 'refs' => [], ], 'ObjectStorageClass' => [ 'base' => NULL, 'refs' => [ 'Object$StorageClass' => '

The class of storage used to store the object.

', ], ], 'ObjectVersion' => [ 'base' => '

', 'refs' => [ 'ObjectVersionList$member' => NULL, ], ], 'ObjectVersionId' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$VersionId' => '

Version of the object.

', 'CopyObjectOutput$VersionId' => '

Version ID of the newly created copy.

', 'DeleteMarkerEntry$VersionId' => '

Version ID of an object.

', 'DeleteObjectOutput$VersionId' => '

Returns the version ID of the delete marker created as a result of the DELETE operation.

', 'DeleteObjectRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'DeleteObjectTaggingOutput$VersionId' => '

The versionId of the object the tag-set was removed from.

', 'DeleteObjectTaggingRequest$VersionId' => '

The versionId of the object that the tag-set will be removed from.

', 'DeletedObject$VersionId' => '

', 'Error$VersionId' => '

', 'GetObjectAclRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'GetObjectLegalHoldRequest$VersionId' => '

The version ID of the object whose Legal Hold status you want to retrieve.

', 'GetObjectOutput$VersionId' => '

Version of the object.

', 'GetObjectRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'GetObjectRetentionRequest$VersionId' => '

The version ID for the object whose retention settings you want to retrieve.

', 'GetObjectTaggingOutput$VersionId' => '

', 'GetObjectTaggingRequest$VersionId' => '

', 'HeadObjectOutput$VersionId' => '

Version of the object.

', 'HeadObjectRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'ObjectIdentifier$VersionId' => '

VersionId for the specific version of the object to delete.

', 'ObjectVersion$VersionId' => '

Version ID of an object.

', 'PutObjectAclRequest$VersionId' => '

VersionId used to reference a specific version of the object.

', 'PutObjectLegalHoldRequest$VersionId' => '

The version ID of the object that you want to place a Legal Hold on.

', 'PutObjectOutput$VersionId' => '

Version of the object.

', 'PutObjectRetentionRequest$VersionId' => '

The version ID for the object that you want to apply this Object Retention configuration to.

', 'PutObjectTaggingOutput$VersionId' => '

', 'PutObjectTaggingRequest$VersionId' => '

', 'RestoreObjectRequest$VersionId' => '

', ], ], 'ObjectVersionList' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$Versions' => '

', ], ], 'ObjectVersionStorageClass' => [ 'base' => NULL, 'refs' => [ 'ObjectVersion$StorageClass' => '

The class of storage used to store the object.

', ], ], 'OutputLocation' => [ 'base' => '

Describes the location where the restore job\'s output is stored.

', 'refs' => [ 'RestoreRequest$OutputLocation' => '

Describes the location where the restore job\'s output is stored.

', ], ], 'OutputSerialization' => [ 'base' => '

Describes how results of the Select job are serialized.

', 'refs' => [ 'SelectObjectContentRequest$OutputSerialization' => '

Describes the format of the data that you want Amazon S3 to return in response.

', 'SelectParameters$OutputSerialization' => '

Describes how the results of the Select job are serialized.

', ], ], 'Owner' => [ 'base' => '

', 'refs' => [ 'AccessControlPolicy$Owner' => '

', 'DeleteMarkerEntry$Owner' => '

', 'GetBucketAclOutput$Owner' => '

', 'GetObjectAclOutput$Owner' => '

', 'ListBucketsOutput$Owner' => '

', 'ListPartsOutput$Owner' => '

', 'MultipartUpload$Owner' => '

', 'Object$Owner' => '

', 'ObjectVersion$Owner' => '

', ], ], 'OwnerOverride' => [ 'base' => NULL, 'refs' => [ 'AccessControlTranslation$Owner' => '

The override value for the owner of the replica object.

', ], ], 'ParquetInput' => [ 'base' => '

', 'refs' => [ 'InputSerialization$Parquet' => '

Specifies Parquet as object\'s input serialization format.

', ], ], 'Part' => [ 'base' => '

', 'refs' => [ 'Parts$member' => NULL, ], ], 'PartNumber' => [ 'base' => NULL, 'refs' => [ 'CompletedPart$PartNumber' => '

Part number that identifies the part. This is a positive integer between 1 and 10,000.

', 'GetObjectRequest$PartNumber' => '

Part number of the object being read. This is a positive integer between 1 and 10,000. Effectively performs a \'ranged\' GET request for the part specified. Useful for downloading just a part of an object.
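
A minimal sketch of a part-level GET in the PHP SDK (bucket and key are placeholders; the object is assumed to have been uploaded via multipart upload):

require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);
$result = $s3->getObject([
    'Bucket'     => 'example-bucket', // placeholder
    'Key'        => 'large-object',   // placeholder
    'PartNumber' => 1,                // fetch only the bytes of part 1
]);
echo $result['PartsCount'] ?? 1, PHP_EOL; // total parts, returned with part-level requests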

', 'HeadObjectRequest$PartNumber' => '

Part number of the object being read. This is a positive integer between 1 and 10,000. Effectively performs a \'ranged\' HEAD request for the part specified. Useful for querying the size of the part and the number of parts in this object.

', 'Part$PartNumber' => '

Part number identifying the part. This is a positive integer between 1 and 10,000.

', 'UploadPartCopyRequest$PartNumber' => '

Part number of part being copied. This is a positive integer between 1 and 10,000.

', 'UploadPartRequest$PartNumber' => '

Part number of part being uploaded. This is a positive integer between 1 and 10,000.

', ], ], 'PartNumberMarker' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$PartNumberMarker' => '

Part number after which listing begins.

', 'ListPartsRequest$PartNumberMarker' => '

Specifies the part after which listing should begin. Only parts with higher part numbers will be listed.

', ], ], 'Parts' => [ 'base' => NULL, 'refs' => [ 'ListPartsOutput$Parts' => '

', ], ], 'PartsCount' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$PartsCount' => '

The count of parts this object has.

', 'HeadObjectOutput$PartsCount' => '

The count of parts this object has.

', ], ], 'Payer' => [ 'base' => NULL, 'refs' => [ 'GetBucketRequestPaymentOutput$Payer' => '

Specifies who pays for the download and request fees.

', 'RequestPaymentConfiguration$Payer' => '

Specifies who pays for the download and request fees.

', ], ], 'Permission' => [ 'base' => NULL, 'refs' => [ 'Grant$Permission' => '

Specifies the permission given to the grantee.

', ], ], 'Policy' => [ 'base' => NULL, 'refs' => [ 'GetBucketPolicyOutput$Policy' => '

The bucket policy as a JSON document.

', 'PutBucketPolicyRequest$Policy' => '

The bucket policy as a JSON document.

', ], ], 'PolicyStatus' => [ 'base' => '

The container element for a bucket\'s policy status.

', 'refs' => [ 'GetBucketPolicyStatusOutput$PolicyStatus' => '

The policy status for the specified bucket.

', ], ], 'Prefix' => [ 'base' => NULL, 'refs' => [ 'AnalyticsAndOperator$Prefix' => '

The prefix to use when evaluating an AND predicate.

', 'AnalyticsFilter$Prefix' => '

The prefix to use when evaluating an analytics filter.

', 'AnalyticsS3BucketDestination$Prefix' => '

The prefix to use when exporting data. The exported data begins with this prefix.

', 'CommonPrefix$Prefix' => '

', 'InventoryFilter$Prefix' => '

The prefix that an object must have to be included in the inventory results.

', 'InventoryS3BucketDestination$Prefix' => '

The prefix that is prepended to all inventory results.

', 'LifecycleRule$Prefix' => '

Prefix identifying one or more objects to which the rule applies. This is no longer used; use Filter instead.

', 'LifecycleRuleAndOperator$Prefix' => '

', 'LifecycleRuleFilter$Prefix' => '

Prefix identifying one or more objects to which the rule applies.

', 'ListMultipartUploadsOutput$Prefix' => '

When a prefix is provided in the request, this field contains the specified prefix. The result contains only keys starting with the specified prefix.

', 'ListMultipartUploadsRequest$Prefix' => '

Lists in-progress uploads only for those keys that begin with the specified prefix.

', 'ListObjectVersionsOutput$Prefix' => '

', 'ListObjectVersionsRequest$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'ListObjectsOutput$Prefix' => '

', 'ListObjectsRequest$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'ListObjectsV2Output$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'ListObjectsV2Request$Prefix' => '

Limits the response to keys that begin with the specified prefix.

', 'MetricsAndOperator$Prefix' => '

The prefix used when evaluating an AND predicate.

', 'MetricsFilter$Prefix' => '

The prefix used when evaluating a metrics filter.

', 'ReplicationRule$Prefix' => '

An object keyname prefix that identifies the object or objects to which the rule applies. The maximum prefix length is 1,024 characters.

', 'ReplicationRuleAndOperator$Prefix' => '

', 'ReplicationRuleFilter$Prefix' => '

An object keyname prefix that identifies the subset of objects to which the rule applies.

', 'Rule$Prefix' => '

Prefix identifying one or more objects to which the rule applies.

', ], ], 'Priority' => [ 'base' => NULL, 'refs' => [ 'ReplicationRule$Priority' => '

The priority associated with the rule. If you specify multiple rules in a replication configuration, Amazon S3 prioritizes the rules to prevent conflicts when filtering. If two or more rules identify the same object based on a specified filter, the rule with higher priority takes precedence.

For more information, see Cross-Region Replication (CRR) in the Amazon S3 Developer Guide.

', ], ], 'Progress' => [ 'base' => '

', 'refs' => [ 'ProgressEvent$Details' => '

The Progress event details.

', ], ], 'ProgressEvent' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentEventStream$Progress' => '

The Progress Event.

', ], ], 'Protocol' => [ 'base' => NULL, 'refs' => [ 'Redirect$Protocol' => '

Protocol to use (http, https) when redirecting requests. The default is the protocol that is used in the original request.

', 'RedirectAllRequestsTo$Protocol' => '

Protocol to use (http, https) when redirecting requests. The default is the protocol that is used in the original request.

', ], ], 'PublicAccessBlockConfiguration' => [ 'base' => '

', 'refs' => [ 'GetPublicAccessBlockOutput$PublicAccessBlockConfiguration' => '

The PublicAccessBlock configuration currently in effect for this Amazon S3 bucket.

', 'PutPublicAccessBlockRequest$PublicAccessBlockConfiguration' => '

The PublicAccessBlock configuration that you want to apply to this Amazon S3 bucket. You can enable the configuration options in any combination. For more information about when Amazon S3 considers a bucket or object public, see The Meaning of "Public" in the Amazon Simple Storage Service Developer Guide.
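As a rough illustration only, here is how such a configuration could be applied with the AWS SDK for PHP; the bucket name and region are placeholder assumptions:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Block every form of public access for the (placeholder) bucket.
$s3->putPublicAccessBlock([
    'Bucket' => 'examplebucket',
    'PublicAccessBlockConfiguration' => [
        'BlockPublicAcls'       => true,
        'IgnorePublicAcls'      => true,
        'BlockPublicPolicy'     => true,
        'RestrictPublicBuckets' => true,
    ],
]);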

', ], ], 'PutBucketAccelerateConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketAnalyticsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketCorsRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketEncryptionRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketInventoryConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketLifecycleConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketLifecycleRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketLoggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketMetricsConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketNotificationConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketNotificationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketPolicyRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketReplicationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketRequestPaymentRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketVersioningRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutBucketWebsiteRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectAclOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectAclRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLegalHoldOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLegalHoldRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLockConfigurationOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectLockConfigurationRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectRetentionOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectRetentionRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectTaggingOutput' => [ 'base' => NULL, 'refs' => [], ], 'PutObjectTaggingRequest' => [ 'base' => NULL, 'refs' => [], ], 'PutPublicAccessBlockRequest' => [ 'base' => NULL, 'refs' => [], ], 'QueueArn' => [ 'base' => NULL, 'refs' => [ 'QueueConfiguration$QueueArn' => '

The Amazon Resource Name (ARN) of the Amazon SQS queue to which Amazon S3 will publish a message when it detects events of the specified type.

', 'QueueConfigurationDeprecated$Queue' => '

', ], ], 'QueueConfiguration' => [ 'base' => '

A container for specifying the configuration for publication of messages to an Amazon Simple Queue Service (Amazon SQS) queue when Amazon S3 detects specified events.

', 'refs' => [ 'QueueConfigurationList$member' => NULL, ], ], 'QueueConfigurationDeprecated' => [ 'base' => '

', 'refs' => [ 'NotificationConfigurationDeprecated$QueueConfiguration' => '

', ], ], 'QueueConfigurationList' => [ 'base' => NULL, 'refs' => [ 'NotificationConfiguration$QueueConfigurations' => '

', ], ], 'Quiet' => [ 'base' => NULL, 'refs' => [ 'Delete$Quiet' => '

Element to enable quiet mode for the request. When you add this element, you must set its value to true.

', ], ], 'QuoteCharacter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$QuoteCharacter' => '

Value used for escaping where the field delimiter is part of the value.

', 'CSVOutput$QuoteCharacter' => '

The value used for escaping where the field delimiter is part of the value.

', ], ], 'QuoteEscapeCharacter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$QuoteEscapeCharacter' => '

The single character used for escaping the quote character inside an already escaped value.

', 'CSVOutput$QuoteEscapeCharacter' => '

The single character used for escaping the quote character inside an already escaped value.

', ], ], 'QuoteFields' => [ 'base' => NULL, 'refs' => [ 'CSVOutput$QuoteFields' => '

Indicates whether or not all output fields should be quoted.

', ], ], 'Range' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$Range' => '

Downloads the specified byte range of an object. For more information about the HTTP Range header, go to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.
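For illustration, a minimal sketch of a ranged read with the AWS SDK for PHP, reusing the bucket, key, and range from the byte-range example later in this diff:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Fetch only the first ten bytes of the object.
$result = $s3->getObject([
    'Bucket' => 'examplebucket',
    'Key'    => 'SampleFile.txt',
    'Range'  => 'bytes=0-9',
]);
echo $result['ContentRange']; // e.g. "bytes 0-9/43"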

', 'HeadObjectRequest$Range' => '

Downloads the specified byte range of an object. For more information about the HTTP Range header, go to http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.35.

', ], ], 'RecordDelimiter' => [ 'base' => NULL, 'refs' => [ 'CSVInput$RecordDelimiter' => '

The value used to separate individual records.

', 'CSVOutput$RecordDelimiter' => '

The value used to separate individual records.

', 'JSONOutput$RecordDelimiter' => '

The value used to separate individual records in the output.

', ], ], 'RecordsEvent' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentEventStream$Records' => '

The Records Event.

', ], ], 'Redirect' => [ 'base' => '

', 'refs' => [ 'RoutingRule$Redirect' => '

Container for redirect information. You can redirect requests to another host, to another page, or with another protocol. In the event of an error, you can specify a different error code to return.

', ], ], 'RedirectAllRequestsTo' => [ 'base' => '

', 'refs' => [ 'GetBucketWebsiteOutput$RedirectAllRequestsTo' => '

', 'WebsiteConfiguration$RedirectAllRequestsTo' => '

', ], ], 'ReplaceKeyPrefixWith' => [ 'base' => NULL, 'refs' => [ 'Redirect$ReplaceKeyPrefixWith' => '

The object key prefix to use in the redirect request. For example, to redirect requests for all pages with prefix docs/ (objects in the docs/ folder) to documents/, you can set a condition block with KeyPrefixEquals set to docs/ and in the Redirect set ReplaceKeyPrefixWith to /documents. Not required if one of the siblings is present. Can be present only if ReplaceKeyWith is not provided.
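A hedged sketch of the docs/-to-documents/ redirect described above, expressed with the AWS SDK for PHP (bucket name assumed):

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Redirect requests for docs/* to documents/* on the website endpoint.
$s3->putBucketWebsite([
    'Bucket' => 'examplebucket',
    'WebsiteConfiguration' => [
        'IndexDocument' => ['Suffix' => 'index.html'],
        'RoutingRules'  => [
            [
                'Condition' => ['KeyPrefixEquals' => 'docs/'],
                'Redirect'  => ['ReplaceKeyPrefixWith' => 'documents/'],
            ],
        ],
    ],
]);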

', ], ], 'ReplaceKeyWith' => [ 'base' => NULL, 'refs' => [ 'Redirect$ReplaceKeyWith' => '

The specific object key to use in the redirect request. For example, redirect requests to error.html. Not required if one of the siblings is present. Can be present only if ReplaceKeyPrefixWith is not provided.

', ], ], 'ReplicaKmsKeyID' => [ 'base' => NULL, 'refs' => [ 'EncryptionConfiguration$ReplicaKmsKeyID' => '

The ID of the AWS KMS key for the AWS Region where the destination bucket resides. Amazon S3 uses this key to encrypt the replica object.

', ], ], 'ReplicationConfiguration' => [ 'base' => '

A container for replication rules. You can add up to 1,000 rules. The maximum size of a replication configuration is 2 MB.
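For illustration, a minimal single-rule configuration via the AWS SDK for PHP; the IAM role ARN and bucket names are placeholder assumptions, and DeleteMarkerReplication is included on the assumption that filter-based rules require it:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// One rule that replicates keys under photos/ to a destination bucket.
$s3->putBucketReplication([
    'Bucket' => 'examplebucket',
    'ReplicationConfiguration' => [
        'Role'  => 'arn:aws:iam::123456789012:role/replication-role', // assumed role ARN
        'Rules' => [
            [
                'ID'       => 'rule-1',
                'Priority' => 1,
                'Status'   => 'Enabled',
                'Filter'   => ['Prefix' => 'photos/'],
                'DeleteMarkerReplication' => ['Status' => 'Disabled'],
                'Destination' => ['Bucket' => 'arn:aws:s3:::destinationbucket'],
            ],
        ],
    ],
]);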

', 'refs' => [ 'GetBucketReplicationOutput$ReplicationConfiguration' => NULL, 'PutBucketReplicationRequest$ReplicationConfiguration' => NULL, ], ], 'ReplicationRule' => [ 'base' => '

A container for information about a specific replication rule.

', 'refs' => [ 'ReplicationRules$member' => NULL, ], ], 'ReplicationRuleAndOperator' => [ 'base' => '

', 'refs' => [ 'ReplicationRuleFilter$And' => '

A container for specifying rule filters. The filters determine the subset of objects to which the rule applies. This element is required only if you specify more than one filter.

', ], ], 'ReplicationRuleFilter' => [ 'base' => '

A filter that identifies the subset of objects to which the replication rule applies. A Filter must specify exactly one Prefix, Tag, or an And child element.

', 'refs' => [ 'ReplicationRule$Filter' => NULL, ], ], 'ReplicationRuleStatus' => [ 'base' => NULL, 'refs' => [ 'ReplicationRule$Status' => '

If status isn\'t enabled, the rule is ignored.

', ], ], 'ReplicationRules' => [ 'base' => NULL, 'refs' => [ 'ReplicationConfiguration$Rules' => '

A container for one or more replication rules. A replication configuration must have at least one rule and can contain a maximum of 1,000 rules.

', ], ], 'ReplicationStatus' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$ReplicationStatus' => '

', 'HeadObjectOutput$ReplicationStatus' => '

', ], ], 'RequestCharged' => [ 'base' => '

If present, indicates that the requester was successfully charged for the request.

', 'refs' => [ 'AbortMultipartUploadOutput$RequestCharged' => NULL, 'CompleteMultipartUploadOutput$RequestCharged' => NULL, 'CopyObjectOutput$RequestCharged' => NULL, 'CreateMultipartUploadOutput$RequestCharged' => NULL, 'DeleteObjectOutput$RequestCharged' => NULL, 'DeleteObjectsOutput$RequestCharged' => NULL, 'GetObjectAclOutput$RequestCharged' => NULL, 'GetObjectOutput$RequestCharged' => NULL, 'GetObjectTorrentOutput$RequestCharged' => NULL, 'HeadObjectOutput$RequestCharged' => NULL, 'ListPartsOutput$RequestCharged' => NULL, 'PutObjectAclOutput$RequestCharged' => NULL, 'PutObjectLegalHoldOutput$RequestCharged' => NULL, 'PutObjectLockConfigurationOutput$RequestCharged' => NULL, 'PutObjectOutput$RequestCharged' => NULL, 'PutObjectRetentionOutput$RequestCharged' => NULL, 'RestoreObjectOutput$RequestCharged' => NULL, 'UploadPartCopyOutput$RequestCharged' => NULL, 'UploadPartOutput$RequestCharged' => NULL, ], ], 'RequestPayer' => [ 'base' => '

Confirms that the requester knows that she or he will be charged for the request. Bucket owners need not specify this parameter in their requests. Documentation on downloading objects from requester pays buckets can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/ObjectsinRequesterPaysBuckets.html
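A minimal sketch of acknowledging the charge with the AWS SDK for PHP (bucket and key assumed):

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// 'requester' is the only value this enum accepts.
$result = $s3->getObject([
    'Bucket'       => 'examplebucket',
    'Key'          => 'HappyFace.jpg',
    'RequestPayer' => 'requester',
]);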

', 'refs' => [ 'AbortMultipartUploadRequest$RequestPayer' => NULL, 'CompleteMultipartUploadRequest$RequestPayer' => NULL, 'CopyObjectRequest$RequestPayer' => NULL, 'CreateMultipartUploadRequest$RequestPayer' => NULL, 'DeleteObjectRequest$RequestPayer' => NULL, 'DeleteObjectsRequest$RequestPayer' => NULL, 'GetObjectAclRequest$RequestPayer' => NULL, 'GetObjectLegalHoldRequest$RequestPayer' => NULL, 'GetObjectRequest$RequestPayer' => NULL, 'GetObjectRetentionRequest$RequestPayer' => NULL, 'GetObjectTorrentRequest$RequestPayer' => NULL, 'HeadObjectRequest$RequestPayer' => NULL, 'ListObjectsRequest$RequestPayer' => '

Confirms that the requester knows that she or he will be charged for the list objects request. Bucket owners need not specify this parameter in their requests.

', 'ListObjectsV2Request$RequestPayer' => '

Confirms that the requester knows that she or he will be charged for the list objects request in V2 style. Bucket owners need not specify this parameter in their requests.

', 'ListPartsRequest$RequestPayer' => NULL, 'PutObjectAclRequest$RequestPayer' => NULL, 'PutObjectLegalHoldRequest$RequestPayer' => NULL, 'PutObjectLockConfigurationRequest$RequestPayer' => NULL, 'PutObjectRequest$RequestPayer' => NULL, 'PutObjectRetentionRequest$RequestPayer' => NULL, 'RestoreObjectRequest$RequestPayer' => NULL, 'UploadPartCopyRequest$RequestPayer' => NULL, 'UploadPartRequest$RequestPayer' => NULL, ], ], 'RequestPaymentConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketRequestPaymentRequest$RequestPaymentConfiguration' => '

', ], ], 'RequestProgress' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentRequest$RequestProgress' => '

Specifies whether periodic request progress information should be enabled.

', ], ], 'ResponseCacheControl' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseCacheControl' => '

Sets the Cache-Control header of the response.

', ], ], 'ResponseContentDisposition' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentDisposition' => '

Sets the Content-Disposition header of the response.

', ], ], 'ResponseContentEncoding' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentEncoding' => '

Sets the Content-Encoding header of the response.

', ], ], 'ResponseContentLanguage' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentLanguage' => '

Sets the Content-Language header of the response.

', ], ], 'ResponseContentType' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseContentType' => '

Sets the Content-Type header of the response.

', ], ], 'ResponseExpires' => [ 'base' => NULL, 'refs' => [ 'GetObjectRequest$ResponseExpires' => '

Sets the Expires header of the response.
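The Response* parameters above can be combined in a single request; a hedged AWS SDK for PHP sketch with an assumed bucket and key:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Override response headers without modifying the stored object.
$result = $s3->getObject([
    'Bucket'                     => 'examplebucket',
    'Key'                        => 'report.csv',
    'ResponseContentType'        => 'text/csv',
    'ResponseContentDisposition' => 'attachment; filename="report.csv"',
    'ResponseCacheControl'       => 'no-cache',
]);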

', ], ], 'Restore' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$Restore' => '

Provides information about the object restoration operation and the expiration time of the restored object copy.

', 'HeadObjectOutput$Restore' => '

Provides information about the object restoration operation and the expiration time of the restored object copy.

', ], ], 'RestoreObjectOutput' => [ 'base' => NULL, 'refs' => [], ], 'RestoreObjectRequest' => [ 'base' => NULL, 'refs' => [], ], 'RestoreOutputPath' => [ 'base' => NULL, 'refs' => [ 'RestoreObjectOutput$RestoreOutputPath' => '

Indicates the path in the provided S3 output location to which the Select results will be restored.

', ], ], 'RestoreRequest' => [ 'base' => '

Container for restore job parameters.
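For illustration, a minimal restore request via the AWS SDK for PHP; the bucket, key, and retrieval tier are assumptions:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Restore an archived object for one day using the Standard tier.
$s3->restoreObject([
    'Bucket'         => 'examplebucket',
    'Key'            => 'archivedobject',
    'RestoreRequest' => [
        'Days'                 => 1,
        'GlacierJobParameters' => ['Tier' => 'Standard'],
    ],
]);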

', 'refs' => [ 'RestoreObjectRequest$RestoreRequest' => NULL, ], ], 'RestoreRequestType' => [ 'base' => NULL, 'refs' => [ 'RestoreRequest$Type' => '

Type of restore request.

', ], ], 'Role' => [ 'base' => NULL, 'refs' => [ 'ReplicationConfiguration$Role' => '

The Amazon Resource Name (ARN) of the AWS Identity and Access Management (IAM) role that Amazon S3 can assume when replicating the objects.

', ], ], 'RoutingRule' => [ 'base' => '

', 'refs' => [ 'RoutingRules$member' => NULL, ], ], 'RoutingRules' => [ 'base' => NULL, 'refs' => [ 'GetBucketWebsiteOutput$RoutingRules' => '

', 'WebsiteConfiguration$RoutingRules' => '

', ], ], 'Rule' => [ 'base' => '

', 'refs' => [ 'Rules$member' => NULL, ], ], 'Rules' => [ 'base' => NULL, 'refs' => [ 'GetBucketLifecycleOutput$Rules' => '

', 'LifecycleConfiguration$Rules' => '

', ], ], 'S3KeyFilter' => [ 'base' => '

A container for object key name prefix and suffix filtering rules.

', 'refs' => [ 'NotificationConfigurationFilter$Key' => NULL, ], ], 'S3Location' => [ 'base' => '

Describes an S3 location that will receive the results of the restore request.

', 'refs' => [ 'OutputLocation$S3' => '

Describes an S3 location that will receive the results of the restore request.

', ], ], 'SSECustomerAlgorithm' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'CopyObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'CreateMultipartUploadOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'CreateMultipartUploadRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'GetObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'GetObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'HeadObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'HeadObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'PutObjectOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'PutObjectRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'SelectObjectContentRequest$SSECustomerAlgorithm' => '

The SSE Algorithm used to encrypt the object. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

', 'UploadPartCopyOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'UploadPartCopyRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', 'UploadPartOutput$SSECustomerAlgorithm' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header confirming the encryption algorithm used.

', 'UploadPartRequest$SSECustomerAlgorithm' => '

Specifies the algorithm to use when encrypting the object (e.g., AES256).

', ], ], 'SSECustomerKey' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.

', 'CreateMultipartUploadRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.

', 'GetObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.
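A hedged round-trip sketch with the AWS SDK for PHP, assuming the SDK's SSE-C handling base64-encodes the raw key and derives the MD5 header on your behalf (bucket and key assumed):

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$key = random_bytes(32); // 256-bit customer key; you must retain it yourself

// Upload, then read back, supplying the same key both times.
$s3->putObject([
    'Bucket'               => 'examplebucket',
    'Key'                  => 'exampleobject',
    'Body'                 => 'filetoupload',
    'SSECustomerAlgorithm' => 'AES256',
    'SSECustomerKey'       => $key,
]);
$result = $s3->getObject([
    'Bucket'               => 'examplebucket',
    'Key'                  => 'exampleobject',
    'SSECustomerAlgorithm' => 'AES256',
    'SSECustomerKey'       => $key,
]);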

', 'HeadObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.

', 'PutObjectRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header.

', 'SelectObjectContentRequest$SSECustomerKey' => '

The SSE Customer Key. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

', 'UploadPartCopyRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header. This must be the same encryption key specified in the initiate multipart upload request.

', 'UploadPartRequest$SSECustomerKey' => '

Specifies the customer-provided encryption key for Amazon S3 to use in encrypting data. This value is used to store the object and then it is discarded; Amazon does not store the encryption key. The key must be appropriate for use with the algorithm specified in the x-amz-server-side-encryption-customer-algorithm header. This must be the same encryption key specified in the initiate multipart upload request.

', ], ], 'SSECustomerKeyMD5' => [ 'base' => NULL, 'refs' => [ 'CopyObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'CopyObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'CreateMultipartUploadOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'CreateMultipartUploadRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'GetObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'GetObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'HeadObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'HeadObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'PutObjectOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'PutObjectRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'SelectObjectContentRequest$SSECustomerKeyMD5' => '

The SSE Customer Key MD5. For more information, see Server-Side Encryption (Using Customer-Provided Encryption Keys).

', 'UploadPartCopyOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'UploadPartCopyRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', 'UploadPartOutput$SSECustomerKeyMD5' => '

If server-side encryption with a customer-provided encryption key was requested, the response will include this header to provide round trip message integrity verification of the customer-provided encryption key.

', 'UploadPartRequest$SSECustomerKeyMD5' => '

Specifies the 128-bit MD5 digest of the encryption key according to RFC 1321. Amazon S3 uses this header for a message integrity check to ensure the encryption key was transmitted without error.

', ], ], 'SSEKMS' => [ 'base' => '

Specifies the use of SSE-KMS to encrypt delivered Inventory reports.

', 'refs' => [ 'InventoryEncryption$SSEKMS' => '

Specifies the use of SSE-KMS to encrypt delivered Inventory reports.

', ], ], 'SSEKMSKeyId' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'CopyObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'CopyObjectRequest$SSEKMSKeyId' => '

Specifies the AWS KMS key ID to use for object encryption. All GET and PUT requests for an object protected by AWS KMS will fail if not made via SSL or using SigV4. Documentation on configuring any of the officially supported AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version

', 'CreateMultipartUploadOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'CreateMultipartUploadRequest$SSEKMSKeyId' => '

Specifies the AWS KMS key ID to use for object encryption. All GET and PUT requests for an object protected by AWS KMS will fail if not made via SSL or using SigV4. Documentation on configuring any of the officially supported AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version

', 'Encryption$KMSKeyId' => '

If the encryption type is aws:kms, this optional value specifies the AWS KMS key ID to use for encryption of job results.

', 'GetObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'HeadObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'PutObjectOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'PutObjectRequest$SSEKMSKeyId' => '

Specifies the AWS KMS key ID to use for object encryption. All GET and PUT requests for an object protected by AWS KMS will fail if not made via SSL or using SigV4. Documentation on configuring any of the officially supported AWS SDKs and CLI can be found at http://docs.aws.amazon.com/AmazonS3/latest/dev/UsingAWSSDK.html#specify-signature-version
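For illustration with the AWS SDK for PHP; the KMS key ARN, bucket, and key are placeholder assumptions:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Encrypt the object under a specific (placeholder) KMS key.
$s3->putObject([
    'Bucket'               => 'examplebucket',
    'Key'                  => 'exampleobject',
    'Body'                 => 'filetoupload',
    'ServerSideEncryption' => 'aws:kms',
    'SSEKMSKeyId'          => 'arn:aws:kms:us-east-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab',
]);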

', 'SSEKMS$KeyId' => '

Specifies the ID of the AWS Key Management Service (KMS) master encryption key to use for encrypting Inventory reports.

', 'ServerSideEncryptionByDefault$KMSMasterKeyID' => '

KMS master key ID to use for the default encryption. This parameter is allowed only if SSEAlgorithm is aws:kms.

', 'UploadPartCopyOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', 'UploadPartOutput$SSEKMSKeyId' => '

If present, specifies the ID of the AWS Key Management Service (KMS) master encryption key that was used for the object.

', ], ], 'SSES3' => [ 'base' => '

Specifies the use of SSE-S3 to encrypt delivered Inventory reports.

', 'refs' => [ 'InventoryEncryption$SSES3' => '

Specifies the use of SSE-S3 to encrypt delivered Inventory reports.

', ], ], 'SelectObjectContentEventStream' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentOutput$Payload' => '

', ], ], 'SelectObjectContentOutput' => [ 'base' => NULL, 'refs' => [], ], 'SelectObjectContentRequest' => [ 'base' => '

Request to filter the contents of an Amazon S3 object based on a simple Structured Query Language (SQL) statement. In the request, along with the SQL expression, you must specify a data serialization format (JSON or CSV) of the object. Amazon S3 uses this to parse object data into records. It returns only records that match the specified SQL expression. You must also specify the data serialization format for the response. For more information, see the S3 Select API documentation.
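A minimal sketch of such a request with the AWS SDK for PHP, following the documented pattern of iterating the returned event stream; the bucket, key, and SQL expression are assumptions:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

$result = $s3->selectObjectContent([
    'Bucket'              => 'examplebucket',
    'Key'                 => 'data.csv',
    'Expression'          => 'SELECT s._1 FROM S3Object s',
    'ExpressionType'      => 'SQL',
    'InputSerialization'  => ['CSV' => ['FileHeaderInfo' => 'NONE']],
    'OutputSerialization' => ['CSV' => ['RecordDelimiter' => "\n"]],
]);

// The payload is an event stream; Records events carry the matching rows.
foreach ($result['Payload'] as $event) {
    if (isset($event['Records'])) {
        echo $event['Records']['Payload'];
    }
}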

', 'refs' => [], ], 'SelectParameters' => [ 'base' => '

Describes the parameters for Select job types.

', 'refs' => [ 'RestoreRequest$SelectParameters' => '

Describes the parameters for Select job types.

', ], ], 'ServerSideEncryption' => [ 'base' => NULL, 'refs' => [ 'CompleteMultipartUploadOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CopyObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CopyObjectRequest$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CreateMultipartUploadOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'CreateMultipartUploadRequest$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'Encryption$EncryptionType' => '

The server-side encryption algorithm used when storing job results in Amazon S3 (e.g., AES256, aws:kms).

', 'GetObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'HeadObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'PutObjectOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'PutObjectRequest$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'ServerSideEncryptionByDefault$SSEAlgorithm' => '

Server-side encryption algorithm to use for the default encryption.

', 'UploadPartCopyOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', 'UploadPartOutput$ServerSideEncryption' => '

The Server-side encryption algorithm used when storing this object in S3 (e.g., AES256, aws:kms).

', ], ], 'ServerSideEncryptionByDefault' => [ 'base' => '

Describes the default server-side encryption to apply to new objects in the bucket. If a PUT Object request does not specify any server-side encryption, this default encryption is applied.
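For illustration, a minimal default-encryption rule via the AWS SDK for PHP; the bucket and KMS key ARN are placeholder assumptions:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Default all new objects to SSE-KMS under a placeholder master key.
$s3->putBucketEncryption([
    'Bucket' => 'examplebucket',
    'ServerSideEncryptionConfiguration' => [
        'Rules' => [
            [
                'ApplyServerSideEncryptionByDefault' => [
                    'SSEAlgorithm'   => 'aws:kms',
                    'KMSMasterKeyID' => 'arn:aws:kms:us-east-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab',
                ],
            ],
        ],
    ],
]);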

', 'refs' => [ 'ServerSideEncryptionRule$ApplyServerSideEncryptionByDefault' => '

Describes the default server-side encryption to apply to new objects in the bucket. If a PUT Object request does not specify any server-side encryption, this default encryption is applied.

', ], ], 'ServerSideEncryptionConfiguration' => [ 'base' => '

Container for server-side encryption configuration rules. Currently S3 supports one rule only.

', 'refs' => [ 'GetBucketEncryptionOutput$ServerSideEncryptionConfiguration' => NULL, 'PutBucketEncryptionRequest$ServerSideEncryptionConfiguration' => NULL, ], ], 'ServerSideEncryptionRule' => [ 'base' => '

Container for information about a particular server-side encryption configuration rule.

', 'refs' => [ 'ServerSideEncryptionRules$member' => NULL, ], ], 'ServerSideEncryptionRules' => [ 'base' => NULL, 'refs' => [ 'ServerSideEncryptionConfiguration$Rules' => '

Container for information about a particular server-side encryption configuration rule.

', ], ], 'Setting' => [ 'base' => NULL, 'refs' => [ 'PublicAccessBlockConfiguration$BlockPublicAcls' => '

Specifies whether Amazon S3 should block public access control lists (ACLs) for this bucket and objects in this bucket. Setting this element to TRUE causes Amazon S3 to reject calls that would apply a public ACL to the bucket or to objects in it.

Enabling this setting doesn\'t affect existing policies or ACLs.

', 'PublicAccessBlockConfiguration$IgnorePublicAcls' => '

Specifies whether Amazon S3 should ignore public ACLs for this bucket and objects in this bucket. Setting this element to TRUE causes Amazon S3 to ignore all public ACLs on this bucket and objects in this bucket.

Enabling this setting doesn\'t affect the persistence of any existing ACLs and doesn\'t prevent new public ACLs from being set.

', 'PublicAccessBlockConfiguration$BlockPublicPolicy' => '

Specifies whether Amazon S3 should block public bucket policies for this bucket. Setting this element to TRUE causes Amazon S3 to reject calls to PUT Bucket policy if the specified bucket policy allows public access.

Enabling this setting doesn\'t affect existing bucket policies.

', 'PublicAccessBlockConfiguration$RestrictPublicBuckets' => '

Specifies whether Amazon S3 should restrict public bucket policies for this bucket. Setting this element to TRUE restricts access to this bucket to only AWS services and authorized users within this account if the bucket has a public policy.

Enabling this setting doesn\'t affect previously stored bucket policies, except that public and cross-account access within any public bucket policy, including non-public delegation to specific accounts, is blocked.

', ], ], 'Size' => [ 'base' => NULL, 'refs' => [ 'Object$Size' => '

', 'ObjectVersion$Size' => '

Size in bytes of the object.

', 'Part$Size' => '

Size in bytes of the uploaded part data.

', ], ], 'SourceSelectionCriteria' => [ 'base' => '

A container for filters that define which source objects should be replicated.

', 'refs' => [ 'ReplicationRule$SourceSelectionCriteria' => '

A container that describes additional filters for identifying the source objects that you want to replicate. You can choose to enable or disable the replication of these objects. Currently, Amazon S3 supports only the filter that you can specify for objects created with server-side encryption using an AWS KMS-Managed Key (SSE-KMS).

If you want Amazon S3 to replicate objects created with server-side encryption using AWS KMS-Managed Keys, add an SseKmsEncryptedObjects element with a Status of Enabled.

', ], ], 'SseKmsEncryptedObjects' => [ 'base' => '

A container for filter information for the selection of S3 objects encrypted with AWS KMS.

', 'refs' => [ 'SourceSelectionCriteria$SseKmsEncryptedObjects' => '

A container for filter information for the selection of S3 objects encrypted with AWS KMS. If you include SourceSelectionCriteria in the replication configuration, this element is required.

', ], ], 'SseKmsEncryptedObjectsStatus' => [ 'base' => NULL, 'refs' => [ 'SseKmsEncryptedObjects$Status' => '

If the status is not Enabled, replication for S3 objects encrypted with AWS KMS is disabled.

', ], ], 'StartAfter' => [ 'base' => NULL, 'refs' => [ 'ListObjectsV2Output$StartAfter' => '

StartAfter is where you want Amazon S3 to start listing from. Amazon S3 starts listing after this specified key. StartAfter can be any key in the bucket.

', 'ListObjectsV2Request$StartAfter' => '

StartAfter is where you want Amazon S3 to start listing from. Amazon S3 starts listing after this specified key. StartAfter can be any key in the bucket.

', ], ], 'Stats' => [ 'base' => '

', 'refs' => [ 'StatsEvent$Details' => '

The Stats event details.

', ], ], 'StatsEvent' => [ 'base' => '

', 'refs' => [ 'SelectObjectContentEventStream$Stats' => '

The Stats Event.

', ], ], 'StorageClass' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$StorageClass' => '

The type of storage to use for the object. Defaults to \'STANDARD\'.

', 'CreateMultipartUploadRequest$StorageClass' => '

The type of storage to use for the object. Defaults to \'STANDARD\'.

', 'Destination$StorageClass' => '

The class of storage used to store the object. By default, Amazon S3 uses the storage class of the source object when creating a replica.

', 'GetObjectOutput$StorageClass' => '

', 'HeadObjectOutput$StorageClass' => '

', 'ListPartsOutput$StorageClass' => '

The class of storage used to store the object.

', 'MultipartUpload$StorageClass' => '

The class of storage used to store the object.

', 'PutObjectRequest$StorageClass' => '

The type of storage to use for the object. Defaults to \'STANDARD\'.

', 'S3Location$StorageClass' => '

The class of storage used to store the restore results.

', ], ], 'StorageClassAnalysis' => [ 'base' => '

', 'refs' => [ 'AnalyticsConfiguration$StorageClassAnalysis' => '

If present, it indicates that data related to access patterns will be collected and made available to analyze the tradeoffs between different storage classes.

', ], ], 'StorageClassAnalysisDataExport' => [ 'base' => '

', 'refs' => [ 'StorageClassAnalysis$DataExport' => '

A container used to describe how data related to the storage class analysis should be exported.

', ], ], 'StorageClassAnalysisSchemaVersion' => [ 'base' => NULL, 'refs' => [ 'StorageClassAnalysisDataExport$OutputSchemaVersion' => '

The version of the output schema to use when exporting data. Must be V_1.

', ], ], 'Suffix' => [ 'base' => NULL, 'refs' => [ 'IndexDocument$Suffix' => '

A suffix that is appended to a request that is for a directory on the website endpoint. For example, if the suffix is index.html and you make a request to samplebucket/images/, the data returned will be for the object with the key name images/index.html. The suffix must not be empty and must not include a slash character.

', ], ], 'Tag' => [ 'base' => '

', 'refs' => [ 'AnalyticsFilter$Tag' => '

The tag to use when evaluating an analytics filter.

', 'LifecycleRuleFilter$Tag' => '

This tag must exist in the object\'s tag set in order for the rule to apply.

', 'MetricsFilter$Tag' => '

The tag used when evaluating a metrics filter.

', 'ReplicationRuleFilter$Tag' => '

A container for specifying a tag key and value.

The rule applies only to objects that have the tag in their tag set.

', 'TagSet$member' => NULL, ], ], 'TagCount' => [ 'base' => NULL, 'refs' => [ 'GetObjectOutput$TagCount' => '

The number of tags, if any, on the object.

', ], ], 'TagSet' => [ 'base' => NULL, 'refs' => [ 'AnalyticsAndOperator$Tags' => '

The list of tags to use when evaluating an AND predicate.

', 'GetBucketTaggingOutput$TagSet' => '

', 'GetObjectTaggingOutput$TagSet' => '

', 'LifecycleRuleAndOperator$Tags' => '

All of these tags must exist in the object\'s tag set in order for the rule to apply.

', 'MetricsAndOperator$Tags' => '

The list of tags used when evaluating an AND predicate.

', 'ReplicationRuleAndOperator$Tags' => '

', 'Tagging$TagSet' => '

', ], ], 'Tagging' => [ 'base' => '

', 'refs' => [ 'PutBucketTaggingRequest$Tagging' => '

', 'PutObjectTaggingRequest$Tagging' => '

', 'S3Location$Tagging' => '

The tag-set that is applied to the restore results.

', ], ], 'TaggingDirective' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$TaggingDirective' => '

Specifies whether the object tag-set is copied from the source object or replaced with the tag-set provided in the request.

', ], ], 'TaggingHeader' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$Tagging' => '

The tag-set for the destination object; this value must be used in conjunction with the TaggingDirective. The tag-set must be encoded as URL query parameters.

', 'CreateMultipartUploadRequest$Tagging' => '

The tag-set for the object. The tag-set must be encoded as URL query parameters.

', 'PutObjectRequest$Tagging' => '

The tag-set for the object. The tag-set must be encoded as URL query parameters (for example, "Key1=Value1").
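A small sketch with the AWS SDK for PHP: http_build_query() produces exactly the URL-query encoding this parameter expects (bucket and key assumed):

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// http_build_query() yields the "key1=value1&key2=value2" form required here.
$s3->putObject([
    'Bucket'  => 'examplebucket',
    'Key'     => 'exampleobject',
    'Body'    => 'filetoupload',
    'Tagging' => http_build_query(['key1' => 'value1', 'key2' => 'value2']),
]);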

', ], ], 'TargetBucket' => [ 'base' => NULL, 'refs' => [ 'LoggingEnabled$TargetBucket' => '

Specifies the bucket where you want Amazon S3 to store server access logs. You can have your logs delivered to any bucket that you own, including the same bucket that is being logged. You can also configure multiple buckets to deliver their logs to the same target bucket. In this case you should choose a different TargetPrefix for each source bucket so that the delivered log files can be distinguished by key.

', ], ], 'TargetGrant' => [ 'base' => '

', 'refs' => [ 'TargetGrants$member' => NULL, ], ], 'TargetGrants' => [ 'base' => NULL, 'refs' => [ 'LoggingEnabled$TargetGrants' => '

', ], ], 'TargetPrefix' => [ 'base' => NULL, 'refs' => [ 'LoggingEnabled$TargetPrefix' => '

This element lets you specify a prefix for the keys that the log files will be stored under.

', ], ], 'Tier' => [ 'base' => NULL, 'refs' => [ 'GlacierJobParameters$Tier' => '

Glacier retrieval tier at which the restore will be processed.

', 'RestoreRequest$Tier' => '

Glacier retrieval tier at which the restore will be processed.

', ], ], 'Token' => [ 'base' => NULL, 'refs' => [ 'ListBucketAnalyticsConfigurationsOutput$ContinuationToken' => '

The ContinuationToken that represents where this request began.

', 'ListBucketAnalyticsConfigurationsRequest$ContinuationToken' => '

The ContinuationToken that represents a placeholder from where this request should begin.

', 'ListBucketInventoryConfigurationsOutput$ContinuationToken' => '

If sent in the request, the marker that is used as a starting point for this inventory configuration list response.

', 'ListBucketInventoryConfigurationsRequest$ContinuationToken' => '

The marker used to continue an inventory configuration listing that has been truncated. Use the NextContinuationToken from a previously truncated list response to continue the listing. The continuation token is an opaque value that Amazon S3 understands.

', 'ListBucketMetricsConfigurationsOutput$ContinuationToken' => '

The marker that is used as a starting point for this metrics configuration list response. This value is present if it was sent in the request.

', 'ListBucketMetricsConfigurationsRequest$ContinuationToken' => '

The marker that is used to continue a metrics configuration listing that has been truncated. Use the NextContinuationToken from a previously truncated list response to continue the listing. The continuation token is an opaque value that Amazon S3 understands.

', 'ListObjectsV2Output$ContinuationToken' => '

ContinuationToken indicates to Amazon S3 that the list is being continued on this bucket with a token. ContinuationToken is obfuscated and is not a real key.

', 'ListObjectsV2Request$ContinuationToken' => '

ContinuationToken indicates to Amazon S3 that the list is being continued on this bucket with a token. ContinuationToken is obfuscated and is not a real key.
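A hedged pagination sketch with the AWS SDK for PHP (bucket assumed):

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Feed each NextContinuationToken back in until IsTruncated is false.
$params = ['Bucket' => 'examplebucket'];
do {
    $result = $s3->listObjectsV2($params);
    foreach ($result['Contents'] ?? [] as $object) {
        echo $object['Key'], "\n";
    }
    $params['ContinuationToken'] = $result['NextContinuationToken'];
} while ($result['IsTruncated']);

The SDK's getPaginator('ListObjectsV2') wraps this same token loop, so in practice either form works.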

', ], ], 'TopicArn' => [ 'base' => NULL, 'refs' => [ 'TopicConfiguration$TopicArn' => '

The Amazon Resource Name (ARN) of the Amazon SNS topic to which Amazon S3 will publish a message when it detects events of the specified type.

', 'TopicConfigurationDeprecated$Topic' => '

Amazon SNS topic to which Amazon S3 will publish a message to report the specified events for the bucket.

', ], ], 'TopicConfiguration' => [ 'base' => '

A container for specifying the configuration for publication of messages to an Amazon Simple Notification Service (Amazon SNS) topic when Amazon S3 detects specified events.

', 'refs' => [ 'TopicConfigurationList$member' => NULL, ], ], 'TopicConfigurationDeprecated' => [ 'base' => '

', 'refs' => [ 'NotificationConfigurationDeprecated$TopicConfiguration' => '

', ], ], 'TopicConfigurationList' => [ 'base' => NULL, 'refs' => [ 'NotificationConfiguration$TopicConfigurations' => '

', ], ], 'Transition' => [ 'base' => '

', 'refs' => [ 'Rule$Transition' => '

', 'TransitionList$member' => NULL, ], ], 'TransitionList' => [ 'base' => NULL, 'refs' => [ 'LifecycleRule$Transitions' => '

', ], ], 'TransitionStorageClass' => [ 'base' => NULL, 'refs' => [ 'NoncurrentVersionTransition$StorageClass' => '

The class of storage used to store the object.

', 'Transition$StorageClass' => '

The class of storage used to store the object.

', ], ], 'Type' => [ 'base' => NULL, 'refs' => [ 'Grantee$Type' => '

Type of grantee.

', ], ], 'URI' => [ 'base' => NULL, 'refs' => [ 'Grantee$URI' => '

URI of the grantee group.

', ], ], 'UploadIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListMultipartUploadsOutput$UploadIdMarker' => '

Upload ID after which listing began.

', 'ListMultipartUploadsRequest$UploadIdMarker' => '

Together with key-marker, specifies the multipart upload after which listing should begin. If key-marker is not specified, the upload-id-marker parameter is ignored.

', ], ], 'UploadPartCopyOutput' => [ 'base' => NULL, 'refs' => [], ], 'UploadPartCopyRequest' => [ 'base' => NULL, 'refs' => [], ], 'UploadPartOutput' => [ 'base' => NULL, 'refs' => [], ], 'UploadPartRequest' => [ 'base' => NULL, 'refs' => [], ], 'UserMetadata' => [ 'base' => NULL, 'refs' => [ 'S3Location$UserMetadata' => '

A list of metadata to store with the restore results in S3.

', ], ], 'Value' => [ 'base' => NULL, 'refs' => [ 'Tag$Value' => '

Value of the tag.

', ], ], 'VersionIdMarker' => [ 'base' => NULL, 'refs' => [ 'ListObjectVersionsOutput$VersionIdMarker' => '

', 'ListObjectVersionsRequest$VersionIdMarker' => '

Specifies the object version you want to start listing from.

', ], ], 'VersioningConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketVersioningRequest$VersioningConfiguration' => '

', ], ], 'WebsiteConfiguration' => [ 'base' => '

', 'refs' => [ 'PutBucketWebsiteRequest$WebsiteConfiguration' => '

', ], ], 'WebsiteRedirectLocation' => [ 'base' => NULL, 'refs' => [ 'CopyObjectRequest$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'CreateMultipartUploadRequest$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'GetObjectOutput$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'HeadObjectOutput$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', 'PutObjectRequest$WebsiteRedirectLocation' => '

If the bucket is configured as a website, redirects requests for this object to another object in the same bucket or to an external URL. Amazon S3 stores the value of this header in the object metadata.

', ], ], 'Years' => [ 'base' => NULL, 'refs' => [ 'DefaultRetention$Years' => '

The number of years that you want to specify for the default retention period.
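For illustration via the AWS SDK for PHP, assuming a bucket that was created with object lock enabled; the mode and duration are placeholder choices:

<?php
require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'us-east-1']);

// Default new object versions to one year of GOVERNANCE-mode retention.
$s3->putObjectLockConfiguration([
    'Bucket' => 'examplebucket',
    'ObjectLockConfiguration' => [
        'ObjectLockEnabled' => 'Enabled',
        'Rule' => [
            'DefaultRetention' => ['Mode' => 'GOVERNANCE', 'Years' => 1],
        ],
    ],
]);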

', ], ], ],]; diff --git a/src/data/s3/2006-03-01/examples-1.json b/src/data/s3/2006-03-01/examples-1.json index a215535051..0732c2fba9 100644 --- a/src/data/s3/2006-03-01/examples-1.json +++ b/src/data/s3/2006-03-01/examples-1.json @@ -257,10 +257,8 @@ "DeleteObject": [ { "input": { - "Bucket": "examplebucket", - "Key": "objectkey.jpg" - }, - "output": { + "Bucket": "ExampleBucket", + "Key": "HappyFace.jpg" }, "comments": { "input": { @@ -268,14 +266,16 @@ "output": { } }, - "description": "The following example deletes an object from an S3 bucket.", - "id": "to-delete-an-object-1472850136595", - "title": "To delete an object" + "description": "The following example deletes an object from a non-versioned bucket.", + "id": "to-delete-an-object-from-a-non-versioned-bucket-1481588533089", + "title": "To delete an object (from a non-versioned bucket)" }, { "input": { - "Bucket": "ExampleBucket", - "Key": "HappyFace.jpg" + "Bucket": "examplebucket", + "Key": "objectkey.jpg" + }, + "output": { }, "comments": { "input": { @@ -283,20 +283,19 @@ "output": { } }, - "description": "The following example deletes an object from a non-versioned bucket.", - "id": "to-delete-an-object-from-a-non-versioned-bucket-1481588533089", - "title": "To delete an object (from a non-versioned bucket)" + "description": "The following example deletes an object from an S3 bucket.", + "id": "to-delete-an-object-1472850136595", + "title": "To delete an object" } ], "DeleteObjectTagging": [ { "input": { "Bucket": "examplebucket", - "Key": "HappyFace.jpg", - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + "Key": "HappyFace.jpg" }, "output": { - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + "VersionId": "null" }, "comments": { "input": { @@ -304,17 +303,18 @@ "output": { } }, - "description": "The following example removes tag set associated with the specified object version. The request specifies both the object key and object version.", - "id": "to-remove-tag-set-from-an-object-version-1483145285913", - "title": "To remove tag set from an object version" + "description": "The following example removes tag set associated with the specified object. If the bucket is versioning enabled, the operation removes tag set from the latest object version.", + "id": "to-remove-tag-set-from-an-object-1483145342862", + "title": "To remove tag set from an object" }, { "input": { "Bucket": "examplebucket", - "Key": "HappyFace.jpg" + "Key": "HappyFace.jpg", + "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" }, "output": { - "VersionId": "null" + "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" }, "comments": { "input": { @@ -322,9 +322,9 @@ "output": { } }, - "description": "The following example removes tag set associated with the specified object. If the bucket is versioning enabled, the operation removes tag set from the latest object version.", - "id": "to-remove-tag-set-from-an-object-1483145342862", - "title": "To remove tag set from an object" + "description": "The following example removes tag set associated with the specified object version. 
The request specifies both the object key and object version.", + "id": "to-remove-tag-set-from-an-object-version-1483145285913", + "title": "To remove tag set from an object version" } ], "DeleteObjects": [ @@ -728,17 +728,18 @@ { "input": { "Bucket": "examplebucket", - "Key": "HappyFace.jpg" + "Key": "SampleFile.txt", + "Range": "bytes=0-9" }, "output": { "AcceptRanges": "bytes", - "ContentLength": "3191", - "ContentType": "image/jpeg", - "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "LastModified": "Thu, 15 Dec 2016 01:19:41 GMT", + "ContentLength": "10", + "ContentRange": "bytes 0-9/43", + "ContentType": "text/plain", + "ETag": "\"0d94420ffd0bc68cd3d152506b97a9cc\"", + "LastModified": "Thu, 09 Oct 2014 22:57:28 GMT", "Metadata": { }, - "TagCount": 2, "VersionId": "null" }, "comments": { @@ -747,25 +748,24 @@ "output": { } }, - "description": "The following example retrieves an object for an S3 bucket.", - "id": "to-retrieve-an-object-1481827837012", - "title": "To retrieve an object" + "description": "The following example retrieves an object for an S3 bucket. The request specifies the range header to retrieve a specific byte range.", + "id": "to-retrieve-a-byte-range-of-an-object--1481832674603", + "title": "To retrieve a byte range of an object " }, { "input": { "Bucket": "examplebucket", - "Key": "SampleFile.txt", - "Range": "bytes=0-9" + "Key": "HappyFace.jpg" }, "output": { "AcceptRanges": "bytes", - "ContentLength": "10", - "ContentRange": "bytes 0-9/43", - "ContentType": "text/plain", - "ETag": "\"0d94420ffd0bc68cd3d152506b97a9cc\"", - "LastModified": "Thu, 09 Oct 2014 22:57:28 GMT", + "ContentLength": "3191", + "ContentType": "image/jpeg", + "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", + "LastModified": "Thu, 15 Dec 2016 01:19:41 GMT", "Metadata": { }, + "TagCount": 2, "VersionId": "null" }, "comments": { @@ -774,9 +774,9 @@ "output": { } }, - "description": "The following example retrieves an object for an S3 bucket. The request specifies the range header to retrieve a specific byte range.", - "id": "to-retrieve-a-byte-range-of-an-object--1481832674603", - "title": "To retrieve a byte range of an object " + "description": "The following example retrieves an object for an S3 bucket.", + "id": "to-retrieve-an-object-1481827837012", + "title": "To retrieve an object" } ], "GetObjectAcl": [ @@ -840,17 +840,20 @@ { "input": { "Bucket": "examplebucket", - "Key": "exampleobject", - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + "Key": "HappyFace.jpg" }, "output": { "TagSet": [ { - "Key": "Key1", - "Value": "Value1" + "Key": "Key4", + "Value": "Value4" + }, + { + "Key": "Key3", + "Value": "Value3" } ], - "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" + "VersionId": "null" }, "comments": { "input": { @@ -858,27 +861,24 @@ "output": { } }, - "description": "The following example retrieves tag set of an object. 
The request specifies object version.", - "id": "to-retrieve-tag-set-of-a-specific-object-version-1483400283663", - "title": "To retrieve tag set of a specific object version" + "description": "The following example retrieves tag set of an object.", + "id": "to-retrieve-tag-set-of-an-object-1481833847896", + "title": "To retrieve tag set of an object" }, { "input": { "Bucket": "examplebucket", - "Key": "HappyFace.jpg" + "Key": "exampleobject", + "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" }, "output": { "TagSet": [ { - "Key": "Key4", - "Value": "Value4" - }, - { - "Key": "Key3", - "Value": "Value3" + "Key": "Key1", + "Value": "Value1" } ], - "VersionId": "null" + "VersionId": "ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI" }, "comments": { "input": { @@ -886,9 +886,9 @@ "output": { } }, - "description": "The following example retrieves tag set of an object.", - "id": "to-retrieve-tag-set-of-an-object-1481833847896", - "title": "To retrieve tag set of an object" + "description": "The following example retrieves tag set of an object. The request specifies object version.", + "id": "to-retrieve-tag-set-of-a-specific-object-version-1483400283663", + "title": "To retrieve tag set of a specific object version" } ], "GetObjectTorrent": [ @@ -1567,13 +1567,14 @@ "PutObject": [ { "input": { + "ACL": "authenticated-read", "Body": "filetoupload", "Bucket": "examplebucket", - "Key": "objectkey" + "Key": "exampleobject" }, "output": { "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "Bvq0EDKxOcXLJXNo_Lkz37eM3R4pfzyQ" + "VersionId": "Kirh.unyZwjQ69YxcQLA8z4F5j3kJJKr" }, "comments": { "input": { @@ -1581,9 +1582,9 @@ "output": { } }, - "description": "The following example creates an object. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-create-an-object-1483147613675", - "title": "To create an object." + "description": "The following example uploads and object. The request specifies optional canned ACL (access control list) to all READ access to authenticated users. If the bucket is versioning enabled, S3 returns version ID in response.", + "id": "to-upload-an-object-and-specify-canned-acl-1483397779571", + "title": "To upload an object and specify canned ACL." }, { "input": { @@ -1609,14 +1610,11 @@ "input": { "Body": "filetoupload", "Bucket": "examplebucket", - "Key": "exampleobject", - "ServerSideEncryption": "AES256", - "Tagging": "key1=value1&key2=value2" + "Key": "objectkey" }, "output": { "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "ServerSideEncryption": "AES256", - "VersionId": "Ri.vC6qVlA4dEnjgRV4ZHsHoFIjqEMNt" + "VersionId": "Bvq0EDKxOcXLJXNo_Lkz37eM3R4pfzyQ" }, "comments": { "input": { @@ -1624,9 +1622,9 @@ "output": { } }, - "description": "The following example uploads and object. The request specifies the optional server-side encryption option. The request also specifies optional object tags. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-upload-an-object-and-specify-server-side-encryption-and-object-tags-1483398331831", - "title": "To upload an object and specify server-side encryption and object tags" + "description": "The following example creates an object. If the bucket is versioning enabled, S3 returns version ID in response.", + "id": "to-create-an-object-1483147613675", + "title": "To create an object." 
}, { "input": { @@ -1675,16 +1673,16 @@ }, { "input": { - "Body": "HappyFace.jpg", + "Body": "filetoupload", "Bucket": "examplebucket", - "Key": "HappyFace.jpg", + "Key": "exampleobject", "ServerSideEncryption": "AES256", - "StorageClass": "STANDARD_IA" + "Tagging": "key1=value1&key2=value2" }, "output": { "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", "ServerSideEncryption": "AES256", - "VersionId": "CG612hodqujkf8FaaNfp8U..FIhLROcp" + "VersionId": "Ri.vC6qVlA4dEnjgRV4ZHsHoFIjqEMNt" }, "comments": { "input": { @@ -1692,20 +1690,22 @@ "output": { } }, - "description": "The following example uploads an object. The request specifies optional request headers to directs S3 to use specific storage class and use server-side encryption.", - "id": "to-upload-an-object-(specify-optional-headers)", - "title": "To upload an object (specify optional headers)" + "description": "The following example uploads an object. The request specifies the optional server-side encryption option. The request also specifies optional object tags. If the bucket is versioning enabled, S3 returns version ID in response.", + "id": "to-upload-an-object-and-specify-server-side-encryption-and-object-tags-1483398331831", + "title": "To upload an object and specify server-side encryption and object tags" }, { "input": { - "ACL": "authenticated-read", - "Body": "filetoupload", + "Body": "HappyFace.jpg", "Bucket": "examplebucket", - "Key": "exampleobject" + "Key": "HappyFace.jpg", + "ServerSideEncryption": "AES256", + "StorageClass": "STANDARD_IA" }, "output": { "ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"", - "VersionId": "Kirh.unyZwjQ69YxcQLA8z4F5j3kJJKr" + "ServerSideEncryption": "AES256", + "VersionId": "CG612hodqujkf8FaaNfp8U..FIhLROcp" }, "comments": { "input": { @@ -1713,9 +1713,9 @@ "output": { } }, - "description": "The following example uploads and object. The request specifies optional canned ACL (access control list) to all READ access to authenticated users. If the bucket is versioning enabled, S3 returns version ID in response.", - "id": "to-upload-an-object-and-specify-canned-acl-1483397779571", - "title": "To upload an object and specify canned ACL." + "description": "The following example uploads an object.
The request specifies optional request headers that direct S3 to use a specific storage class and server-side encryption.", + "id": "to-upload-an-object-(specify-optional-headers)", + "title": "To upload an object (specify optional headers)" } ], "PutObjectAcl": [ diff --git a/src/data/s3/2006-03-01/examples-1.json.php b/src/data/s3/2006-03-01/examples-1.json.php index 4771d0c15d..ae520751cf 100644 --- a/src/data/s3/2006-03-01/examples-1.json.php +++ b/src/data/s3/2006-03-01/examples-1.json.php @@ -1,3 +1,3 @@ '1.0', 'examples' => [ 'AbortMultipartUpload' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'bigobject', 'UploadId' => 'xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example aborts a multipart upload.', 'id' => 'to-abort-a-multipart-upload-1481853354987', 'title' => 'To abort a multipart upload', ], ], 'CompleteMultipartUpload' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'bigobject', 'MultipartUpload' => [ 'Parts' => [ [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'PartNumber' => '1', ], [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'PartNumber' => '2', ], ], ], 'UploadId' => '7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [ 'Bucket' => 'acexamplebucket', 'ETag' => '"4d9031c7644d8081c2829f4ea23c55f7-2"', 'Key' => 'bigobject', 'Location' => 'https://examplebucket.s3.amazonaws.com/bigobject', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example completes a multipart upload.', 'id' => 'to-complete-multipart-upload-1481851590483', 'title' => 'To complete multipart upload', ], ], 'CopyObject' => [ [ 'input' => [ 'Bucket' => 'destinationbucket', 'CopySource' => '/sourcebucket/HappyFacejpg', 'Key' => 'HappyFaceCopyjpg', ], 'output' => [ 'CopyObjectResult' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'LastModified' => '2016-12-15T17:38:53.000Z', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example copies an object from one bucket to another.', 'id' => 'to-copy-an-object-1481823186878', 'title' => 'To copy an object', ], ], 'CreateBucket' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Location' => '/examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates a bucket.', 'id' => 'to-create-a-bucket--1472851826060', 'title' => 'To create a bucket ', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'CreateBucketConfiguration' => [ 'LocationConstraint' => 'eu-west-1', ], ], 'output' => [ 'Location' => 'http://examplebucket.s3.amazonaws.com/', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates a bucket.
The request specifies an AWS region where to create the bucket.', 'id' => 'to-create-a-bucket-in-a-specific-region-1483399072992', 'title' => 'To create a bucket in a specific region', ], ], 'CreateMultipartUpload' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'largeobject', ], 'output' => [ 'Bucket' => 'examplebucket', 'Key' => 'largeobject', 'UploadId' => 'ibZBv_75gd9r8lH_gqXatLdxMVpAlj6ZQjEs.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example initiates a multipart upload.', 'id' => 'to-initiate-a-multipart-upload-1481836794513', 'title' => 'To initiate a multipart upload', ], ], 'DeleteBucket' => [ [ 'input' => [ 'Bucket' => 'forrandall2', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes the specified bucket.', 'id' => 'to-delete-a-bucket-1473108514262', 'title' => 'To delete a bucket', ], ], 'DeleteBucketCors' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes CORS configuration on a bucket.', 'id' => 'to-delete-cors-configuration-on-a-bucket-1483042856112', 'title' => 'To delete cors configuration on a bucket.', ], ], 'DeleteBucketLifecycle' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes lifecycle configuration on a bucket.', 'id' => 'to-delete-lifecycle-configuration-on-a-bucket-1483043310583', 'title' => 'To delete lifecycle configuration on a bucket.', ], ], 'DeleteBucketPolicy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes bucket policy on the specified bucket.', 'id' => 'to-delete-bucket-policy-1483043406577', 'title' => 'To delete bucket policy', ], ], 'DeleteBucketReplication' => [ [ 'input' => [ 'Bucket' => 'example', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes replication configuration set on bucket.', 'id' => 'to-delete-bucket-replication-configuration-1483043684668', 'title' => 'To delete bucket replication configuration', ], ], 'DeleteBucketTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes bucket tags.', 'id' => 'to-delete-bucket-tags-1483043846509', 'title' => 'To delete bucket tags', ], ], 'DeleteBucketWebsite' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes bucket website configuration.', 'id' => 'to-delete-bucket-website-configuration-1483043937825', 'title' => 'To delete bucket website configuration', ], ], 'DeleteObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'objectkey.jpg', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes an object from an S3 bucket.', 'id' => 'to-delete-an-object-1472850136595', 'title' => 'To delete an object', ], [ 'input' => [ 'Bucket' => 'ExampleBucket', 'Key' => 'HappyFace.jpg', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes an object from a non-versioned bucket.', 'id' => 'to-delete-an-object-from-a-non-versioned-bucket-1481588533089', 'title' => 'To delete an object (from a 
non-versioned bucket)', ], ], 'DeleteObjectTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'output' => [ 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example removes tag set associated with the specified object version. The request specifies both the object key and object version.', 'id' => 'to-remove-tag-set-from-an-object-version-1483145285913', 'title' => 'To remove tag set from an object version', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example removes tag set associated with the specified object. If the bucket is versioning enabled, the operation removes tag set from the latest object version.', 'id' => 'to-remove-tag-set-from-an-object-1483145342862', 'title' => 'To remove tag set from an object', ], ], 'DeleteObjects' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Delete' => [ 'Objects' => [ [ 'Key' => 'objectkey1', ], [ 'Key' => 'objectkey2', ], ], 'Quiet' => false, ], ], 'output' => [ 'Deleted' => [ [ 'DeleteMarker' => 'true', 'DeleteMarkerVersionId' => 'A._w1z6EFiCF5uhtQMDal9JDkID9tQ7F', 'Key' => 'objectkey1', ], [ 'DeleteMarker' => 'true', 'DeleteMarkerVersionId' => 'iOd_ORxhkKe_e8G8_oSGxt2PjsCZKlkt', 'Key' => 'objectkey2', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes objects from a bucket. The bucket is versioned, and the request does not specify the object version to delete. In this case, all versions remain in the bucket and S3 adds a delete marker.', 'id' => 'to-delete-multiple-objects-from-a-versioned-bucket-1483146248805', 'title' => 'To delete multiple objects from a versioned bucket', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Delete' => [ 'Objects' => [ [ 'Key' => 'HappyFace.jpg', 'VersionId' => '2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b', ], [ 'Key' => 'HappyFace.jpg', 'VersionId' => 'yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd', ], ], 'Quiet' => false, ], ], 'output' => [ 'Deleted' => [ [ 'Key' => 'HappyFace.jpg', 'VersionId' => 'yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd', ], [ 'Key' => 'HappyFace.jpg', 'VersionId' => '2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes objects from a bucket. The request specifies object versions. 
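The versioned-delete entry here amounts to a single deleteObjects call that names each key/version pair explicitly; a minimal sketch, reusing the hypothetical $s3 client from the earlier snippet:

// Delete two specific versions of the same key in one request,
// mirroring the versioned DeleteObjects example entry.
$result = $s3->deleteObjects([
    'Bucket' => 'examplebucket',
    'Delete' => [
        'Objects' => [
            ['Key' => 'HappyFace.jpg', 'VersionId' => '2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b'],
            ['Key' => 'HappyFace.jpg', 'VersionId' => 'yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd'],
        ],
        'Quiet' => false,
    ],
]);

// S3 echoes each deleted key/version back in the response.
foreach ($result['Deleted'] as $deleted) {
    echo $deleted['Key'], ' @ ', $deleted['VersionId'], PHP_EOL;
}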
S3 deletes specific object versions and returns the key and versions of deleted objects in the response.', 'id' => 'to-delete-multiple-object-versions-from-a-versioned-bucket-1483147087737', 'title' => 'To delete multiple object versions from a versioned bucket', ], ], 'GetBucketCors' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'CORSRules' => [ [ 'AllowedHeaders' => [ 'Authorization', ], 'AllowedMethods' => [ 'GET', ], 'AllowedOrigins' => [ '*', ], 'MaxAgeSeconds' => 3000, ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns cross-origin resource sharing (CORS) configuration set on a bucket.', 'id' => 'to-get-cors-configuration-set-on-a-bucket-1481596855475', 'title' => 'To get cors configuration set on a bucket', ], ], 'GetBucketLifecycle' => [ [ 'input' => [ 'Bucket' => 'acl1', ], 'output' => [ 'Rules' => [ [ 'Expiration' => [ 'Days' => 1, ], 'ID' => 'delete logs', 'Prefix' => '123/', 'Status' => 'Enabled', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example gets ACL on the specified bucket.', 'id' => 'to-get-a-bucket-acl-1474413606503', 'title' => 'To get a bucket acl', ], ], 'GetBucketLifecycleConfiguration' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Rules' => [ [ 'ID' => 'Rule for TaxDocs/', 'Prefix' => 'TaxDocs', 'Status' => 'Enabled', 'Transitions' => [ [ 'Days' => 365, 'StorageClass' => 'STANDARD_IA', ], ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves lifecycle configuration on set on a bucket. ', 'id' => 'to-get-lifecycle-configuration-on-a-bucket-1481666063200', 'title' => 'To get lifecycle configuration on a bucket', ], ], 'GetBucketLocation' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'LocationConstraint' => 'us-west-2', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns bucket location.', 'id' => 'to-get-bucket-location-1481594573609', 'title' => 'To get bucket location', ], ], 'GetBucketNotification' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'QueueConfiguration' => [ 'Event' => 's3:ObjectCreated:Put', 'Events' => [ 's3:ObjectCreated:Put', ], 'Id' => 'MDQ2OGQ4NDEtOTBmNi00YTM4LTk0NzYtZDIwN2I3NWQ1NjIx', 'Queue' => 'arn:aws:sqs:us-east-1:acct-id:S3ObjectCreatedEventQueue', ], 'TopicConfiguration' => [ 'Event' => 's3:ObjectCreated:Copy', 'Events' => [ 's3:ObjectCreated:Copy', ], 'Id' => 'YTVkMWEzZGUtNTY1NS00ZmE2LWJjYjktMmRlY2QwODFkNTJi', 'Topic' => 'arn:aws:sns:us-east-1:acct-id:S3ObjectCreatedEventTopic', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns notification configuration set on a bucket.', 'id' => 'to-get-notification-configuration-set-on-a-bucket-1481594028667', 'title' => 'To get notification configuration set on a bucket', ], [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'QueueConfiguration' => [ 'Event' => 's3:ObjectCreated:Put', 'Events' => [ 's3:ObjectCreated:Put', ], 'Id' => 'MDQ2OGQ4NDEtOTBmNi00YTM4LTk0NzYtZDIwN2I3NWQ1NjIx', 'Queue' => 'arn:aws:sqs:us-east-1:acct-id:S3ObjectCreatedEventQueue', ], 'TopicConfiguration' => [ 'Event' => 's3:ObjectCreated:Copy', 'Events' => [ 's3:ObjectCreated:Copy', ], 'Id' => 'YTVkMWEzZGUtNTY1NS00ZmE2LWJjYjktMmRlY2QwODFkNTJi', 'Topic' => 'arn:aws:sns:us-east-1:acct-id:S3ObjectCreatedEventTopic', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 
'description' => 'The following example returns notification configuration set on a bucket.', 'id' => 'to-get-notification-configuration-set-on-a-bucket-1481594028667', 'title' => 'To get notification configuration set on a bucket', ], ], 'GetBucketPolicy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Policy' => '{"Version":"2008-10-17","Id":"LogPolicy","Statement":[{"Sid":"Enables the log delivery group to publish logs to your bucket ","Effect":"Allow","Principal":{"AWS":"111122223333"},"Action":["s3:GetBucketAcl","s3:GetObjectAcl","s3:PutObject"],"Resource":["arn:aws:s3:::policytest1/*","arn:aws:s3:::policytest1"]}]}', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns bucket policy associated with a bucket.', 'id' => 'to-get-bucket-policy-1481595098424', 'title' => 'To get bucket policy', ], ], 'GetBucketReplication' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'ReplicationConfiguration' => [ 'Role' => 'arn:aws:iam::acct-id:role/example-role', 'Rules' => [ [ 'Destination' => [ 'Bucket' => 'arn:aws:s3:::destination-bucket', ], 'ID' => 'MWIwNTkwZmItMTE3MS00ZTc3LWJkZDEtNzRmODQwYzc1OTQy', 'Prefix' => 'Tax', 'Status' => 'Enabled', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns replication configuration set on a bucket.', 'id' => 'to-get-replication-configuration-set-on-a-bucket-1481593597175', 'title' => 'To get replication configuration set on a bucket', ], ], 'GetBucketRequestPayment' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Payer' => 'BucketOwner', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves bucket versioning configuration.', 'id' => 'to-get-bucket-versioning-configuration-1483037183929', 'title' => 'To get bucket versioning configuration', ], ], 'GetBucketTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'TagSet' => [ [ 'Key' => 'key1', 'Value' => 'value1', ], [ 'Key' => 'key2', 'Value' => 'value2', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns tag set associated with a bucket', 'id' => 'to-get-tag-set-associated-with-a-bucket-1481593232107', 'title' => 'To get tag set associated with a bucket', ], ], 'GetBucketVersioning' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'MFADelete' => 'Disabled', 'Status' => 'Enabled', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves bucket versioning configuration.', 'id' => 'to-get-bucket-versioning-configuration-1483037183929', 'title' => 'To get bucket versioning configuration', ], ], 'GetBucketWebsite' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'ErrorDocument' => [ 'Key' => 'error.html', ], 'IndexDocument' => [ 'Suffix' => 'index.html', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves website configuration of a bucket.', 'id' => 'to-get-bucket-website-configuration-1483037016926', 'title' => 'To get bucket website configuration', ], ], 'GetObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'AcceptRanges' => 'bytes', 'ContentLength' => '3191', 'ContentType' => 'image/jpeg', 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'LastModified' => 'Thu, 15 Dec 2016 01:19:41 GMT', 'Metadata' => [], 'TagCount' => 2, 'VersionId' => 'null', ], 
'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves an object for an S3 bucket.', 'id' => 'to-retrieve-an-object-1481827837012', 'title' => 'To retrieve an object', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'SampleFile.txt', 'Range' => 'bytes=0-9', ], 'output' => [ 'AcceptRanges' => 'bytes', 'ContentLength' => '10', 'ContentRange' => 'bytes 0-9/43', 'ContentType' => 'text/plain', 'ETag' => '"0d94420ffd0bc68cd3d152506b97a9cc"', 'LastModified' => 'Thu, 09 Oct 2014 22:57:28 GMT', 'Metadata' => [], 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves an object for an S3 bucket. The request specifies the range header to retrieve a specific byte range.', 'id' => 'to-retrieve-a-byte-range-of-an-object--1481832674603', 'title' => 'To retrieve a byte range of an object ', ], ], 'GetObjectAcl' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'Grants' => [ [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'WRITE', ], [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'WRITE_ACP', ], [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'READ', ], [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => '852b113eexamplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'READ_ACP', ], ], 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves access control list (ACL) of an object.', 'id' => 'to-retrieve-object-acl-1481833557740', 'title' => 'To retrieve object ACL', ], ], 'GetObjectTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'output' => [ 'TagSet' => [ [ 'Key' => 'Key1', 'Value' => 'Value1', ], ], 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves tag set of an object. 
The request specifies object version.', 'id' => 'to-retrieve-tag-set-of-a-specific-object-version-1483400283663', 'title' => 'To retrieve tag set of a specific object version', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'TagSet' => [ [ 'Key' => 'Key4', 'Value' => 'Value4', ], [ 'Key' => 'Key3', 'Value' => 'Value3', ], ], 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves tag set of an object.', 'id' => 'to-retrieve-tag-set-of-an-object-1481833847896', 'title' => 'To retrieve tag set of an object', ], ], 'GetObjectTorrent' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves torrent files of an object.', 'id' => 'to-retrieve-torrent-files-for-an-object-1481834115959', 'title' => 'To retrieve torrent files for an object', ], ], 'HeadBucket' => [ [ 'input' => [ 'Bucket' => 'acl1', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'This operation checks to see if a bucket exists.', 'id' => 'to-determine-if-bucket-exists-1473110292262', 'title' => 'To determine if bucket exists', ], ], 'HeadObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'AcceptRanges' => 'bytes', 'ContentLength' => '3191', 'ContentType' => 'image/jpeg', 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'LastModified' => 'Thu, 15 Dec 2016 01:19:41 GMT', 'Metadata' => [], 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves an object metadata.', 'id' => 'to-retrieve-metadata-of-an-object-without-returning-the-object-itself-1481834820480', 'title' => 'To retrieve metadata of an object without returning the object itself', ], ], 'ListBuckets' => [ [ 'output' => [ 'Buckets' => [ [ 'CreationDate' => '2012-02-15T21: 03: 02.000Z', 'Name' => 'examplebucket', ], [ 'CreationDate' => '2011-07-24T19: 33: 50.000Z', 'Name' => 'examplebucket2', ], [ 'CreationDate' => '2010-12-17T00: 56: 49.000Z', 'Name' => 'examplebucket3', ], ], 'Owner' => [ 'DisplayName' => 'own-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example return versions of an object with specific key name prefix. The request limits the number of items returned to two. If there are are more than two object version, S3 returns NextToken in the response. 
You can specify this token value in your next request to fetch next set of object versions.', 'id' => 'to-list-object-versions-1481910996058', 'title' => 'To list object versions', ], ], 'ListMultipartUploads' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Uploads' => [ [ 'Initiated' => '2014-05-01T05:40:58.000Z', 'Initiator' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'examplelUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--', ], [ 'Initiated' => '2014-05-01T05:41:27.000Z', 'Initiator' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example lists in-progress multipart uploads on a specific bucket.', 'id' => 'to-list-in-progress-multipart-uploads-on-a-bucket-1481852775260', 'title' => 'To list in-progress multipart uploads on a bucket', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'KeyMarker' => 'nextkeyfrompreviousresponse', 'MaxUploads' => '2', 'UploadIdMarker' => 'valuefrompreviousresponse', ], 'output' => [ 'Bucket' => 'acl1', 'IsTruncated' => true, 'KeyMarker' => '', 'MaxUploads' => '2', 'NextKeyMarker' => 'someobjectkey', 'NextUploadIdMarker' => 'examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--', 'UploadIdMarker' => '', 'Uploads' => [ [ 'Initiated' => '2014-05-01T05:40:58.000Z', 'Initiator' => [ 'DisplayName' => 'ownder-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'mohanataws', 'ID' => '852b113e7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'gZ30jIqlUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--', ], [ 'Initiated' => '2014-05-01T05:41:27.000Z', 'Initiator' => [ 'DisplayName' => 'ownder-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'ownder-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'b7tZSqIlo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example specifies the upload-id-marker and key-marker from previous truncated response to retrieve next setup of multipart uploads.', 'id' => 'list-next-set-of-multipart-uploads-when-previous-result-is-truncated-1482428106748', 'title' => 'List next set of multipart uploads when previous result is truncated', ], ], 'ListObjectVersions' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Prefix' => 'HappyFace.jpg', ], 'output' => [ 'Versions' => [ [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'IsLatest' => true, 'Key' => 'HappyFace.jpg', 'LastModified' => 
'2016-12-15T01:19:41.000Z', 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 3191, 'StorageClass' => 'STANDARD', 'VersionId' => 'null', ], [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'IsLatest' => false, 'Key' => 'HappyFace.jpg', 'LastModified' => '2016-12-13T00:58:26.000Z', 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 3191, 'StorageClass' => 'STANDARD', 'VersionId' => 'PHtexPGjH2y.zBgT8LmB7wwLI2mpbz.k', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example return versions of an object with specific key name prefix. The request limits the number of items returned to two. If there are are more than two object version, S3 returns NextToken in the response. You can specify this token value in your next request to fetch next set of object versions.', 'id' => 'to-list-object-versions-1481910996058', 'title' => 'To list object versions', ], ], 'ListObjects' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'MaxKeys' => '2', ], 'output' => [ 'Contents' => [ [ 'ETag' => '"70ee1738b6b21e2c8a43f3a5ab0eee71"', 'Key' => 'example1.jpg', 'LastModified' => '2014-11-21T19:40:05.000Z', 'Owner' => [ 'DisplayName' => 'myname', 'ID' => '12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 11, 'StorageClass' => 'STANDARD', ], [ 'ETag' => '"9c8af9a76df052144598c115ef33e511"', 'Key' => 'example2.jpg', 'LastModified' => '2013-11-15T01:10:49.000Z', 'Owner' => [ 'DisplayName' => 'myname', 'ID' => '12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 713193, 'StorageClass' => 'STANDARD', ], ], 'NextMarker' => 'eyJNYXJrZXIiOiBudWxsLCAiYm90b190cnVuY2F0ZV9hbW91bnQiOiAyfQ==', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example list two objects in a bucket.', 'id' => 'to-list-objects-in-a-bucket-1473447646507', 'title' => 'To list objects in a bucket', ], ], 'ListObjectsV2' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'MaxKeys' => '2', ], 'output' => [ 'Contents' => [ [ 'ETag' => '"70ee1738b6b21e2c8a43f3a5ab0eee71"', 'Key' => 'happyface.jpg', 'LastModified' => '2014-11-21T19:40:05.000Z', 'Size' => 11, 'StorageClass' => 'STANDARD', ], [ 'ETag' => '"becf17f89c30367a9a44495d62ed521a-1"', 'Key' => 'test.jpg', 'LastModified' => '2014-05-02T04:51:50.000Z', 'Size' => 4192256, 'StorageClass' => 'STANDARD', ], ], 'IsTruncated' => true, 'KeyCount' => '2', 'MaxKeys' => '2', 'Name' => 'examplebucket', 'NextContinuationToken' => '1w41l63U0xa8q7smH50vCxyTQqdxo69O3EmK28Bi5PcROI4wI/EyIJg==', 'Prefix' => '', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves object list. The request specifies max keys to limit response to include only 2 object keys. 
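Truncated listings like this one drive pagination: when IsTruncated is set, the NextContinuationToken from one page is passed as ContinuationToken on the next request. A minimal sketch, again assuming the hypothetical $s3 client defined earlier:

// Page through a bucket two keys at a time.
$params = ['Bucket' => 'examplebucket', 'MaxKeys' => 2];
do {
    $page = $s3->listObjectsV2($params);
    foreach ($page['Contents'] ?? [] as $object) {
        echo $object['Key'], PHP_EOL;
    }
    // S3 sets NextContinuationToken whenever more keys remain.
    $params['ContinuationToken'] = $page['NextContinuationToken'];
} while ($page['IsTruncated']);

The SDK also wraps this loop for you via $s3->getPaginator('ListObjectsV2', ['Bucket' => 'examplebucket']).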
', 'id' => 'to-get-object-list', 'title' => 'To get object list', ], ], 'ListParts' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'bigobject', 'UploadId' => 'example7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [ 'Initiator' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Parts' => [ [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'LastModified' => '2016-12-16T00:11:42.000Z', 'PartNumber' => '1', 'Size' => 26246026, ], [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'LastModified' => '2016-12-16T00:15:01.000Z', 'PartNumber' => '2', 'Size' => 26246026, ], ], 'StorageClass' => 'STANDARD', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example lists parts uploaded for a specific multipart upload.', 'id' => 'to-list-parts-of-a-multipart-upload-1481852006923', 'title' => 'To list parts of a multipart upload.', ], ], 'PutBucketAcl' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'GrantFullControl' => 'id=examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484', 'GrantWrite' => 'uri=http://acs.amazonaws.com/groups/s3/LogDelivery', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example replaces existing ACL on a bucket. The ACL grants the bucket owner (specified using the owner ID) and write permission to the LogDelivery group. Because this is a replace operation, you must specify all the grants in your request. To incrementally add or remove ACL grants, you might use the console.', 'id' => 'put-bucket-acl-1482260397033', 'title' => 'Put bucket acl', ], ], 'PutBucketCors' => [ [ 'input' => [ 'Bucket' => '', 'CORSConfiguration' => [ 'CORSRules' => [ [ 'AllowedHeaders' => [ '*', ], 'AllowedMethods' => [ 'PUT', 'POST', 'DELETE', ], 'AllowedOrigins' => [ 'http://www.example.com', ], 'ExposeHeaders' => [ 'x-amz-server-side-encryption', ], 'MaxAgeSeconds' => 3000, ], [ 'AllowedHeaders' => [ 'Authorization', ], 'AllowedMethods' => [ 'GET', ], 'AllowedOrigins' => [ '*', ], 'MaxAgeSeconds' => 3000, ], ], ], 'ContentMD5' => '', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example enables PUT, POST, and DELETE requests from www.example.com, and enables GET requests from any domain.', 'id' => 'to-set-cors-configuration-on-a-bucket-1483037818805', 'title' => 'To set cors configuration on a bucket.', ], ], 'PutBucketLifecycleConfiguration' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'LifecycleConfiguration' => [ 'Rules' => [ [ 'Expiration' => [ 'Days' => 3650, ], 'Filter' => [ 'Prefix' => 'documents/', ], 'ID' => 'TestOnly', 'Status' => 'Enabled', 'Transitions' => [ [ 'Days' => 365, 'StorageClass' => 'GLACIER', ], ], ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example replaces existing lifecycle configuration, if any, on the specified bucket. 
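Because the call replaces the existing lifecycle configuration rather than merging into it, the request has to carry every rule that should remain in force. A minimal sketch of the entry above, once more with the hypothetical $s3 client:

// Replace the bucket's lifecycle configuration wholesale: transition
// objects under documents/ to Glacier after a year, expire after ten.
$s3->putBucketLifecycleConfiguration([
    'Bucket' => 'examplebucket',
    'LifecycleConfiguration' => [
        'Rules' => [
            [
                'ID'          => 'TestOnly',
                'Filter'      => ['Prefix' => 'documents/'],
                'Status'      => 'Enabled',
                'Transitions' => [
                    ['Days' => 365, 'StorageClass' => 'GLACIER'],
                ],
                'Expiration'  => ['Days' => 3650],
            ],
        ],
    ],
]);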
', 'id' => 'put-bucket-lifecycle-1482264533092', 'title' => 'Put bucket lifecycle', ], ], 'PutBucketLogging' => [ [ 'input' => [ 'Bucket' => 'sourcebucket', 'BucketLoggingStatus' => [ 'LoggingEnabled' => [ 'TargetBucket' => 'targetbucket', 'TargetGrants' => [ [ 'Grantee' => [ 'Type' => 'Group', 'URI' => 'http://acs.amazonaws.com/groups/global/AllUsers', ], 'Permission' => 'READ', ], ], 'TargetPrefix' => 'MyBucketLogs/', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets logging policy on a bucket. For the Log Delivery group to deliver logs to the destination bucket, it needs permission for the READ_ACP action which the policy grants.', 'id' => 'set-logging-configuration-for-a-bucket-1482269119909', 'title' => 'Set logging configuration for a bucket', ], ], 'PutBucketNotificationConfiguration' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'NotificationConfiguration' => [ 'TopicConfigurations' => [ [ 'Events' => [ 's3:ObjectCreated:*', ], 'TopicArn' => 'arn:aws:sns:us-west-2:123456789012:s3-notification-topic', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets notification configuration on a bucket to publish the object created events to an SNS topic.', 'id' => 'set-notification-configuration-for-a-bucket-1482270296426', 'title' => 'Set notification configuration for a bucket', ], ], 'PutBucketPolicy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Policy' => '{"Version": "2012-10-17", "Statement": [{ "Sid": "id-1","Effect": "Allow","Principal": {"AWS": "arn:aws:iam::123456789012:root"}, "Action": [ "s3:PutObject","s3:PutObjectAcl"], "Resource": ["arn:aws:s3:::acl3/*" ] } ]}', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets a permission policy on a bucket.', 'id' => 'set-bucket-policy-1482448903302', 'title' => 'Set bucket policy', ], ], 'PutBucketReplication' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'ReplicationConfiguration' => [ 'Role' => 'arn:aws:iam::123456789012:role/examplerole', 'Rules' => [ [ 'Destination' => [ 'Bucket' => 'arn:aws:s3:::destinationbucket', 'StorageClass' => 'STANDARD', ], 'Prefix' => '', 'Status' => 'Enabled', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets replication configuration on a bucket.', 'id' => 'id-1', 'title' => 'Set replication configuration on a bucket', ], ], 'PutBucketRequestPayment' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'RequestPaymentConfiguration' => [ 'Payer' => 'Requester', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets request payment configuration on a bucket so that person requesting the download is charged.', 'id' => 'set-request-payment-configuration-on-a-bucket-1482343596680', 'title' => 'Set request payment configuration on a bucket.', ], ], 'PutBucketTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Tagging' => [ 'TagSet' => [ [ 'Key' => 'Key1', 'Value' => 'Value1', ], [ 'Key' => 'Key2', 'Value' => 'Value2', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets tags on a bucket. 
Any existing tags are replaced.', 'id' => 'set-tags-on-a-bucket-1482346269066', 'title' => 'Set tags on a bucket', ], ], 'PutBucketVersioning' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'VersioningConfiguration' => [ 'MFADelete' => 'Disabled', 'Status' => 'Enabled', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets versioning configuration on bucket. The configuration enables versioning on the bucket.', 'id' => 'set-versioning-configuration-on-a-bucket-1482344186279', 'title' => 'Set versioning configuration on a bucket', ], ], 'PutBucketWebsite' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'ContentMD5' => '', 'WebsiteConfiguration' => [ 'ErrorDocument' => [ 'Key' => 'error.html', ], 'IndexDocument' => [ 'Suffix' => 'index.html', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example adds website configuration to a bucket.', 'id' => 'set-website-configuration-on-a-bucket-1482346836261', 'title' => 'Set website configuration on a bucket', ], ], 'PutObject' => [ [ 'input' => [ 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'objectkey', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'Bvq0EDKxOcXLJXNo_Lkz37eM3R4pfzyQ', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates an object. If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-create-an-object-1483147613675', 'title' => 'To create an object.', ], [ 'input' => [ 'Body' => 'HappyFace.jpg', 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'tpf3zF08nBplQK1XLOefGskR7mGDwcDk', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object to a versioning-enabled bucket. The source file is specified using Windows file syntax. S3 returns VersionId of the newly created object.', 'id' => 'to-upload-an-object-1481760101010', 'title' => 'To upload an object', ], [ 'input' => [ 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', 'ServerSideEncryption' => 'AES256', 'Tagging' => 'key1=value1&key2=value2', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'ServerSideEncryption' => 'AES256', 'VersionId' => 'Ri.vC6qVlA4dEnjgRV4ZHsHoFIjqEMNt', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads and object. The request specifies the optional server-side encryption option. The request also specifies optional object tags. If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-upload-an-object-and-specify-server-side-encryption-and-object-tags-1483398331831', 'title' => 'To upload an object and specify server-side encryption and object tags', ], [ 'input' => [ 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', 'Metadata' => [ 'metadata1' => 'value1', 'metadata2' => 'value2', ], ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'pSKidl4pHBiNwukdbcPXAIs.sshFFOc0', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates an object. The request also specifies optional metadata. 
If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-upload-object-and-specify-user-defined-metadata-1483396974757', 'title' => 'To upload object and specify user-defined metadata', ], [ 'input' => [ 'Body' => 'c:\\HappyFace.jpg', 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'Tagging' => 'key1=value1&key2=value2', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'psM2sYY4.o1501dSx8wMvnkOzSBB.V4a', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object. The request specifies optional object tags. The bucket is versioned, therefore S3 returns version ID of the newly created object.', 'id' => 'to-upload-an-object-and-specify-optional-tags-1481762310955', 'title' => 'To upload an object and specify optional tags', ], [ 'input' => [ 'Body' => 'HappyFace.jpg', 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'ServerSideEncryption' => 'AES256', 'StorageClass' => 'STANDARD_IA', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'ServerSideEncryption' => 'AES256', 'VersionId' => 'CG612hodqujkf8FaaNfp8U..FIhLROcp', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object. The request specifies optional request headers to directs S3 to use specific storage class and use server-side encryption.', 'id' => 'to-upload-an-object-(specify-optional-headers)', 'title' => 'To upload an object (specify optional headers)', ], [ 'input' => [ 'ACL' => 'authenticated-read', 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'Kirh.unyZwjQ69YxcQLA8z4F5j3kJJKr', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads and object. The request specifies optional canned ACL (access control list) to all READ access to authenticated users. If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-upload-an-object-and-specify-canned-acl-1483397779571', 'title' => 'To upload an object and specify canned ACL.', ], ], 'PutObjectAcl' => [ [ 'input' => [ 'AccessControlPolicy' => [], 'Bucket' => 'examplebucket', 'GrantFullControl' => 'emailaddress=user1@example.com,emailaddress=user2@example.com', 'GrantRead' => 'uri=http://acs.amazonaws.com/groups/global/AllUsers', 'Key' => 'HappyFace.jpg', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example adds grants to an object ACL. 
The first permission grants user1 and user2 FULL_CONTROL and the AllUsers group READ permission.', 'id' => 'to-grant-permissions-using-object-acl-1481835549285', 'title' => 'To grant permissions using object ACL', ], ], 'PutObjectTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'Tagging' => [ 'TagSet' => [ [ 'Key' => 'Key3', 'Value' => 'Value3', ], [ 'Key' => 'Key4', 'Value' => 'Value4', ], ], ], ], 'output' => [ 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example adds tags to an existing object.', 'id' => 'to-add-tags-to-an-existing-object-1481764668793', 'title' => 'To add tags to an existing object', ], ], 'RestoreObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'archivedobjectkey', 'RestoreRequest' => [ 'Days' => 1, 'GlacierJobParameters' => [ 'Tier' => 'Expedited', ], ], ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example restores for one day an archived copy of an object back into Amazon S3 bucket.', 'id' => 'to-restore-an-archived-object-1483049329953', 'title' => 'To restore an archived object', ], ], 'UploadPart' => [ [ 'input' => [ 'Body' => 'fileToUpload', 'Bucket' => 'examplebucket', 'Key' => 'examplelargeobject', 'PartNumber' => '1', 'UploadId' => 'xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads part 1 of a multipart upload. The example specifies a file name for the part data. The Upload ID is same that is returned by the initiate multipart upload.', 'id' => 'to-upload-a-part-1481847914943', 'title' => 'To upload a part', ], ], 'UploadPartCopy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'CopySource' => '/bucketname/sourceobjectkey', 'CopySourceRange' => 'bytes=1-100000', 'Key' => 'examplelargeobject', 'PartNumber' => '2', 'UploadId' => 'exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--', ], 'output' => [ 'CopyPartResult' => [ 'ETag' => '"65d16d19e65a7508a51f043180edcc36"', 'LastModified' => '2016-12-29T21:44:28.000Z', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads a part of a multipart upload by copying a specified byte range from an existing object as data source.', 'id' => 'to-upload-a-part-by-copying-byte-range-from-an-existing-object-as-data-source-1483048068594', 'title' => 'To upload a part by copying byte range from an existing object as data source', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'CopySource' => '/bucketname/sourceobjectkey', 'Key' => 'examplelargeobject', 'PartNumber' => '1', 'UploadId' => 'exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--', ], 'output' => [ 'CopyPartResult' => [ 'ETag' => '"b0c6f0e7e054ab8fa2536a2677f8734d"', 'LastModified' => '2016-12-29T21:24:43.000Z', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads a part of a multipart upload by copying data from an existing object as data source.', 'id' => 'to-upload-a-part-by-copying-data-from-an-existing-object-as-data-source-1483046746348', 'title' => 'To upload a part by copying data from an existing object as data source', ], ], ],]; +return [ 'version' => '1.0', 'examples' => [ 'AbortMultipartUpload' => [ [ 'input' => [ 
'Bucket' => 'examplebucket', 'Key' => 'bigobject', 'UploadId' => 'xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example aborts a multipart upload.', 'id' => 'to-abort-a-multipart-upload-1481853354987', 'title' => 'To abort a multipart upload', ], ], 'CompleteMultipartUpload' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'bigobject', 'MultipartUpload' => [ 'Parts' => [ [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'PartNumber' => '1', ], [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'PartNumber' => '2', ], ], ], 'UploadId' => '7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [ 'Bucket' => 'acexamplebucket', 'ETag' => '"4d9031c7644d8081c2829f4ea23c55f7-2"', 'Key' => 'bigobject', 'Location' => 'https://examplebucket.s3.amazonaws.com/bigobject', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example completes a multipart upload.', 'id' => 'to-complete-multipart-upload-1481851590483', 'title' => 'To complete multipart upload', ], ], 'CopyObject' => [ [ 'input' => [ 'Bucket' => 'destinationbucket', 'CopySource' => '/sourcebucket/HappyFacejpg', 'Key' => 'HappyFaceCopyjpg', ], 'output' => [ 'CopyObjectResult' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'LastModified' => '2016-12-15T17:38:53.000Z', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example copies an object from one bucket to another.', 'id' => 'to-copy-an-object-1481823186878', 'title' => 'To copy an object', ], ], 'CreateBucket' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Location' => '/examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates a bucket.', 'id' => 'to-create-a-bucket--1472851826060', 'title' => 'To create a bucket ', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'CreateBucketConfiguration' => [ 'LocationConstraint' => 'eu-west-1', ], ], 'output' => [ 'Location' => 'http://examplebucket.s3.amazonaws.com/', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates a bucket. 
The request specifies an AWS region where to create the bucket.', 'id' => 'to-create-a-bucket-in-a-specific-region-1483399072992', 'title' => 'To create a bucket in a specific region', ], ], 'CreateMultipartUpload' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'largeobject', ], 'output' => [ 'Bucket' => 'examplebucket', 'Key' => 'largeobject', 'UploadId' => 'ibZBv_75gd9r8lH_gqXatLdxMVpAlj6ZQjEs.OwyF3953YdwbcQnMA2BLGn8Lx12fQNICtMw5KyteFeHw.Sjng--', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example initiates a multipart upload.', 'id' => 'to-initiate-a-multipart-upload-1481836794513', 'title' => 'To initiate a multipart upload', ], ], 'DeleteBucket' => [ [ 'input' => [ 'Bucket' => 'forrandall2', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes the specified bucket.', 'id' => 'to-delete-a-bucket-1473108514262', 'title' => 'To delete a bucket', ], ], 'DeleteBucketCors' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes CORS configuration on a bucket.', 'id' => 'to-delete-cors-configuration-on-a-bucket-1483042856112', 'title' => 'To delete cors configuration on a bucket.', ], ], 'DeleteBucketLifecycle' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes lifecycle configuration on a bucket.', 'id' => 'to-delete-lifecycle-configuration-on-a-bucket-1483043310583', 'title' => 'To delete lifecycle configuration on a bucket.', ], ], 'DeleteBucketPolicy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes bucket policy on the specified bucket.', 'id' => 'to-delete-bucket-policy-1483043406577', 'title' => 'To delete bucket policy', ], ], 'DeleteBucketReplication' => [ [ 'input' => [ 'Bucket' => 'example', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes replication configuration set on bucket.', 'id' => 'to-delete-bucket-replication-configuration-1483043684668', 'title' => 'To delete bucket replication configuration', ], ], 'DeleteBucketTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes bucket tags.', 'id' => 'to-delete-bucket-tags-1483043846509', 'title' => 'To delete bucket tags', ], ], 'DeleteBucketWebsite' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes bucket website configuration.', 'id' => 'to-delete-bucket-website-configuration-1483043937825', 'title' => 'To delete bucket website configuration', ], ], 'DeleteObject' => [ [ 'input' => [ 'Bucket' => 'ExampleBucket', 'Key' => 'HappyFace.jpg', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes an object from a non-versioned bucket.', 'id' => 'to-delete-an-object-from-a-non-versioned-bucket-1481588533089', 'title' => 'To delete an object (from a non-versioned bucket)', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'objectkey.jpg', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes an object from an S3 bucket.', 'id' => 'to-delete-an-object-1472850136595', 'title' => 'To 
delete an object', ], ], 'DeleteObjectTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example removes tag set associated with the specified object. If the bucket is versioning enabled, the operation removes tag set from the latest object version.', 'id' => 'to-remove-tag-set-from-an-object-1483145342862', 'title' => 'To remove tag set from an object', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'output' => [ 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example removes tag set associated with the specified object version. The request specifies both the object key and object version.', 'id' => 'to-remove-tag-set-from-an-object-version-1483145285913', 'title' => 'To remove tag set from an object version', ], ], 'DeleteObjects' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Delete' => [ 'Objects' => [ [ 'Key' => 'objectkey1', ], [ 'Key' => 'objectkey2', ], ], 'Quiet' => false, ], ], 'output' => [ 'Deleted' => [ [ 'DeleteMarker' => 'true', 'DeleteMarkerVersionId' => 'A._w1z6EFiCF5uhtQMDal9JDkID9tQ7F', 'Key' => 'objectkey1', ], [ 'DeleteMarker' => 'true', 'DeleteMarkerVersionId' => 'iOd_ORxhkKe_e8G8_oSGxt2PjsCZKlkt', 'Key' => 'objectkey2', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes objects from a bucket. The bucket is versioned, and the request does not specify the object version to delete. In this case, all versions remain in the bucket and S3 adds a delete marker.', 'id' => 'to-delete-multiple-objects-from-a-versioned-bucket-1483146248805', 'title' => 'To delete multiple objects from a versioned bucket', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Delete' => [ 'Objects' => [ [ 'Key' => 'HappyFace.jpg', 'VersionId' => '2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b', ], [ 'Key' => 'HappyFace.jpg', 'VersionId' => 'yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd', ], ], 'Quiet' => false, ], ], 'output' => [ 'Deleted' => [ [ 'Key' => 'HappyFace.jpg', 'VersionId' => 'yoz3HB.ZhCS_tKVEmIOr7qYyyAaZSKVd', ], [ 'Key' => 'HappyFace.jpg', 'VersionId' => '2LWg7lQLnY41.maGB5Z6SWW.dcq0vx7b', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example deletes objects from a bucket. The request specifies object versions. 
S3 deletes specific object versions and returns the key and versions of deleted objects in the response.', 'id' => 'to-delete-multiple-object-versions-from-a-versioned-bucket-1483147087737', 'title' => 'To delete multiple object versions from a versioned bucket', ], ], 'GetBucketCors' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'CORSRules' => [ [ 'AllowedHeaders' => [ 'Authorization', ], 'AllowedMethods' => [ 'GET', ], 'AllowedOrigins' => [ '*', ], 'MaxAgeSeconds' => 3000, ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns cross-origin resource sharing (CORS) configuration set on a bucket.', 'id' => 'to-get-cors-configuration-set-on-a-bucket-1481596855475', 'title' => 'To get cors configuration set on a bucket', ], ], 'GetBucketLifecycle' => [ [ 'input' => [ 'Bucket' => 'acl1', ], 'output' => [ 'Rules' => [ [ 'Expiration' => [ 'Days' => 1, ], 'ID' => 'delete logs', 'Prefix' => '123/', 'Status' => 'Enabled', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example gets ACL on the specified bucket.', 'id' => 'to-get-a-bucket-acl-1474413606503', 'title' => 'To get a bucket acl', ], ], 'GetBucketLifecycleConfiguration' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Rules' => [ [ 'ID' => 'Rule for TaxDocs/', 'Prefix' => 'TaxDocs', 'Status' => 'Enabled', 'Transitions' => [ [ 'Days' => 365, 'StorageClass' => 'STANDARD_IA', ], ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves lifecycle configuration on set on a bucket. ', 'id' => 'to-get-lifecycle-configuration-on-a-bucket-1481666063200', 'title' => 'To get lifecycle configuration on a bucket', ], ], 'GetBucketLocation' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'LocationConstraint' => 'us-west-2', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns bucket location.', 'id' => 'to-get-bucket-location-1481594573609', 'title' => 'To get bucket location', ], ], 'GetBucketNotification' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'QueueConfiguration' => [ 'Event' => 's3:ObjectCreated:Put', 'Events' => [ 's3:ObjectCreated:Put', ], 'Id' => 'MDQ2OGQ4NDEtOTBmNi00YTM4LTk0NzYtZDIwN2I3NWQ1NjIx', 'Queue' => 'arn:aws:sqs:us-east-1:acct-id:S3ObjectCreatedEventQueue', ], 'TopicConfiguration' => [ 'Event' => 's3:ObjectCreated:Copy', 'Events' => [ 's3:ObjectCreated:Copy', ], 'Id' => 'YTVkMWEzZGUtNTY1NS00ZmE2LWJjYjktMmRlY2QwODFkNTJi', 'Topic' => 'arn:aws:sns:us-east-1:acct-id:S3ObjectCreatedEventTopic', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns notification configuration set on a bucket.', 'id' => 'to-get-notification-configuration-set-on-a-bucket-1481594028667', 'title' => 'To get notification configuration set on a bucket', ], [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'QueueConfiguration' => [ 'Event' => 's3:ObjectCreated:Put', 'Events' => [ 's3:ObjectCreated:Put', ], 'Id' => 'MDQ2OGQ4NDEtOTBmNi00YTM4LTk0NzYtZDIwN2I3NWQ1NjIx', 'Queue' => 'arn:aws:sqs:us-east-1:acct-id:S3ObjectCreatedEventQueue', ], 'TopicConfiguration' => [ 'Event' => 's3:ObjectCreated:Copy', 'Events' => [ 's3:ObjectCreated:Copy', ], 'Id' => 'YTVkMWEzZGUtNTY1NS00ZmE2LWJjYjktMmRlY2QwODFkNTJi', 'Topic' => 'arn:aws:sns:us-east-1:acct-id:S3ObjectCreatedEventTopic', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 
'description' => 'The following example returns notification configuration set on a bucket.', 'id' => 'to-get-notification-configuration-set-on-a-bucket-1481594028667', 'title' => 'To get notification configuration set on a bucket', ], ], 'GetBucketPolicy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Policy' => '{"Version":"2008-10-17","Id":"LogPolicy","Statement":[{"Sid":"Enables the log delivery group to publish logs to your bucket ","Effect":"Allow","Principal":{"AWS":"111122223333"},"Action":["s3:GetBucketAcl","s3:GetObjectAcl","s3:PutObject"],"Resource":["arn:aws:s3:::policytest1/*","arn:aws:s3:::policytest1"]}]}', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns bucket policy associated with a bucket.', 'id' => 'to-get-bucket-policy-1481595098424', 'title' => 'To get bucket policy', ], ], 'GetBucketReplication' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'ReplicationConfiguration' => [ 'Role' => 'arn:aws:iam::acct-id:role/example-role', 'Rules' => [ [ 'Destination' => [ 'Bucket' => 'arn:aws:s3:::destination-bucket', ], 'ID' => 'MWIwNTkwZmItMTE3MS00ZTc3LWJkZDEtNzRmODQwYzc1OTQy', 'Prefix' => 'Tax', 'Status' => 'Enabled', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns replication configuration set on a bucket.', 'id' => 'to-get-replication-configuration-set-on-a-bucket-1481593597175', 'title' => 'To get replication configuration set on a bucket', ], ], 'GetBucketRequestPayment' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Payer' => 'BucketOwner', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves the request payment configuration set on a bucket.', 'id' => 'to-get-bucket-versioning-configuration-1483037183929', 'title' => 'To get bucket request payment configuration', ], ], 'GetBucketTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'TagSet' => [ [ 'Key' => 'key1', 'Value' => 'value1', ], [ 'Key' => 'key2', 'Value' => 'value2', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns tag set associated with a bucket.', 'id' => 'to-get-tag-set-associated-with-a-bucket-1481593232107', 'title' => 'To get tag set associated with a bucket', ], ], 'GetBucketVersioning' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'MFADelete' => 'Disabled', 'Status' => 'Enabled', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves bucket versioning configuration.', 'id' => 'to-get-bucket-versioning-configuration-1483037183929', 'title' => 'To get bucket versioning configuration', ], ], 'GetBucketWebsite' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'ErrorDocument' => [ 'Key' => 'error.html', ], 'IndexDocument' => [ 'Suffix' => 'index.html', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves website configuration of a bucket.', 'id' => 'to-get-bucket-website-configuration-1483037016926', 'title' => 'To get bucket website configuration', ], ], 'GetObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'SampleFile.txt', 'Range' => 'bytes=0-9', ], 'output' => [ 'AcceptRanges' => 'bytes', 'ContentLength' => '10', 'ContentRange' => 'bytes 0-9/43', 'ContentType' => 'text/plain', 'ETag' => '"0d94420ffd0bc68cd3d152506b97a9cc"', 'LastModified' => 'Thu, 09 Oct 2014 22:57:28 GMT',
'Metadata' => [], 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves an object for an S3 bucket. The request specifies the range header to retrieve a specific byte range.', 'id' => 'to-retrieve-a-byte-range-of-an-object--1481832674603', 'title' => 'To retrieve a byte range of an object ', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'AcceptRanges' => 'bytes', 'ContentLength' => '3191', 'ContentType' => 'image/jpeg', 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'LastModified' => 'Thu, 15 Dec 2016 01:19:41 GMT', 'Metadata' => [], 'TagCount' => 2, 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves an object for an S3 bucket.', 'id' => 'to-retrieve-an-object-1481827837012', 'title' => 'To retrieve an object', ], ], 'GetObjectAcl' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'Grants' => [ [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'WRITE', ], [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'WRITE_ACP', ], [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'READ', ], [ 'Grantee' => [ 'DisplayName' => 'owner-display-name', 'ID' => '852b113eexamplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', 'Type' => 'CanonicalUser', ], 'Permission' => 'READ_ACP', ], ], 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves access control list (ACL) of an object.', 'id' => 'to-retrieve-object-acl-1481833557740', 'title' => 'To retrieve object ACL', ], ], 'GetObjectTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'TagSet' => [ [ 'Key' => 'Key4', 'Value' => 'Value4', ], [ 'Key' => 'Key3', 'Value' => 'Value3', ], ], 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves tag set of an object.', 'id' => 'to-retrieve-tag-set-of-an-object-1481833847896', 'title' => 'To retrieve tag set of an object', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'output' => [ 'TagSet' => [ [ 'Key' => 'Key1', 'Value' => 'Value1', ], ], 'VersionId' => 'ydlaNkwWm0SfKJR.T1b1fIdPRbldTYRI', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves tag set of an object. 
The request specifies object version.', 'id' => 'to-retrieve-tag-set-of-a-specific-object-version-1483400283663', 'title' => 'To retrieve tag set of a specific object version', ], ], 'GetObjectTorrent' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves torrent files of an object.', 'id' => 'to-retrieve-torrent-files-for-an-object-1481834115959', 'title' => 'To retrieve torrent files for an object', ], ], 'HeadBucket' => [ [ 'input' => [ 'Bucket' => 'acl1', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'This operation checks to see if a bucket exists.', 'id' => 'to-determine-if-bucket-exists-1473110292262', 'title' => 'To determine if bucket exists', ], ], 'HeadObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'AcceptRanges' => 'bytes', 'ContentLength' => '3191', 'ContentType' => 'image/jpeg', 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'LastModified' => 'Thu, 15 Dec 2016 01:19:41 GMT', 'Metadata' => [], 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves the metadata of an object.', 'id' => 'to-retrieve-metadata-of-an-object-without-returning-the-object-itself-1481834820480', 'title' => 'To retrieve metadata of an object without returning the object itself', ], ], 'ListBuckets' => [ [ 'output' => [ 'Buckets' => [ [ 'CreationDate' => '2012-02-15T21: 03: 02.000Z', 'Name' => 'examplebucket', ], [ 'CreationDate' => '2011-07-24T19: 33: 50.000Z', 'Name' => 'examplebucket2', ], [ 'CreationDate' => '2010-12-17T00: 56: 49.000Z', 'Name' => 'examplebucket3', ], ], 'Owner' => [ 'DisplayName' => 'own-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns all the buckets owned by the sender of this request.', 'id' => 'to-list-object-versions-1481910996058', 'title' => 'To list buckets', ], ], 'ListMultipartUploads' => [ [ 'input' => [ 'Bucket' => 'examplebucket', ], 'output' => [ 'Uploads' => [ [ 'Initiated' => '2014-05-01T05:40:58.000Z', 'Initiator' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'examplelUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--', ], [ 'Initiated' => '2014-05-01T05:41:27.000Z', 'Initiator' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example lists in-progress multipart uploads on a specific bucket.', 'id' => 'to-list-in-progress-multipart-uploads-on-a-bucket-1481852775260', 'title' => 'To list in-progress multipart uploads on a bucket', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'KeyMarker' => 'nextkeyfrompreviousresponse', 'MaxUploads' => '2', 'UploadIdMarker' => 'valuefrompreviousresponse', ], 'output' => [ 'Bucket' => 'acl1', 'IsTruncated' => true, 'KeyMarker' => '', 'MaxUploads' => '2', 'NextKeyMarker' => 'someobjectkey', 'NextUploadIdMarker' => 'examplelo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--', 'UploadIdMarker' => '', 'Uploads' => [ [ 'Initiated' => '2014-05-01T05:40:58.000Z', 'Initiator' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'mohanataws', 'ID' => '852b113e7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'gZ30jIqlUa.CInXklLQtSMJITdUnoZ1Y5GACB5UckOtspm5zbDMCkPF_qkfZzMiFZ6dksmcnqxJyIBvQMG9X9Q--', ], [ 'Initiated' => '2014-05-01T05:41:27.000Z', 'Initiator' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Key' => 'JavaFile', 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'StorageClass' => 'STANDARD', 'UploadId' => 'b7tZSqIlo91lv1iwvWpvCiJWugw2xXLPAD7Z8cJyX9.WiIRgNrdG6Ldsn.9FtS63TCl1Uf5faTB.1U5Ckcbmdw--', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example specifies the upload-id-marker and key-marker from previous truncated response to retrieve the next set of multipart uploads.', 'id' => 'list-next-set-of-multipart-uploads-when-previous-result-is-truncated-1482428106748', 'title' => 'List next set of multipart uploads when previous result is truncated', ], ], 'ListObjectVersions' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Prefix' => 'HappyFace.jpg', ], 'output' => [ 'Versions' => [ [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'IsLatest' => true, 'Key' => 'HappyFace.jpg', 'LastModified' =>
'2016-12-15T01:19:41.000Z', 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 3191, 'StorageClass' => 'STANDARD', 'VersionId' => 'null', ], [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'IsLatest' => false, 'Key' => 'HappyFace.jpg', 'LastModified' => '2016-12-13T00:58:26.000Z', 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 3191, 'StorageClass' => 'STANDARD', 'VersionId' => 'PHtexPGjH2y.zBgT8LmB7wwLI2mpbz.k', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example returns versions of an object with specific key name prefix. The request limits the number of items returned to two. If there are more than two object versions, S3 returns NextToken in the response. You can specify this token value in your next request to fetch the next set of object versions.', 'id' => 'to-list-object-versions-1481910996058', 'title' => 'To list object versions', ], ], 'ListObjects' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'MaxKeys' => '2', ], 'output' => [ 'Contents' => [ [ 'ETag' => '"70ee1738b6b21e2c8a43f3a5ab0eee71"', 'Key' => 'example1.jpg', 'LastModified' => '2014-11-21T19:40:05.000Z', 'Owner' => [ 'DisplayName' => 'myname', 'ID' => '12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 11, 'StorageClass' => 'STANDARD', ], [ 'ETag' => '"9c8af9a76df052144598c115ef33e511"', 'Key' => 'example2.jpg', 'LastModified' => '2013-11-15T01:10:49.000Z', 'Owner' => [ 'DisplayName' => 'myname', 'ID' => '12345example25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Size' => 713193, 'StorageClass' => 'STANDARD', ], ], 'NextMarker' => 'eyJNYXJrZXIiOiBudWxsLCAiYm90b190cnVuY2F0ZV9hbW91bnQiOiAyfQ==', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example lists two objects in a bucket.', 'id' => 'to-list-objects-in-a-bucket-1473447646507', 'title' => 'To list objects in a bucket', ], ], 'ListObjectsV2' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'MaxKeys' => '2', ], 'output' => [ 'Contents' => [ [ 'ETag' => '"70ee1738b6b21e2c8a43f3a5ab0eee71"', 'Key' => 'happyface.jpg', 'LastModified' => '2014-11-21T19:40:05.000Z', 'Size' => 11, 'StorageClass' => 'STANDARD', ], [ 'ETag' => '"becf17f89c30367a9a44495d62ed521a-1"', 'Key' => 'test.jpg', 'LastModified' => '2014-05-02T04:51:50.000Z', 'Size' => 4192256, 'StorageClass' => 'STANDARD', ], ], 'IsTruncated' => true, 'KeyCount' => '2', 'MaxKeys' => '2', 'Name' => 'examplebucket', 'NextContinuationToken' => '1w41l63U0xa8q7smH50vCxyTQqdxo69O3EmK28Bi5PcROI4wI/EyIJg==', 'Prefix' => '', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example retrieves object list. The request specifies max keys to limit response to include only 2 object keys.
', 'id' => 'to-get-object-list', 'title' => 'To get object list', ], ], 'ListParts' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'bigobject', 'UploadId' => 'example7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [ 'Initiator' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Owner' => [ 'DisplayName' => 'owner-display-name', 'ID' => 'examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484be31bebcc', ], 'Parts' => [ [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'LastModified' => '2016-12-16T00:11:42.000Z', 'PartNumber' => '1', 'Size' => 26246026, ], [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', 'LastModified' => '2016-12-16T00:15:01.000Z', 'PartNumber' => '2', 'Size' => 26246026, ], ], 'StorageClass' => 'STANDARD', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example lists parts uploaded for a specific multipart upload.', 'id' => 'to-list-parts-of-a-multipart-upload-1481852006923', 'title' => 'To list parts of a multipart upload.', ], ], 'PutBucketAcl' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'GrantFullControl' => 'id=examplee7a2f25102679df27bb0ae12b3f85be6f290b936c4393484', 'GrantWrite' => 'uri=http://acs.amazonaws.com/groups/s3/LogDelivery', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example replaces existing ACL on a bucket. The ACL grants the bucket owner (specified using the owner ID) full control, and grants the LogDelivery group write permission. Because this is a replace operation, you must specify all the grants in your request. To incrementally add or remove ACL grants, you might use the console.', 'id' => 'put-bucket-acl-1482260397033', 'title' => 'Put bucket acl', ], ], 'PutBucketCors' => [ [ 'input' => [ 'Bucket' => '', 'CORSConfiguration' => [ 'CORSRules' => [ [ 'AllowedHeaders' => [ '*', ], 'AllowedMethods' => [ 'PUT', 'POST', 'DELETE', ], 'AllowedOrigins' => [ 'http://www.example.com', ], 'ExposeHeaders' => [ 'x-amz-server-side-encryption', ], 'MaxAgeSeconds' => 3000, ], [ 'AllowedHeaders' => [ 'Authorization', ], 'AllowedMethods' => [ 'GET', ], 'AllowedOrigins' => [ '*', ], 'MaxAgeSeconds' => 3000, ], ], ], 'ContentMD5' => '', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example enables PUT, POST, and DELETE requests from www.example.com, and enables GET requests from any domain.', 'id' => 'to-set-cors-configuration-on-a-bucket-1483037818805', 'title' => 'To set cors configuration on a bucket.', ], ], 'PutBucketLifecycleConfiguration' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'LifecycleConfiguration' => [ 'Rules' => [ [ 'Expiration' => [ 'Days' => 3650, ], 'Filter' => [ 'Prefix' => 'documents/', ], 'ID' => 'TestOnly', 'Status' => 'Enabled', 'Transitions' => [ [ 'Days' => 365, 'StorageClass' => 'GLACIER', ], ], ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example replaces existing lifecycle configuration, if any, on the specified bucket.
', 'id' => 'put-bucket-lifecycle-1482264533092', 'title' => 'Put bucket lifecycle', ], ], 'PutBucketLogging' => [ [ 'input' => [ 'Bucket' => 'sourcebucket', 'BucketLoggingStatus' => [ 'LoggingEnabled' => [ 'TargetBucket' => 'targetbucket', 'TargetGrants' => [ [ 'Grantee' => [ 'Type' => 'Group', 'URI' => 'http://acs.amazonaws.com/groups/global/AllUsers', ], 'Permission' => 'READ', ], ], 'TargetPrefix' => 'MyBucketLogs/', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets logging policy on a bucket. For the Log Delivery group to deliver logs to the destination bucket, it needs permission for the READ_ACP action which the policy grants.', 'id' => 'set-logging-configuration-for-a-bucket-1482269119909', 'title' => 'Set logging configuration for a bucket', ], ], 'PutBucketNotificationConfiguration' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'NotificationConfiguration' => [ 'TopicConfigurations' => [ [ 'Events' => [ 's3:ObjectCreated:*', ], 'TopicArn' => 'arn:aws:sns:us-west-2:123456789012:s3-notification-topic', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets notification configuration on a bucket to publish the object created events to an SNS topic.', 'id' => 'set-notification-configuration-for-a-bucket-1482270296426', 'title' => 'Set notification configuration for a bucket', ], ], 'PutBucketPolicy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Policy' => '{"Version": "2012-10-17", "Statement": [{ "Sid": "id-1","Effect": "Allow","Principal": {"AWS": "arn:aws:iam::123456789012:root"}, "Action": [ "s3:PutObject","s3:PutObjectAcl"], "Resource": ["arn:aws:s3:::acl3/*" ] } ]}', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets a permission policy on a bucket.', 'id' => 'set-bucket-policy-1482448903302', 'title' => 'Set bucket policy', ], ], 'PutBucketReplication' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'ReplicationConfiguration' => [ 'Role' => 'arn:aws:iam::123456789012:role/examplerole', 'Rules' => [ [ 'Destination' => [ 'Bucket' => 'arn:aws:s3:::destinationbucket', 'StorageClass' => 'STANDARD', ], 'Prefix' => '', 'Status' => 'Enabled', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets replication configuration on a bucket.', 'id' => 'id-1', 'title' => 'Set replication configuration on a bucket', ], ], 'PutBucketRequestPayment' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'RequestPaymentConfiguration' => [ 'Payer' => 'Requester', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets request payment configuration on a bucket so that person requesting the download is charged.', 'id' => 'set-request-payment-configuration-on-a-bucket-1482343596680', 'title' => 'Set request payment configuration on a bucket.', ], ], 'PutBucketTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Tagging' => [ 'TagSet' => [ [ 'Key' => 'Key1', 'Value' => 'Value1', ], [ 'Key' => 'Key2', 'Value' => 'Value2', ], ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets tags on a bucket. 
Any existing tags are replaced.', 'id' => 'set-tags-on-a-bucket-1482346269066', 'title' => 'Set tags on a bucket', ], ], 'PutBucketVersioning' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'VersioningConfiguration' => [ 'MFADelete' => 'Disabled', 'Status' => 'Enabled', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example sets versioning configuration on a bucket. The configuration enables versioning on the bucket.', 'id' => 'set-versioning-configuration-on-a-bucket-1482344186279', 'title' => 'Set versioning configuration on a bucket', ], ], 'PutBucketWebsite' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'ContentMD5' => '', 'WebsiteConfiguration' => [ 'ErrorDocument' => [ 'Key' => 'error.html', ], 'IndexDocument' => [ 'Suffix' => 'index.html', ], ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example adds website configuration to a bucket.', 'id' => 'set-website-configuration-on-a-bucket-1482346836261', 'title' => 'Set website configuration on a bucket', ], ], 'PutObject' => [ [ 'input' => [ 'ACL' => 'authenticated-read', 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'Kirh.unyZwjQ69YxcQLA8z4F5j3kJJKr', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object. The request specifies optional canned ACL (access control list) to allow READ access to authenticated users. If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-upload-an-object-and-specify-canned-acl-1483397779571', 'title' => 'To upload an object and specify canned ACL.', ], [ 'input' => [ 'Body' => 'HappyFace.jpg', 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'tpf3zF08nBplQK1XLOefGskR7mGDwcDk', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object to a versioning-enabled bucket. The source file is specified using Windows file syntax. S3 returns VersionId of the newly created object.', 'id' => 'to-upload-an-object-1481760101010', 'title' => 'To upload an object', ], [ 'input' => [ 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'objectkey', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'Bvq0EDKxOcXLJXNo_Lkz37eM3R4pfzyQ', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates an object. If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-create-an-object-1483147613675', 'title' => 'To create an object.', ], [ 'input' => [ 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', 'Metadata' => [ 'metadata1' => 'value1', 'metadata2' => 'value2', ], ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'pSKidl4pHBiNwukdbcPXAIs.sshFFOc0', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example creates an object. The request also specifies optional metadata.
If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-upload-object-and-specify-user-defined-metadata-1483396974757', 'title' => 'To upload object and specify user-defined metadata', ], [ 'input' => [ 'Body' => 'c:\\HappyFace.jpg', 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'Tagging' => 'key1=value1&key2=value2', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'VersionId' => 'psM2sYY4.o1501dSx8wMvnkOzSBB.V4a', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object. The request specifies optional object tags. The bucket is versioned, therefore S3 returns version ID of the newly created object.', 'id' => 'to-upload-an-object-and-specify-optional-tags-1481762310955', 'title' => 'To upload an object and specify optional tags', ], [ 'input' => [ 'Body' => 'filetoupload', 'Bucket' => 'examplebucket', 'Key' => 'exampleobject', 'ServerSideEncryption' => 'AES256', 'Tagging' => 'key1=value1&key2=value2', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'ServerSideEncryption' => 'AES256', 'VersionId' => 'Ri.vC6qVlA4dEnjgRV4ZHsHoFIjqEMNt', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object. The request specifies the optional server-side encryption option. The request also specifies optional object tags. If the bucket is versioning enabled, S3 returns version ID in response.', 'id' => 'to-upload-an-object-and-specify-server-side-encryption-and-object-tags-1483398331831', 'title' => 'To upload an object and specify server-side encryption and object tags', ], [ 'input' => [ 'Body' => 'HappyFace.jpg', 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'ServerSideEncryption' => 'AES256', 'StorageClass' => 'STANDARD_IA', ], 'output' => [ 'ETag' => '"6805f2cfc46c0f04559748bb039d69ae"', 'ServerSideEncryption' => 'AES256', 'VersionId' => 'CG612hodqujkf8FaaNfp8U..FIhLROcp', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads an object. The request specifies optional request headers to direct S3 to use a specific storage class and use server-side encryption.', 'id' => 'to-upload-an-object-(specify-optional-headers)', 'title' => 'To upload an object (specify optional headers)', ], ], 'PutObjectAcl' => [ [ 'input' => [ 'AccessControlPolicy' => [], 'Bucket' => 'examplebucket', 'GrantFullControl' => 'emailaddress=user1@example.com,emailaddress=user2@example.com', 'GrantRead' => 'uri=http://acs.amazonaws.com/groups/global/AllUsers', 'Key' => 'HappyFace.jpg', ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example adds grants to an object ACL.
The first permission grants user1 and user2 FULL_CONTROL and the AllUsers group READ permission.', 'id' => 'to-grant-permissions-using-object-acl-1481835549285', 'title' => 'To grant permissions using object ACL', ], ], 'PutObjectTagging' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'HappyFace.jpg', 'Tagging' => [ 'TagSet' => [ [ 'Key' => 'Key3', 'Value' => 'Value3', ], [ 'Key' => 'Key4', 'Value' => 'Value4', ], ], ], ], 'output' => [ 'VersionId' => 'null', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example adds tags to an existing object.', 'id' => 'to-add-tags-to-an-existing-object-1481764668793', 'title' => 'To add tags to an existing object', ], ], 'RestoreObject' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'Key' => 'archivedobjectkey', 'RestoreRequest' => [ 'Days' => 1, 'GlacierJobParameters' => [ 'Tier' => 'Expedited', ], ], ], 'output' => [], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example restores for one day an archived copy of an object back into an Amazon S3 bucket.', 'id' => 'to-restore-an-archived-object-1483049329953', 'title' => 'To restore an archived object', ], ], 'UploadPart' => [ [ 'input' => [ 'Body' => 'fileToUpload', 'Bucket' => 'examplebucket', 'Key' => 'examplelargeobject', 'PartNumber' => '1', 'UploadId' => 'xadcOB_7YPBOJuoFiQ9cz4P3Pe6FIZwO4f7wN93uHsNBEw97pl5eNwzExg0LAT2dUN91cOmrEQHDsP3WA60CEg--', ], 'output' => [ 'ETag' => '"d8c2eafd90c266e19ab9dcacc479f8af"', ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads part 1 of a multipart upload. The example specifies a file name for the part data. The Upload ID is the same as that returned by the initiate multipart upload request.', 'id' => 'to-upload-a-part-1481847914943', 'title' => 'To upload a part', ], ], 'UploadPartCopy' => [ [ 'input' => [ 'Bucket' => 'examplebucket', 'CopySource' => '/bucketname/sourceobjectkey', 'CopySourceRange' => 'bytes=1-100000', 'Key' => 'examplelargeobject', 'PartNumber' => '2', 'UploadId' => 'exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--', ], 'output' => [ 'CopyPartResult' => [ 'ETag' => '"65d16d19e65a7508a51f043180edcc36"', 'LastModified' => '2016-12-29T21:44:28.000Z', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads a part of a multipart upload by copying a specified byte range from an existing object as data source.', 'id' => 'to-upload-a-part-by-copying-byte-range-from-an-existing-object-as-data-source-1483048068594', 'title' => 'To upload a part by copying byte range from an existing object as data source', ], [ 'input' => [ 'Bucket' => 'examplebucket', 'CopySource' => '/bucketname/sourceobjectkey', 'Key' => 'examplelargeobject', 'PartNumber' => '1', 'UploadId' => 'exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--', ], 'output' => [ 'CopyPartResult' => [ 'ETag' => '"b0c6f0e7e054ab8fa2536a2677f8734d"', 'LastModified' => '2016-12-29T21:24:43.000Z', ], ], 'comments' => [ 'input' => [], 'output' => [], ], 'description' => 'The following example uploads a part of a multipart upload by copying data from an existing object as data source.', 'id' => 'to-upload-a-part-by-copying-data-from-an-existing-object-as-data-source-1483046746348', 'title' => 'To upload a part by copying data from an existing object as data source', ], ], ],];
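For reference, each of the example entries above is generated documentation data that maps one-to-one onto a client call through the SDK's magic operation methods. A minimal sketch of the UploadPartCopy entry directly above, not part of the generated files themselves; the region and credential resolution are assumptions, and the parameter values are the placeholders from the example entry:

<?php
// Minimal sketch, assuming credentials are resolved from the environment
// and that the placeholder bucket, keys, and upload ID from the example
// entry above stand in for real resources.
require 'vendor/autoload.php';

use Aws\S3\S3Client;

$s3 = new S3Client([
    'region'  => 'us-east-1', // assumed region
    'version' => '2006-03-01',
]);

// Copy bytes 1-100000 of an existing object into part 2 of an
// in-progress multipart upload, as in the example entry above.
$result = $s3->uploadPartCopy([
    'Bucket'          => 'examplebucket',
    'CopySource'      => '/bucketname/sourceobjectkey',
    'CopySourceRange' => 'bytes=1-100000',
    'Key'             => 'examplelargeobject',
    'PartNumber'      => 2,
    'UploadId'        => 'exampleuoh_10OhKhT7YukE9bjzTPRiuaCotmZM_pFngJFir9OZNrSr5cWa3cq3LZSUsfjI4FI7PkP91We7Nrw--',
]);

// The ETag is what a later CompleteMultipartUpload request would
// reference for this part number.
echo $result['CopyPartResult']['ETag'], "\n";

diff --git a/src/data/sagemaker/2017-07-24/api-2.json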
b/src/data/sagemaker/2017-07-24/api-2.json index 9b0b272187..b79b285324 100644 --- a/src/data/sagemaker/2017-07-24/api-2.json +++ b/src/data/sagemaker/2017-07-24/api-2.json @@ -774,7 +774,8 @@ "shapes":{ "Accept":{ "type":"string", - "max":256 + "max":256, + "pattern":".*" }, "AccountId":{ "type":"string", @@ -810,7 +811,8 @@ }, "AlgorithmImage":{ "type":"string", - "max":255 + "max":255, + "pattern":".*" }, "AlgorithmSortBy":{ "type":"string", @@ -934,7 +936,8 @@ "AttributeName":{ "type":"string", "max":256, - "min":1 + "min":1, + "pattern":".+" }, "AttributeNames":{ "type":"list", @@ -959,7 +962,8 @@ "Branch":{ "type":"string", "max":1024, - "min":1 + "min":1, + "pattern":"[^ ~^:?*\\[]+" }, "CategoricalParameterRange":{ "type":"structure", @@ -1208,7 +1212,8 @@ }, "ContentType":{ "type":"string", - "max":256 + "max":256, + "pattern":".*" }, "ContentTypes":{ "type":"list", @@ -1461,7 +1466,8 @@ "VolumeSizeInGB":{"shape":"NotebookInstanceVolumeSizeInGB"}, "AcceleratorTypes":{"shape":"NotebookInstanceAcceleratorTypes"}, "DefaultCodeRepository":{"shape":"CodeRepositoryNameOrUrl"}, - "AdditionalCodeRepositories":{"shape":"AdditionalCodeRepositoryNamesOrUrls"} + "AdditionalCodeRepositories":{"shape":"AdditionalCodeRepositoryNamesOrUrls"}, + "RootAccess":{"shape":"RootAccess"} } }, "CreateNotebookInstanceLifecycleConfigInput":{ @@ -1584,7 +1590,8 @@ "DataInputConfig":{ "type":"string", "max":1024, - "min":1 + "min":1, + "pattern":"[\\S\\s]+" }, "DataSource":{ "type":"structure", @@ -2014,7 +2021,8 @@ "VolumeSizeInGB":{"shape":"NotebookInstanceVolumeSizeInGB"}, "AcceleratorTypes":{"shape":"NotebookInstanceAcceleratorTypes"}, "DefaultCodeRepository":{"shape":"CodeRepositoryNameOrUrl"}, - "AdditionalCodeRepositories":{"shape":"AdditionalCodeRepositoryNamesOrUrls"} + "AdditionalCodeRepositories":{"shape":"AdditionalCodeRepositoryNamesOrUrls"}, + "RootAccess":{"shape":"RootAccess"} } }, "DescribeSubscribedWorkteamRequest":{ @@ -2180,12 +2188,14 @@ "EndpointArn":{ "type":"string", "max":2048, - "min":20 + "min":20, + "pattern":"arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint/.*" }, "EndpointConfigArn":{ "type":"string", "max":2048, - "min":20 + "min":20, + "pattern":"arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint-config/.*" }, "EndpointConfigName":{ "type":"string", @@ -2297,7 +2307,8 @@ }, "EnvironmentValue":{ "type":"string", - "max":1024 + "max":1024, + "pattern":"[\\S\\s]*" }, "FailureReason":{ "type":"string", @@ -2321,7 +2332,8 @@ "FilterValue":{ "type":"string", "max":1024, - "min":1 + "min":1, + "pattern":".+" }, "FinalHyperParameterTuningJobObjectiveMetric":{ "type":"structure", @@ -2743,16 +2755,19 @@ }, "JobReferenceCode":{ "type":"string", - "min":1 + "min":1, + "pattern":".+" }, "JobReferenceCodeContains":{ "type":"string", "max":255, - "min":1 + "min":1, + "pattern":".+" }, "KmsKeyId":{ "type":"string", - "max":2048 + "max":2048, + "pattern":".*" }, "LabelAttributeName":{ "type":"string", @@ -2784,7 +2799,8 @@ }, "LabelingJobAlgorithmSpecificationArn":{ "type":"string", - "max":2048 + "max":2048, + "pattern":"arn:.*" }, "LabelingJobAlgorithmsConfig":{ "type":"structure", @@ -3426,7 +3442,8 @@ "MetricName":{ "type":"string", "max":255, - "min":1 + "min":1, + "pattern":".+" }, "MetricRegex":{ "type":"string", @@ -3437,7 +3454,8 @@ "ModelArn":{ "type":"string", "max":2048, - "min":20 + "min":20, + "pattern":"arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model/.*" }, "ModelArtifacts":{ "type":"structure", @@ -3617,7 +3635,8 @@ 
"NetworkInterfaceId":{"type":"string"}, "NextToken":{ "type":"string", - "max":8192 + "max":8192, + "pattern":".*" }, "NotebookInstanceAcceleratorType":{ "type":"string", @@ -3642,7 +3661,8 @@ "NotebookInstanceLifecycleConfigContent":{ "type":"string", "max":16384, - "min":1 + "min":1, + "pattern":"[\\S\\s]+" }, "NotebookInstanceLifecycleConfigList":{ "type":"list", @@ -3828,15 +3848,18 @@ }, "PaginationToken":{ "type":"string", - "max":8192 + "max":8192, + "pattern":".*" }, "ParameterKey":{ "type":"string", - "max":256 + "max":256, + "pattern":".*" }, "ParameterName":{ "type":"string", - "max":256 + "max":256, + "pattern":"[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*" }, "ParameterRange":{ "type":"structure", @@ -3865,7 +3888,8 @@ }, "ParameterValue":{ "type":"string", - "max":256 + "max":256, + "pattern":".*" }, "ParameterValues":{ "type":"list", @@ -3885,7 +3909,11 @@ "max":5, "min":1 }, - "ProductId":{"type":"string"}, + "ProductId":{ + "type":"string", + "max":256, + "pattern":"^[a-zA-Z0-9](-*[a-zA-Z0-9])*$" + }, "ProductListings":{ "type":"list", "member":{"shape":"String"} @@ -3977,7 +4005,8 @@ "PropertyNameHint":{ "type":"string", "max":100, - "min":0 + "min":0, + "pattern":".*" }, "PropertyNameQuery":{ "type":"structure", @@ -4061,7 +4090,8 @@ }, "ResourceArn":{ "type":"string", - "max":256 + "max":256, + "pattern":"arn:.*" }, "ResourceConfig":{ "type":"structure", @@ -4112,7 +4142,8 @@ "ResourcePropertyName":{ "type":"string", "max":255, - "min":1 + "min":1, + "pattern":".+" }, "ResourceType":{ "type":"string", @@ -4120,7 +4151,8 @@ }, "ResponseMIMEType":{ "type":"string", - "max":1024 + "max":1024, + "pattern":"^[-\\w]+\\/.+$" }, "ResponseMIMETypes":{ "type":"list", @@ -4132,6 +4164,13 @@ "min":20, "pattern":"^arn:aws[a-z\\-]*:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+$" }, + "RootAccess":{ + "type":"string", + "enum":[ + "Enabled", + "Disabled" + ] + }, "S3DataDistribution":{ "type":"string", "enum":[ @@ -4261,7 +4300,8 @@ }, "SecurityGroupId":{ "type":"string", - "max":32 + "max":32, + "pattern":"[-0-9a-zA-Z]+" }, "SecurityGroupIds":{ "type":"list", @@ -4386,11 +4426,13 @@ "String200":{ "type":"string", "max":200, - "min":1 + "min":1, + "pattern":".+" }, "SubnetId":{ "type":"string", - "max":32 + "max":32, + "pattern":"[-0-9a-zA-Z]+" }, "Subnets":{ "type":"list", @@ -4467,12 +4509,14 @@ "jetson_tx1", "jetson_tx2", "rasp3b", - "deeplens" + "deeplens", + "rk3399", + "rk3288" ] }, "TaskAvailabilityLifetimeInSeconds":{ "type":"integer", - "max":345600, + "max":864000, "min":1 }, "TaskCount":{ @@ -4482,12 +4526,14 @@ "TaskDescription":{ "type":"string", "max":255, - "min":1 + "min":1, + "pattern":".+" }, "TaskInput":{ "type":"string", "max":128000, - "min":2 + "min":2, + "pattern":"[\\S\\s]+" }, "TaskKeyword":{ "type":"string", @@ -4515,7 +4561,8 @@ "TemplateContent":{ "type":"string", "max":128000, - "min":1 + "min":1, + "pattern":"[\\S\\s]+" }, "TenthFractionsOfACent":{ "type":"integer", @@ -4595,6 +4642,7 @@ "SecondaryStatusTransitions":{"shape":"SecondaryStatusTransitions"}, "FinalMetricDataList":{"shape":"FinalMetricDataList"}, "EnableNetworkIsolation":{"shape":"Boolean"}, + "EnableInterContainerTrafficEncryption":{"shape":"Boolean"}, "Tags":{"shape":"TagList"} } }, @@ -4726,7 +4774,8 @@ }, "TransformEnvironmentValue":{ "type":"string", - "max":10240 + "max":10240, + "pattern":"[\\S\\s]*" }, "TransformInput":{ "type":"structure", @@ -4959,7 +5008,8 @@ "AcceleratorTypes":{"shape":"NotebookInstanceAcceleratorTypes"}, 
"DisassociateAcceleratorTypes":{"shape":"DisassociateNotebookInstanceAcceleratorTypes"}, "DisassociateDefaultCodeRepository":{"shape":"DisassociateDefaultCodeRepository"}, - "DisassociateAdditionalCodeRepositories":{"shape":"DisassociateAdditionalCodeRepositories"} + "DisassociateAdditionalCodeRepositories":{"shape":"DisassociateAdditionalCodeRepositories"}, + "RootAccess":{"shape":"RootAccess"} } }, "UpdateNotebookInstanceLifecycleConfigInput":{ diff --git a/src/data/sagemaker/2017-07-24/api-2.json.php b/src/data/sagemaker/2017-07-24/api-2.json.php index 5dfd24b7f3..d1ed70c147 100644 --- a/src/data/sagemaker/2017-07-24/api-2.json.php +++ b/src/data/sagemaker/2017-07-24/api-2.json.php @@ -1,3 +1,3 @@ '2.0', 'metadata' => [ 'apiVersion' => '2017-07-24', 'endpointPrefix' => 'api.sagemaker', 'jsonVersion' => '1.1', 'protocol' => 'json', 'serviceAbbreviation' => 'SageMaker', 'serviceFullName' => 'Amazon SageMaker Service', 'serviceId' => 'SageMaker', 'signatureVersion' => 'v4', 'signingName' => 'sagemaker', 'targetPrefix' => 'SageMaker', 'uid' => 'sagemaker-2017-07-24', ], 'operations' => [ 'AddTags' => [ 'name' => 'AddTags', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'AddTagsInput', ], 'output' => [ 'shape' => 'AddTagsOutput', ], ], 'CreateAlgorithm' => [ 'name' => 'CreateAlgorithm', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateAlgorithmInput', ], 'output' => [ 'shape' => 'CreateAlgorithmOutput', ], ], 'CreateCodeRepository' => [ 'name' => 'CreateCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateCodeRepositoryInput', ], 'output' => [ 'shape' => 'CreateCodeRepositoryOutput', ], ], 'CreateCompilationJob' => [ 'name' => 'CreateCompilationJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateCompilationJobRequest', ], 'output' => [ 'shape' => 'CreateCompilationJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateEndpoint' => [ 'name' => 'CreateEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateEndpointInput', ], 'output' => [ 'shape' => 'CreateEndpointOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateEndpointConfig' => [ 'name' => 'CreateEndpointConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateEndpointConfigInput', ], 'output' => [ 'shape' => 'CreateEndpointConfigOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateHyperParameterTuningJob' => [ 'name' => 'CreateHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateHyperParameterTuningJobRequest', ], 'output' => [ 'shape' => 'CreateHyperParameterTuningJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateLabelingJob' => [ 'name' => 'CreateLabelingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateLabelingJobRequest', ], 'output' => [ 'shape' => 'CreateLabelingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateModel' => [ 'name' => 'CreateModel', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateModelInput', ], 'output' => [ 'shape' => 'CreateModelOutput', ], 'errors' => [ [ 'shape' => 
'ResourceLimitExceeded', ], ], ], 'CreateModelPackage' => [ 'name' => 'CreateModelPackage', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateModelPackageInput', ], 'output' => [ 'shape' => 'CreateModelPackageOutput', ], ], 'CreateNotebookInstance' => [ 'name' => 'CreateNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateNotebookInstanceInput', ], 'output' => [ 'shape' => 'CreateNotebookInstanceOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateNotebookInstanceLifecycleConfig' => [ 'name' => 'CreateNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateNotebookInstanceLifecycleConfigInput', ], 'output' => [ 'shape' => 'CreateNotebookInstanceLifecycleConfigOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreatePresignedNotebookInstanceUrl' => [ 'name' => 'CreatePresignedNotebookInstanceUrl', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreatePresignedNotebookInstanceUrlInput', ], 'output' => [ 'shape' => 'CreatePresignedNotebookInstanceUrlOutput', ], ], 'CreateTrainingJob' => [ 'name' => 'CreateTrainingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateTrainingJobRequest', ], 'output' => [ 'shape' => 'CreateTrainingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateTransformJob' => [ 'name' => 'CreateTransformJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateTransformJobRequest', ], 'output' => [ 'shape' => 'CreateTransformJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateWorkteam' => [ 'name' => 'CreateWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateWorkteamRequest', ], 'output' => [ 'shape' => 'CreateWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'DeleteAlgorithm' => [ 'name' => 'DeleteAlgorithm', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteAlgorithmInput', ], ], 'DeleteCodeRepository' => [ 'name' => 'DeleteCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteCodeRepositoryInput', ], ], 'DeleteEndpoint' => [ 'name' => 'DeleteEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteEndpointInput', ], ], 'DeleteEndpointConfig' => [ 'name' => 'DeleteEndpointConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteEndpointConfigInput', ], ], 'DeleteModel' => [ 'name' => 'DeleteModel', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteModelInput', ], ], 'DeleteModelPackage' => [ 'name' => 'DeleteModelPackage', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteModelPackageInput', ], ], 'DeleteNotebookInstance' => [ 'name' => 'DeleteNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteNotebookInstanceInput', ], ], 'DeleteNotebookInstanceLifecycleConfig' => [ 'name' => 'DeleteNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 
'DeleteNotebookInstanceLifecycleConfigInput', ], ], 'DeleteTags' => [ 'name' => 'DeleteTags', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteTagsInput', ], 'output' => [ 'shape' => 'DeleteTagsOutput', ], ], 'DeleteWorkteam' => [ 'name' => 'DeleteWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteWorkteamRequest', ], 'output' => [ 'shape' => 'DeleteWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'DescribeAlgorithm' => [ 'name' => 'DescribeAlgorithm', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeAlgorithmInput', ], 'output' => [ 'shape' => 'DescribeAlgorithmOutput', ], ], 'DescribeCodeRepository' => [ 'name' => 'DescribeCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeCodeRepositoryInput', ], 'output' => [ 'shape' => 'DescribeCodeRepositoryOutput', ], ], 'DescribeCompilationJob' => [ 'name' => 'DescribeCompilationJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeCompilationJobRequest', ], 'output' => [ 'shape' => 'DescribeCompilationJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeEndpoint' => [ 'name' => 'DescribeEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeEndpointInput', ], 'output' => [ 'shape' => 'DescribeEndpointOutput', ], ], 'DescribeEndpointConfig' => [ 'name' => 'DescribeEndpointConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeEndpointConfigInput', ], 'output' => [ 'shape' => 'DescribeEndpointConfigOutput', ], ], 'DescribeHyperParameterTuningJob' => [ 'name' => 'DescribeHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeHyperParameterTuningJobRequest', ], 'output' => [ 'shape' => 'DescribeHyperParameterTuningJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeLabelingJob' => [ 'name' => 'DescribeLabelingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeLabelingJobRequest', ], 'output' => [ 'shape' => 'DescribeLabelingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeModel' => [ 'name' => 'DescribeModel', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeModelInput', ], 'output' => [ 'shape' => 'DescribeModelOutput', ], ], 'DescribeModelPackage' => [ 'name' => 'DescribeModelPackage', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeModelPackageInput', ], 'output' => [ 'shape' => 'DescribeModelPackageOutput', ], ], 'DescribeNotebookInstance' => [ 'name' => 'DescribeNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeNotebookInstanceInput', ], 'output' => [ 'shape' => 'DescribeNotebookInstanceOutput', ], ], 'DescribeNotebookInstanceLifecycleConfig' => [ 'name' => 'DescribeNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeNotebookInstanceLifecycleConfigInput', ], 'output' => [ 'shape' => 'DescribeNotebookInstanceLifecycleConfigOutput', ], ], 'DescribeSubscribedWorkteam' => [ 'name' => 'DescribeSubscribedWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 
'DescribeSubscribedWorkteamRequest', ], 'output' => [ 'shape' => 'DescribeSubscribedWorkteamResponse', ], ], 'DescribeTrainingJob' => [ 'name' => 'DescribeTrainingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeTrainingJobRequest', ], 'output' => [ 'shape' => 'DescribeTrainingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeTransformJob' => [ 'name' => 'DescribeTransformJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeTransformJobRequest', ], 'output' => [ 'shape' => 'DescribeTransformJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeWorkteam' => [ 'name' => 'DescribeWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeWorkteamRequest', ], 'output' => [ 'shape' => 'DescribeWorkteamResponse', ], ], 'GetSearchSuggestions' => [ 'name' => 'GetSearchSuggestions', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'GetSearchSuggestionsRequest', ], 'output' => [ 'shape' => 'GetSearchSuggestionsResponse', ], ], 'ListAlgorithms' => [ 'name' => 'ListAlgorithms', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListAlgorithmsInput', ], 'output' => [ 'shape' => 'ListAlgorithmsOutput', ], ], 'ListCodeRepositories' => [ 'name' => 'ListCodeRepositories', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListCodeRepositoriesInput', ], 'output' => [ 'shape' => 'ListCodeRepositoriesOutput', ], ], 'ListCompilationJobs' => [ 'name' => 'ListCompilationJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListCompilationJobsRequest', ], 'output' => [ 'shape' => 'ListCompilationJobsResponse', ], ], 'ListEndpointConfigs' => [ 'name' => 'ListEndpointConfigs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListEndpointConfigsInput', ], 'output' => [ 'shape' => 'ListEndpointConfigsOutput', ], ], 'ListEndpoints' => [ 'name' => 'ListEndpoints', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListEndpointsInput', ], 'output' => [ 'shape' => 'ListEndpointsOutput', ], ], 'ListHyperParameterTuningJobs' => [ 'name' => 'ListHyperParameterTuningJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListHyperParameterTuningJobsRequest', ], 'output' => [ 'shape' => 'ListHyperParameterTuningJobsResponse', ], ], 'ListLabelingJobs' => [ 'name' => 'ListLabelingJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListLabelingJobsRequest', ], 'output' => [ 'shape' => 'ListLabelingJobsResponse', ], ], 'ListLabelingJobsForWorkteam' => [ 'name' => 'ListLabelingJobsForWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListLabelingJobsForWorkteamRequest', ], 'output' => [ 'shape' => 'ListLabelingJobsForWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'ListModelPackages' => [ 'name' => 'ListModelPackages', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListModelPackagesInput', ], 'output' => [ 'shape' => 'ListModelPackagesOutput', ], ], 'ListModels' => [ 'name' => 'ListModels', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListModelsInput', ], 'output' => [ 'shape' => 'ListModelsOutput', ], ], 'ListNotebookInstanceLifecycleConfigs' => [ 'name' 
=> 'ListNotebookInstanceLifecycleConfigs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListNotebookInstanceLifecycleConfigsInput', ], 'output' => [ 'shape' => 'ListNotebookInstanceLifecycleConfigsOutput', ], ], 'ListNotebookInstances' => [ 'name' => 'ListNotebookInstances', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListNotebookInstancesInput', ], 'output' => [ 'shape' => 'ListNotebookInstancesOutput', ], ], 'ListSubscribedWorkteams' => [ 'name' => 'ListSubscribedWorkteams', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListSubscribedWorkteamsRequest', ], 'output' => [ 'shape' => 'ListSubscribedWorkteamsResponse', ], ], 'ListTags' => [ 'name' => 'ListTags', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTagsInput', ], 'output' => [ 'shape' => 'ListTagsOutput', ], ], 'ListTrainingJobs' => [ 'name' => 'ListTrainingJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTrainingJobsRequest', ], 'output' => [ 'shape' => 'ListTrainingJobsResponse', ], ], 'ListTrainingJobsForHyperParameterTuningJob' => [ 'name' => 'ListTrainingJobsForHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTrainingJobsForHyperParameterTuningJobRequest', ], 'output' => [ 'shape' => 'ListTrainingJobsForHyperParameterTuningJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'ListTransformJobs' => [ 'name' => 'ListTransformJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTransformJobsRequest', ], 'output' => [ 'shape' => 'ListTransformJobsResponse', ], ], 'ListWorkteams' => [ 'name' => 'ListWorkteams', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListWorkteamsRequest', ], 'output' => [ 'shape' => 'ListWorkteamsResponse', ], ], 'RenderUiTemplate' => [ 'name' => 'RenderUiTemplate', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'RenderUiTemplateRequest', ], 'output' => [ 'shape' => 'RenderUiTemplateResponse', ], ], 'Search' => [ 'name' => 'Search', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'SearchRequest', ], 'output' => [ 'shape' => 'SearchResponse', ], ], 'StartNotebookInstance' => [ 'name' => 'StartNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StartNotebookInstanceInput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'StopCompilationJob' => [ 'name' => 'StopCompilationJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopCompilationJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopHyperParameterTuningJob' => [ 'name' => 'StopHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopHyperParameterTuningJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopLabelingJob' => [ 'name' => 'StopLabelingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopLabelingJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopNotebookInstance' => [ 'name' => 'StopNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopNotebookInstanceInput', ], ], 'StopTrainingJob' => [ 'name' => 'StopTrainingJob', 'http' => [ 
'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopTrainingJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopTransformJob' => [ 'name' => 'StopTransformJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopTransformJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'UpdateCodeRepository' => [ 'name' => 'UpdateCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateCodeRepositoryInput', ], 'output' => [ 'shape' => 'UpdateCodeRepositoryOutput', ], ], 'UpdateEndpoint' => [ 'name' => 'UpdateEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateEndpointInput', ], 'output' => [ 'shape' => 'UpdateEndpointOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateEndpointWeightsAndCapacities' => [ 'name' => 'UpdateEndpointWeightsAndCapacities', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateEndpointWeightsAndCapacitiesInput', ], 'output' => [ 'shape' => 'UpdateEndpointWeightsAndCapacitiesOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateNotebookInstance' => [ 'name' => 'UpdateNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateNotebookInstanceInput', ], 'output' => [ 'shape' => 'UpdateNotebookInstanceOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateNotebookInstanceLifecycleConfig' => [ 'name' => 'UpdateNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateNotebookInstanceLifecycleConfigInput', ], 'output' => [ 'shape' => 'UpdateNotebookInstanceLifecycleConfigOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateWorkteam' => [ 'name' => 'UpdateWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateWorkteamRequest', ], 'output' => [ 'shape' => 'UpdateWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], ], 'shapes' => [ 'Accept' => [ 'type' => 'string', 'max' => 256, ], 'AccountId' => [ 'type' => 'string', 'pattern' => '^\\d+$', ], 'AddTagsInput' => [ 'type' => 'structure', 'required' => [ 'ResourceArn', 'Tags', ], 'members' => [ 'ResourceArn' => [ 'shape' => 'ResourceArn', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'AddTagsOutput' => [ 'type' => 'structure', 'members' => [ 'Tags' => [ 'shape' => 'TagList', ], ], ], 'AdditionalCodeRepositoryNamesOrUrls' => [ 'type' => 'list', 'member' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'max' => 3, ], 'AlgorithmArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:algorithm/.*', ], 'AlgorithmImage' => [ 'type' => 'string', 'max' => 255, ], 'AlgorithmSortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'AlgorithmSpecification' => [ 'type' => 'structure', 'required' => [ 'TrainingInputMode', ], 'members' => [ 'TrainingImage' => [ 'shape' => 'AlgorithmImage', ], 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], 'TrainingInputMode' => [ 'shape' => 'TrainingInputMode', ], 'MetricDefinitions' => [ 'shape' => 'MetricDefinitionList', ], ], ], 'AlgorithmStatus' => [ 'type' => 'string', 'enum' => [ 'Pending', 'InProgress', 'Completed', 'Failed', 'Deleting', ], ], 'AlgorithmStatusDetails' => [ 'type' => 'structure', 'members' => [ 
'ValidationStatuses' => [ 'shape' => 'AlgorithmStatusItemList', ], 'ImageScanStatuses' => [ 'shape' => 'AlgorithmStatusItemList', ], ], ], 'AlgorithmStatusItem' => [ 'type' => 'structure', 'required' => [ 'Name', 'Status', ], 'members' => [ 'Name' => [ 'shape' => 'EntityName', ], 'Status' => [ 'shape' => 'DetailedAlgorithmStatus', ], 'FailureReason' => [ 'shape' => 'String', ], ], ], 'AlgorithmStatusItemList' => [ 'type' => 'list', 'member' => [ 'shape' => 'AlgorithmStatusItem', ], ], 'AlgorithmSummary' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', 'AlgorithmArn', 'CreationTime', 'AlgorithmStatus', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], 'AlgorithmArn' => [ 'shape' => 'AlgorithmArn', ], 'AlgorithmDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'AlgorithmStatus' => [ 'shape' => 'AlgorithmStatus', ], ], ], 'AlgorithmSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'AlgorithmSummary', ], ], 'AlgorithmValidationProfile' => [ 'type' => 'structure', 'required' => [ 'ProfileName', 'TrainingJobDefinition', ], 'members' => [ 'ProfileName' => [ 'shape' => 'EntityName', ], 'TrainingJobDefinition' => [ 'shape' => 'TrainingJobDefinition', ], 'TransformJobDefinition' => [ 'shape' => 'TransformJobDefinition', ], ], ], 'AlgorithmValidationProfiles' => [ 'type' => 'list', 'member' => [ 'shape' => 'AlgorithmValidationProfile', ], 'max' => 1, 'min' => 1, ], 'AlgorithmValidationSpecification' => [ 'type' => 'structure', 'required' => [ 'ValidationRole', 'ValidationProfiles', ], 'members' => [ 'ValidationRole' => [ 'shape' => 'RoleArn', ], 'ValidationProfiles' => [ 'shape' => 'AlgorithmValidationProfiles', ], ], ], 'AnnotationConsolidationConfig' => [ 'type' => 'structure', 'required' => [ 'AnnotationConsolidationLambdaArn', ], 'members' => [ 'AnnotationConsolidationLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], ], ], 'ArnOrName' => [ 'type' => 'string', 'max' => 170, 'min' => 1, 'pattern' => '(arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:[a-z\\-]*\\/)?([a-zA-Z0-9]([a-zA-Z0-9-]){0,62})(?<!-)$', ], 'AssemblyType' =>
[ 'type' => 'string', 'enum' => [ 'None', 'Line', ], ], 'AttributeName' => [ 'type' => 'string', 'max' => 256, 'min' => 1, ], 'AttributeNames' => [ 'type' => 'list', 'member' => [ 'shape' => 'AttributeName', ], 'max' => 16, ], 'BatchStrategy' => [ 'type' => 'string', 'enum' => [ 'MultiRecord', 'SingleRecord', ], ], 'Boolean' => [ 'type' => 'boolean', ], 'BooleanOperator' => [ 'type' => 'string', 'enum' => [ 'And', 'Or', ], ], 'Branch' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, ], 'CategoricalParameterRange' => [ 'type' => 'structure', 'required' => [ 'Name', 'Values', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterKey', ], 'Values' => [ 'shape' => 'ParameterValues', ], ], ], 'CategoricalParameterRangeSpecification' => [ 'type' => 'structure', 'required' => [ 'Values', ], 'members' => [ 'Values' => [ 'shape' => 'ParameterValues', ], ], ], 'CategoricalParameterRanges' => [ 'type' => 'list', 'member' => [ 'shape' => 'CategoricalParameterRange', ], 'max' => 20, 'min' => 0, ], 'Cents' => [ 'type' => 'integer', 'max' => 99, 'min' => 0, ], 'CertifyForMarketplace' => [ 'type' => 'boolean', ], 'Channel' => [ 'type' => 'structure', 'required' => [ 'ChannelName', 'DataSource', ], 'members' => [ 'ChannelName' => [ 'shape' => 'ChannelName', ], 'DataSource' => [ 'shape' => 'DataSource', ], 'ContentType' => [ 'shape' => 'ContentType', ], 'CompressionType' => [ 'shape' => 'CompressionType', ], 'RecordWrapperType' => [ 'shape' => 'RecordWrapper', ], 'InputMode' => [ 'shape' => 'TrainingInputMode', ], 'ShuffleConfig' => [ 'shape' => 'ShuffleConfig', ], ], ], 'ChannelName' => [ 'type' => 'string', 'max' => 64, 'min' => 1, 'pattern' => '[A-Za-z0-9\\.\\-_]+', ], 'ChannelSpecification' => [ 'type' => 'structure', 'required' => [ 'Name', 'SupportedContentTypes', 'SupportedInputModes', ], 'members' => [ 'Name' => [ 'shape' => 'ChannelName', ], 'Description' => [ 'shape' => 'EntityDescription', ], 'IsRequired' => [ 'shape' => 'Boolean', ], 'SupportedContentTypes' => [ 'shape' => 'ContentTypes', ], 'SupportedCompressionTypes' => [ 'shape' => 'CompressionTypes', ], 'SupportedInputModes' => [ 'shape' => 'InputModes', ], ], ], 'ChannelSpecifications' => [ 'type' => 'list', 'member' => [ 'shape' => 'ChannelSpecification', ], 'max' => 8, 'min' => 1, ], 'CodeRepositoryArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:code-repository/.*', ], 'CodeRepositoryContains' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[a-zA-Z0-9-]+', ], 'CodeRepositoryNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'CodeRepositoryNameOrUrl' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, 'pattern' => '^https://([^/]+)/?(.*)$|^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'CodeRepositorySortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'LastModifiedTime', ], ], 'CodeRepositorySortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'CodeRepositorySummary' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', 'CodeRepositoryArn', 'CreationTime', 'LastModifiedTime', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'GitConfig' => [ 'shape' => 'GitConfig', ], ], ], 'CodeRepositorySummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'CodeRepositorySummary', ], ], 'CognitoClientId' => 
[ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '[\\w+]+', ], 'CognitoMemberDefinition' => [ 'type' => 'structure', 'required' => [ 'UserPool', 'UserGroup', 'ClientId', ], 'members' => [ 'UserPool' => [ 'shape' => 'CognitoUserPool', ], 'UserGroup' => [ 'shape' => 'CognitoUserGroup', ], 'ClientId' => [ 'shape' => 'CognitoClientId', ], ], ], 'CognitoUserGroup' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '[\\p{L}\\p{M}\\p{S}\\p{N}\\p{P}]+', ], 'CognitoUserPool' => [ 'type' => 'string', 'max' => 55, 'min' => 1, 'pattern' => '[\\w-]+_[0-9a-zA-Z]+', ], 'CompilationJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:compilation-job/.*', ], 'CompilationJobStatus' => [ 'type' => 'string', 'enum' => [ 'INPROGRESS', 'COMPLETED', 'FAILED', 'STARTING', 'STOPPING', 'STOPPED', ], ], 'CompilationJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'CompilationJobSummary', ], ], 'CompilationJobSummary' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', 'CompilationJobArn', 'CreationTime', 'CompilationTargetDevice', 'CompilationJobStatus', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], 'CompilationJobArn' => [ 'shape' => 'CompilationJobArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'CompilationStartTime' => [ 'shape' => 'Timestamp', ], 'CompilationEndTime' => [ 'shape' => 'Timestamp', ], 'CompilationTargetDevice' => [ 'shape' => 'TargetDevice', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'CompilationJobStatus' => [ 'shape' => 'CompilationJobStatus', ], ], ], 'CompressionType' => [ 'type' => 'string', 'enum' => [ 'None', 'Gzip', ], ], 'CompressionTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'CompressionType', ], ], 'ContainerDefinition' => [ 'type' => 'structure', 'members' => [ 'ContainerHostname' => [ 'shape' => 'ContainerHostname', ], 'Image' => [ 'shape' => 'Image', ], 'ModelDataUrl' => [ 'shape' => 'Url', ], 'Environment' => [ 'shape' => 'EnvironmentMap', ], 'ModelPackageName' => [ 'shape' => 'ArnOrName', ], ], ], 'ContainerDefinitionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContainerDefinition', ], 'max' => 5, ], 'ContainerHostname' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'ContentClassifier' => [ 'type' => 'string', 'enum' => [ 'FreeOfPersonallyIdentifiableInformation', 'FreeOfAdultContent', ], ], 'ContentClassifiers' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContentClassifier', ], 'max' => 256, ], 'ContentType' => [ 'type' => 'string', 'max' => 256, ], 'ContentTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContentType', ], ], 'ContinuousParameterRange' => [ 'type' => 'structure', 'required' => [ 'Name', 'MinValue', 'MaxValue', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterKey', ], 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'ContinuousParameterRangeSpecification' => [ 'type' => 'structure', 'required' => [ 'MinValue', 'MaxValue', ], 'members' => [ 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'ContinuousParameterRanges' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContinuousParameterRange', ], 'max' => 20, 'min' => 0, ], 'CreateAlgorithmInput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', 'TrainingSpecification', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], 'AlgorithmDescription' => [ 'shape' 
=> 'EntityDescription', ], 'TrainingSpecification' => [ 'shape' => 'TrainingSpecification', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'ValidationSpecification' => [ 'shape' => 'AlgorithmValidationSpecification', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'CreateAlgorithmOutput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmArn', ], 'members' => [ 'AlgorithmArn' => [ 'shape' => 'AlgorithmArn', ], ], ], 'CreateCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', 'GitConfig', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'GitConfig' => [ 'shape' => 'GitConfig', ], ], ], 'CreateCodeRepositoryOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryArn', ], 'members' => [ 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], ], ], 'CreateCompilationJobRequest' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', 'RoleArn', 'InputConfig', 'OutputConfig', 'StoppingCondition', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputConfig' => [ 'shape' => 'InputConfig', ], 'OutputConfig' => [ 'shape' => 'OutputConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], ], ], 'CreateCompilationJobResponse' => [ 'type' => 'structure', 'required' => [ 'CompilationJobArn', ], 'members' => [ 'CompilationJobArn' => [ 'shape' => 'CompilationJobArn', ], ], ], 'CreateEndpointConfigInput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', 'ProductionVariants', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'ProductionVariants' => [ 'shape' => 'ProductionVariantList', ], 'Tags' => [ 'shape' => 'TagList', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'CreateEndpointConfigOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigArn', ], 'members' => [ 'EndpointConfigArn' => [ 'shape' => 'EndpointConfigArn', ], ], ], 'CreateEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointConfigName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateEndpointOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointArn', ], 'members' => [ 'EndpointArn' => [ 'shape' => 'EndpointArn', ], ], ], 'CreateHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', 'HyperParameterTuningJobConfig', 'TrainingJobDefinition', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'HyperParameterTuningJobConfig' => [ 'shape' => 'HyperParameterTuningJobConfig', ], 'TrainingJobDefinition' => [ 'shape' => 'HyperParameterTrainingJobDefinition', ], 'WarmStartConfig' => [ 'shape' => 'HyperParameterTuningJobWarmStartConfig', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateHyperParameterTuningJobResponse' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobArn', ], 'members' => [ 'HyperParameterTuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], ], ], 'CreateLabelingJobRequest' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', 'LabelAttributeName', 'InputConfig', 'OutputConfig', 'RoleArn', 'HumanTaskConfig', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'LabelAttributeName' => [ 'shape' => 'LabelAttributeName', ], 'InputConfig' => 
[ 'shape' => 'LabelingJobInputConfig', ], 'OutputConfig' => [ 'shape' => 'LabelingJobOutputConfig', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'LabelCategoryConfigS3Uri' => [ 'shape' => 'S3Uri', ], 'StoppingConditions' => [ 'shape' => 'LabelingJobStoppingConditions', ], 'LabelingJobAlgorithmsConfig' => [ 'shape' => 'LabelingJobAlgorithmsConfig', ], 'HumanTaskConfig' => [ 'shape' => 'HumanTaskConfig', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateLabelingJobResponse' => [ 'type' => 'structure', 'required' => [ 'LabelingJobArn', ], 'members' => [ 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], ], ], 'CreateModelInput' => [ 'type' => 'structure', 'required' => [ 'ModelName', 'ExecutionRoleArn', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], 'PrimaryContainer' => [ 'shape' => 'ContainerDefinition', ], 'Containers' => [ 'shape' => 'ContainerDefinitionList', ], 'ExecutionRoleArn' => [ 'shape' => 'RoleArn', ], 'Tags' => [ 'shape' => 'TagList', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], ], ], 'CreateModelOutput' => [ 'type' => 'structure', 'required' => [ 'ModelArn', ], 'members' => [ 'ModelArn' => [ 'shape' => 'ModelArn', ], ], ], 'CreateModelPackageInput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], 'ModelPackageDescription' => [ 'shape' => 'EntityDescription', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'ValidationSpecification' => [ 'shape' => 'ModelPackageValidationSpecification', ], 'SourceAlgorithmSpecification' => [ 'shape' => 'SourceAlgorithmSpecification', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'CreateModelPackageOutput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageArn', ], 'members' => [ 'ModelPackageArn' => [ 'shape' => 'ModelPackageArn', ], ], ], 'CreateNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', 'InstanceType', 'RoleArn', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'SubnetId' => [ 'shape' => 'SubnetId', ], 'SecurityGroupIds' => [ 'shape' => 'SecurityGroupIds', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'Tags' => [ 'shape' => 'TagList', ], 'LifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DirectInternetAccess' => [ 'shape' => 'DirectInternetAccess', ], 'VolumeSizeInGB' => [ 'shape' => 'NotebookInstanceVolumeSizeInGB', ], 'AcceleratorTypes' => [ 'shape' => 'NotebookInstanceAcceleratorTypes', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], ], ], 'CreateNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'OnCreate' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'OnStart' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], ], ], 'CreateNotebookInstanceLifecycleConfigOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceLifecycleConfigArn' => [ 'shape' => 'NotebookInstanceLifecycleConfigArn', ], ], ], 'CreateNotebookInstanceOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceArn' => [ 'shape' => 
'NotebookInstanceArn', ], ], ], 'CreatePresignedNotebookInstanceUrlInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'SessionExpirationDurationInSeconds' => [ 'shape' => 'SessionExpirationDurationInSeconds', ], ], ], 'CreatePresignedNotebookInstanceUrlOutput' => [ 'type' => 'structure', 'members' => [ 'AuthorizedUrl' => [ 'shape' => 'NotebookInstanceUrl', ], ], ], 'CreateTrainingJobRequest' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'AlgorithmSpecification', 'RoleArn', 'OutputDataConfig', 'ResourceConfig', 'StoppingCondition', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'AlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'Tags' => [ 'shape' => 'TagList', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], ], ], 'CreateTrainingJobResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobArn', ], 'members' => [ 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], ], ], 'CreateTransformJobRequest' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', 'ModelName', 'TransformInput', 'TransformOutput', 'TransformResources', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], 'ModelName' => [ 'shape' => 'ModelName', ], 'MaxConcurrentTransforms' => [ 'shape' => 'MaxConcurrentTransforms', ], 'MaxPayloadInMB' => [ 'shape' => 'MaxPayloadInMB', ], 'BatchStrategy' => [ 'shape' => 'BatchStrategy', ], 'Environment' => [ 'shape' => 'TransformEnvironmentMap', ], 'TransformInput' => [ 'shape' => 'TransformInput', ], 'TransformOutput' => [ 'shape' => 'TransformOutput', ], 'TransformResources' => [ 'shape' => 'TransformResources', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateTransformJobResponse' => [ 'type' => 'structure', 'required' => [ 'TransformJobArn', ], 'members' => [ 'TransformJobArn' => [ 'shape' => 'TransformJobArn', ], ], ], 'CreateWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', 'MemberDefinitions', 'Description', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], 'MemberDefinitions' => [ 'shape' => 'MemberDefinitions', ], 'Description' => [ 'shape' => 'String200', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateWorkteamResponse' => [ 'type' => 'structure', 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], ], ], 'CreationTime' => [ 'type' => 'timestamp', ], 'DataInputConfig' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, ], 'DataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataSource', ], 'members' => [ 'S3DataSource' => [ 'shape' => 'S3DataSource', ], ], ], 'DeleteAlgorithmInput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], ], ], 'DeleteCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], ], ], 'DeleteEndpointConfigInput' => [ 'type' => 'structure', 'required' => [ 
'EndpointConfigName', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], ], ], 'DeleteEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], ], ], 'DeleteModelInput' => [ 'type' => 'structure', 'required' => [ 'ModelName', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], ], ], 'DeleteModelPackageInput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], ], ], 'DeleteNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'DeleteNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], ], ], 'DeleteTagsInput' => [ 'type' => 'structure', 'required' => [ 'ResourceArn', 'TagKeys', ], 'members' => [ 'ResourceArn' => [ 'shape' => 'ResourceArn', ], 'TagKeys' => [ 'shape' => 'TagKeyList', ], ], ], 'DeleteTagsOutput' => [ 'type' => 'structure', 'members' => [], ], 'DeleteWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], ], ], 'DeleteWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'Success', ], 'members' => [ 'Success' => [ 'shape' => 'Success', ], ], ], 'DeployedImage' => [ 'type' => 'structure', 'members' => [ 'SpecifiedImage' => [ 'shape' => 'Image', ], 'ResolvedImage' => [ 'shape' => 'Image', ], 'ResolutionTime' => [ 'shape' => 'Timestamp', ], ], ], 'DeployedImages' => [ 'type' => 'list', 'member' => [ 'shape' => 'DeployedImage', ], ], 'DescribeAlgorithmInput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], ], ], 'DescribeAlgorithmOutput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', 'AlgorithmArn', 'CreationTime', 'TrainingSpecification', 'AlgorithmStatus', 'AlgorithmStatusDetails', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], 'AlgorithmArn' => [ 'shape' => 'AlgorithmArn', ], 'AlgorithmDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'TrainingSpecification' => [ 'shape' => 'TrainingSpecification', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'ValidationSpecification' => [ 'shape' => 'AlgorithmValidationSpecification', ], 'AlgorithmStatus' => [ 'shape' => 'AlgorithmStatus', ], 'AlgorithmStatusDetails' => [ 'shape' => 'AlgorithmStatusDetails', ], 'ProductId' => [ 'shape' => 'ProductId', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'DescribeCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], ], ], 'DescribeCodeRepositoryOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', 'CodeRepositoryArn', 'CreationTime', 'LastModifiedTime', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'GitConfig' => [ 'shape' => 'GitConfig', ], ], ], 
'DescribeCompilationJobRequest' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], ], ], 'DescribeCompilationJobResponse' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', 'CompilationJobArn', 'CompilationJobStatus', 'StoppingCondition', 'CreationTime', 'LastModifiedTime', 'FailureReason', 'ModelArtifacts', 'RoleArn', 'InputConfig', 'OutputConfig', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], 'CompilationJobArn' => [ 'shape' => 'CompilationJobArn', ], 'CompilationJobStatus' => [ 'shape' => 'CompilationJobStatus', ], 'CompilationStartTime' => [ 'shape' => 'Timestamp', ], 'CompilationEndTime' => [ 'shape' => 'Timestamp', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'ModelArtifacts' => [ 'shape' => 'ModelArtifacts', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputConfig' => [ 'shape' => 'InputConfig', ], 'OutputConfig' => [ 'shape' => 'OutputConfig', ], ], ], 'DescribeEndpointConfigInput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], ], ], 'DescribeEndpointConfigOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', 'EndpointConfigArn', 'ProductionVariants', 'CreationTime', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'EndpointConfigArn' => [ 'shape' => 'EndpointConfigArn', ], 'ProductionVariants' => [ 'shape' => 'ProductionVariantList', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], ], ], 'DescribeEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], ], ], 'DescribeEndpointOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointArn', 'EndpointConfigName', 'EndpointStatus', 'CreationTime', 'LastModifiedTime', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointArn' => [ 'shape' => 'EndpointArn', ], 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'ProductionVariants' => [ 'shape' => 'ProductionVariantSummaryList', ], 'EndpointStatus' => [ 'shape' => 'EndpointStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], ], ], 'DescribeHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], ], ], 'DescribeHyperParameterTuningJobResponse' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', 'HyperParameterTuningJobArn', 'HyperParameterTuningJobConfig', 'TrainingJobDefinition', 'HyperParameterTuningJobStatus', 'CreationTime', 'TrainingJobStatusCounters', 'ObjectiveStatusCounters', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'HyperParameterTuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'HyperParameterTuningJobConfig' => [ 'shape' => 'HyperParameterTuningJobConfig', ], 'TrainingJobDefinition' => [ 'shape' => 'HyperParameterTrainingJobDefinition', ], 'HyperParameterTuningJobStatus' => [ 'shape' => 
'HyperParameterTuningJobStatus', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'HyperParameterTuningEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatusCounters' => [ 'shape' => 'TrainingJobStatusCounters', ], 'ObjectiveStatusCounters' => [ 'shape' => 'ObjectiveStatusCounters', ], 'BestTrainingJob' => [ 'shape' => 'HyperParameterTrainingJobSummary', ], 'OverallBestTrainingJob' => [ 'shape' => 'HyperParameterTrainingJobSummary', ], 'WarmStartConfig' => [ 'shape' => 'HyperParameterTuningJobWarmStartConfig', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], ], ], 'DescribeLabelingJobRequest' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], ], ], 'DescribeLabelingJobResponse' => [ 'type' => 'structure', 'required' => [ 'LabelingJobStatus', 'LabelCounters', 'CreationTime', 'LastModifiedTime', 'JobReferenceCode', 'LabelingJobName', 'LabelingJobArn', 'InputConfig', 'OutputConfig', 'RoleArn', 'HumanTaskConfig', ], 'members' => [ 'LabelingJobStatus' => [ 'shape' => 'LabelingJobStatus', ], 'LabelCounters' => [ 'shape' => 'LabelCounters', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'JobReferenceCode' => [ 'shape' => 'JobReferenceCode', ], 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'LabelAttributeName' => [ 'shape' => 'LabelAttributeName', ], 'InputConfig' => [ 'shape' => 'LabelingJobInputConfig', ], 'OutputConfig' => [ 'shape' => 'LabelingJobOutputConfig', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'LabelCategoryConfigS3Uri' => [ 'shape' => 'S3Uri', ], 'StoppingConditions' => [ 'shape' => 'LabelingJobStoppingConditions', ], 'LabelingJobAlgorithmsConfig' => [ 'shape' => 'LabelingJobAlgorithmsConfig', ], 'HumanTaskConfig' => [ 'shape' => 'HumanTaskConfig', ], 'Tags' => [ 'shape' => 'TagList', ], 'LabelingJobOutput' => [ 'shape' => 'LabelingJobOutput', ], ], ], 'DescribeModelInput' => [ 'type' => 'structure', 'required' => [ 'ModelName', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], ], ], 'DescribeModelOutput' => [ 'type' => 'structure', 'required' => [ 'ModelName', 'ExecutionRoleArn', 'CreationTime', 'ModelArn', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], 'PrimaryContainer' => [ 'shape' => 'ContainerDefinition', ], 'Containers' => [ 'shape' => 'ContainerDefinitionList', ], 'ExecutionRoleArn' => [ 'shape' => 'RoleArn', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'ModelArn' => [ 'shape' => 'ModelArn', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], ], ], 'DescribeModelPackageInput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'ArnOrName', ], ], ], 'DescribeModelPackageOutput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', 'ModelPackageArn', 'CreationTime', 'ModelPackageStatus', 'ModelPackageStatusDetails', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], 'ModelPackageArn' => [ 'shape' => 'ModelPackageArn', ], 'ModelPackageDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'SourceAlgorithmSpecification' => [ 'shape' => 'SourceAlgorithmSpecification', ], 
'ValidationSpecification' => [ 'shape' => 'ModelPackageValidationSpecification', ], 'ModelPackageStatus' => [ 'shape' => 'ModelPackageStatus', ], 'ModelPackageStatusDetails' => [ 'shape' => 'ModelPackageStatusDetails', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'DescribeNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'DescribeNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], ], ], 'DescribeNotebookInstanceLifecycleConfigOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceLifecycleConfigArn' => [ 'shape' => 'NotebookInstanceLifecycleConfigArn', ], 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'OnCreate' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'OnStart' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], ], ], 'DescribeNotebookInstanceOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceArn' => [ 'shape' => 'NotebookInstanceArn', ], 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'NotebookInstanceStatus' => [ 'shape' => 'NotebookInstanceStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'Url' => [ 'shape' => 'NotebookInstanceUrl', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'SubnetId' => [ 'shape' => 'SubnetId', ], 'SecurityGroups' => [ 'shape' => 'SecurityGroupIds', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'NetworkInterfaceId' => [ 'shape' => 'NetworkInterfaceId', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DirectInternetAccess' => [ 'shape' => 'DirectInternetAccess', ], 'VolumeSizeInGB' => [ 'shape' => 'NotebookInstanceVolumeSizeInGB', ], 'AcceleratorTypes' => [ 'shape' => 'NotebookInstanceAcceleratorTypes', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], ], ], 'DescribeSubscribedWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], ], ], 'DescribeSubscribedWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'SubscribedWorkteam', ], 'members' => [ 'SubscribedWorkteam' => [ 'shape' => 'SubscribedWorkteam', ], ], ], 'DescribeTrainingJobRequest' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], ], ], 'DescribeTrainingJobResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'TrainingJobArn', 'ModelArtifacts', 'TrainingJobStatus', 'SecondaryStatus', 'AlgorithmSpecification', 'ResourceConfig', 'StoppingCondition', 'CreationTime', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'TuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'ModelArtifacts' 
=> [ 'shape' => 'ModelArtifacts', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], 'SecondaryStatus' => [ 'shape' => 'SecondaryStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'AlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingStartTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'SecondaryStatusTransitions' => [ 'shape' => 'SecondaryStatusTransitions', ], 'FinalMetricDataList' => [ 'shape' => 'FinalMetricDataList', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], ], ], 'DescribeTransformJobRequest' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], ], ], 'DescribeTransformJobResponse' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', 'TransformJobArn', 'TransformJobStatus', 'ModelName', 'TransformInput', 'TransformResources', 'CreationTime', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], 'TransformJobArn' => [ 'shape' => 'TransformJobArn', ], 'TransformJobStatus' => [ 'shape' => 'TransformJobStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'ModelName' => [ 'shape' => 'ModelName', ], 'MaxConcurrentTransforms' => [ 'shape' => 'MaxConcurrentTransforms', ], 'MaxPayloadInMB' => [ 'shape' => 'MaxPayloadInMB', ], 'BatchStrategy' => [ 'shape' => 'BatchStrategy', ], 'Environment' => [ 'shape' => 'TransformEnvironmentMap', ], 'TransformInput' => [ 'shape' => 'TransformInput', ], 'TransformOutput' => [ 'shape' => 'TransformOutput', ], 'TransformResources' => [ 'shape' => 'TransformResources', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TransformStartTime' => [ 'shape' => 'Timestamp', ], 'TransformEndTime' => [ 'shape' => 'Timestamp', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], ], ], 'DescribeWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], ], ], 'DescribeWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'Workteam', ], 'members' => [ 'Workteam' => [ 'shape' => 'Workteam', ], ], ], 'DesiredWeightAndCapacity' => [ 'type' => 'structure', 'required' => [ 'VariantName', ], 'members' => [ 'VariantName' => [ 'shape' => 'VariantName', ], 'DesiredWeight' => [ 'shape' => 'VariantWeight', ], 'DesiredInstanceCount' => [ 'shape' => 'TaskCount', ], ], ], 'DesiredWeightAndCapacityList' => [ 'type' => 'list', 'member' => [ 'shape' => 'DesiredWeightAndCapacity', ], 'min' => 1, ], 'DetailedAlgorithmStatus' => [ 'type' => 'string', 'enum' => [ 'NotStarted', 'InProgress', 'Completed', 'Failed', ], ], 'DetailedModelPackageStatus' => [ 'type' => 'string', 'enum' => [ 'NotStarted', 'InProgress', 'Completed', 'Failed', ], ], 'DirectInternetAccess' => [ 'type' => 'string', 'enum' => [ 'Enabled', 'Disabled', ], ], 'DisassociateAdditionalCodeRepositories' => [ 'type' => 'boolean', ], 'DisassociateDefaultCodeRepository' => [ 
'type' => 'boolean', ], 'DisassociateNotebookInstanceAcceleratorTypes' => [ 'type' => 'boolean', ], 'DisassociateNotebookInstanceLifecycleConfig' => [ 'type' => 'boolean', ], 'Dollars' => [ 'type' => 'integer', 'max' => 1, 'min' => 0, ], 'EndpointArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, ], 'EndpointConfigArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, ], 'EndpointConfigName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'EndpointConfigNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'EndpointConfigSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'EndpointConfigSummary' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', 'EndpointConfigArn', 'CreationTime', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'EndpointConfigArn' => [ 'shape' => 'EndpointConfigArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], ], ], 'EndpointConfigSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'EndpointConfigSummary', ], ], 'EndpointName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'EndpointNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'EndpointSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'EndpointStatus' => [ 'type' => 'string', 'enum' => [ 'OutOfService', 'Creating', 'Updating', 'SystemUpdating', 'RollingBack', 'InService', 'Deleting', 'Failed', ], ], 'EndpointSummary' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointArn', 'CreationTime', 'LastModifiedTime', 'EndpointStatus', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointArn' => [ 'shape' => 'EndpointArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'EndpointStatus' => [ 'shape' => 'EndpointStatus', ], ], ], 'EndpointSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'EndpointSummary', ], ], 'EntityDescription' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*', ], 'EntityName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*$', ], 'EnvironmentKey' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[a-zA-Z_][a-zA-Z0-9_]*', ], 'EnvironmentMap' => [ 'type' => 'map', 'key' => [ 'shape' => 'EnvironmentKey', ], 'value' => [ 'shape' => 'EnvironmentValue', ], 'max' => 16, ], 'EnvironmentValue' => [ 'type' => 'string', 'max' => 1024, ], 'FailureReason' => [ 'type' => 'string', 'max' => 1024, ], 'Filter' => [ 'type' => 'structure', 'required' => [ 'Name', ], 'members' => [ 'Name' => [ 'shape' => 'ResourcePropertyName', ], 'Operator' => [ 'shape' => 'Operator', ], 'Value' => [ 'shape' => 'FilterValue', ], ], ], 'FilterList' => [ 'type' => 'list', 'member' => [ 'shape' => 'Filter', ], 'max' => 20, 'min' => 1, ], 'FilterValue' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, ], 'FinalHyperParameterTuningJobObjectiveMetric' => [ 'type' => 'structure', 'required' => [ 'MetricName', 'Value', ], 'members' => [ 'Type' => [ 'shape' => 'HyperParameterTuningJobObjectiveType', ], 'MetricName' => [ 'shape' => 'MetricName', ], 'Value' => [ 'shape' => 'MetricValue', ], ], ], 'FinalMetricDataList' => [ 'type' => 'list', 'member' => [ 'shape' => 'MetricData', ], 'max' => 20, 'min' => 0, ], 'Float' => [ 'type' => 'float', ], 'Framework' => [ 'type' => 'string', 'enum' 
=> [ 'TENSORFLOW', 'MXNET', 'ONNX', 'PYTORCH', 'XGBOOST', ], ], 'GetSearchSuggestionsRequest' => [ 'type' => 'structure', 'required' => [ 'Resource', ], 'members' => [ 'Resource' => [ 'shape' => 'ResourceType', ], 'SuggestionQuery' => [ 'shape' => 'SuggestionQuery', ], ], ], 'GetSearchSuggestionsResponse' => [ 'type' => 'structure', 'members' => [ 'PropertyNameSuggestions' => [ 'shape' => 'PropertyNameSuggestionList', ], ], ], 'GitConfig' => [ 'type' => 'structure', 'required' => [ 'RepositoryUrl', ], 'members' => [ 'RepositoryUrl' => [ 'shape' => 'GitConfigUrl', ], 'Branch' => [ 'shape' => 'Branch', ], 'SecretArn' => [ 'shape' => 'SecretArn', ], ], ], 'GitConfigForUpdate' => [ 'type' => 'structure', 'members' => [ 'SecretArn' => [ 'shape' => 'SecretArn', ], ], ], 'GitConfigUrl' => [ 'type' => 'string', 'pattern' => '^https://([^/]+)/?(.*)$', ], 'HumanTaskConfig' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', 'UiConfig', 'PreHumanTaskLambdaArn', 'TaskTitle', 'TaskDescription', 'NumberOfHumanWorkersPerDataObject', 'TaskTimeLimitInSeconds', 'AnnotationConsolidationConfig', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'UiConfig' => [ 'shape' => 'UiConfig', ], 'PreHumanTaskLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], 'TaskKeywords' => [ 'shape' => 'TaskKeywords', ], 'TaskTitle' => [ 'shape' => 'TaskTitle', ], 'TaskDescription' => [ 'shape' => 'TaskDescription', ], 'NumberOfHumanWorkersPerDataObject' => [ 'shape' => 'NumberOfHumanWorkersPerDataObject', ], 'TaskTimeLimitInSeconds' => [ 'shape' => 'TaskTimeLimitInSeconds', ], 'TaskAvailabilityLifetimeInSeconds' => [ 'shape' => 'TaskAvailabilityLifetimeInSeconds', ], 'MaxConcurrentTaskCount' => [ 'shape' => 'MaxConcurrentTaskCount', ], 'AnnotationConsolidationConfig' => [ 'shape' => 'AnnotationConsolidationConfig', ], 'PublicWorkforceTaskPrice' => [ 'shape' => 'PublicWorkforceTaskPrice', ], ], ], 'HyperParameterAlgorithmSpecification' => [ 'type' => 'structure', 'required' => [ 'TrainingInputMode', ], 'members' => [ 'TrainingImage' => [ 'shape' => 'AlgorithmImage', ], 'TrainingInputMode' => [ 'shape' => 'TrainingInputMode', ], 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], 'MetricDefinitions' => [ 'shape' => 'MetricDefinitionList', ], ], ], 'HyperParameterSpecification' => [ 'type' => 'structure', 'required' => [ 'Name', 'Type', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterName', ], 'Description' => [ 'shape' => 'EntityDescription', ], 'Type' => [ 'shape' => 'ParameterType', ], 'Range' => [ 'shape' => 'ParameterRange', ], 'IsTunable' => [ 'shape' => 'Boolean', ], 'IsRequired' => [ 'shape' => 'Boolean', ], 'DefaultValue' => [ 'shape' => 'ParameterValue', ], ], ], 'HyperParameterSpecifications' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterSpecification', ], 'max' => 100, 'min' => 0, ], 'HyperParameterTrainingJobDefinition' => [ 'type' => 'structure', 'required' => [ 'AlgorithmSpecification', 'RoleArn', 'OutputDataConfig', 'ResourceConfig', 'StoppingCondition', ], 'members' => [ 'StaticHyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'HyperParameterAlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 
'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], ], ], 'HyperParameterTrainingJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterTrainingJobSummary', ], ], 'HyperParameterTrainingJobSummary' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'TrainingJobArn', 'CreationTime', 'TrainingJobStatus', 'TunedHyperParameters', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'TuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingStartTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], 'TunedHyperParameters' => [ 'shape' => 'HyperParameters', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'FinalHyperParameterTuningJobObjectiveMetric' => [ 'shape' => 'FinalHyperParameterTuningJobObjectiveMetric', ], 'ObjectiveStatus' => [ 'shape' => 'ObjectiveStatus', ], ], ], 'HyperParameterTuningJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:hyper-parameter-tuning-job/.*', ], 'HyperParameterTuningJobConfig' => [ 'type' => 'structure', 'required' => [ 'Strategy', 'HyperParameterTuningJobObjective', 'ResourceLimits', 'ParameterRanges', ], 'members' => [ 'Strategy' => [ 'shape' => 'HyperParameterTuningJobStrategyType', ], 'HyperParameterTuningJobObjective' => [ 'shape' => 'HyperParameterTuningJobObjective', ], 'ResourceLimits' => [ 'shape' => 'ResourceLimits', ], 'ParameterRanges' => [ 'shape' => 'ParameterRanges', ], 'TrainingJobEarlyStoppingType' => [ 'shape' => 'TrainingJobEarlyStoppingType', ], ], ], 'HyperParameterTuningJobName' => [ 'type' => 'string', 'max' => 32, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'HyperParameterTuningJobObjective' => [ 'type' => 'structure', 'required' => [ 'Type', 'MetricName', ], 'members' => [ 'Type' => [ 'shape' => 'HyperParameterTuningJobObjectiveType', ], 'MetricName' => [ 'shape' => 'MetricName', ], ], ], 'HyperParameterTuningJobObjectiveType' => [ 'type' => 'string', 'enum' => [ 'Maximize', 'Minimize', ], ], 'HyperParameterTuningJobObjectives' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterTuningJobObjective', ], ], 'HyperParameterTuningJobSortByOptions' => [ 'type' => 'string', 'enum' => [ 'Name', 'Status', 'CreationTime', ], ], 'HyperParameterTuningJobStatus' => [ 'type' => 'string', 'enum' => [ 'Completed', 'InProgress', 'Failed', 'Stopped', 'Stopping', ], ], 'HyperParameterTuningJobStrategyType' => [ 'type' => 'string', 'enum' => [ 'Bayesian', ], ], 'HyperParameterTuningJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterTuningJobSummary', ], ], 'HyperParameterTuningJobSummary' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', 'HyperParameterTuningJobArn', 'HyperParameterTuningJobStatus', 'Strategy', 'CreationTime', 'TrainingJobStatusCounters', 'ObjectiveStatusCounters', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'HyperParameterTuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'HyperParameterTuningJobStatus' => [ 'shape' => 'HyperParameterTuningJobStatus', ], 'Strategy' => [ 'shape' => 'HyperParameterTuningJobStrategyType', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'HyperParameterTuningEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 
'shape' => 'Timestamp', ], 'TrainingJobStatusCounters' => [ 'shape' => 'TrainingJobStatusCounters', ], 'ObjectiveStatusCounters' => [ 'shape' => 'ObjectiveStatusCounters', ], 'ResourceLimits' => [ 'shape' => 'ResourceLimits', ], ], ], 'HyperParameterTuningJobWarmStartConfig' => [ 'type' => 'structure', 'required' => [ 'ParentHyperParameterTuningJobs', 'WarmStartType', ], 'members' => [ 'ParentHyperParameterTuningJobs' => [ 'shape' => 'ParentHyperParameterTuningJobs', ], 'WarmStartType' => [ 'shape' => 'HyperParameterTuningJobWarmStartType', ], ], ], 'HyperParameterTuningJobWarmStartType' => [ 'type' => 'string', 'enum' => [ 'IdenticalDataAndAlgorithm', 'TransferLearning', ], ], 'HyperParameters' => [ 'type' => 'map', 'key' => [ 'shape' => 'ParameterKey', ], 'value' => [ 'shape' => 'ParameterValue', ], 'max' => 100, 'min' => 0, ], 'Image' => [ 'type' => 'string', 'max' => 255, 'pattern' => '[\\S]+', ], 'ImageDigest' => [ 'type' => 'string', 'max' => 72, 'pattern' => '^[Ss][Hh][Aa]256:[0-9a-fA-F]{64}$', ], 'InferenceSpecification' => [ 'type' => 'structure', 'required' => [ 'Containers', 'SupportedTransformInstanceTypes', 'SupportedRealtimeInferenceInstanceTypes', 'SupportedContentTypes', 'SupportedResponseMIMETypes', ], 'members' => [ 'Containers' => [ 'shape' => 'ModelPackageContainerDefinitionList', ], 'SupportedTransformInstanceTypes' => [ 'shape' => 'TransformInstanceTypes', ], 'SupportedRealtimeInferenceInstanceTypes' => [ 'shape' => 'RealtimeInferenceInstanceTypes', ], 'SupportedContentTypes' => [ 'shape' => 'ContentTypes', ], 'SupportedResponseMIMETypes' => [ 'shape' => 'ResponseMIMETypes', ], ], ], 'InputConfig' => [ 'type' => 'structure', 'required' => [ 'S3Uri', 'DataInputConfig', 'Framework', ], 'members' => [ 'S3Uri' => [ 'shape' => 'S3Uri', ], 'DataInputConfig' => [ 'shape' => 'DataInputConfig', ], 'Framework' => [ 'shape' => 'Framework', ], ], ], 'InputDataConfig' => [ 'type' => 'list', 'member' => [ 'shape' => 'Channel', ], 'max' => 8, 'min' => 1, ], 'InputModes' => [ 'type' => 'list', 'member' => [ 'shape' => 'TrainingInputMode', ], 'min' => 1, ], 'InstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.t2.medium', 'ml.t2.large', 'ml.t2.xlarge', 'ml.t2.2xlarge', 'ml.t3.medium', 'ml.t3.large', 'ml.t3.xlarge', 'ml.t3.2xlarge', 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', 'ml.c5d.xlarge', 'ml.c5d.2xlarge', 'ml.c5d.4xlarge', 'ml.c5d.9xlarge', 'ml.c5d.18xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', ], ], 'IntegerParameterRange' => [ 'type' => 'structure', 'required' => [ 'Name', 'MinValue', 'MaxValue', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterKey', ], 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'IntegerParameterRangeSpecification' => [ 'type' => 'structure', 'required' => [ 'MinValue', 'MaxValue', ], 'members' => [ 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'IntegerParameterRanges' => [ 'type' => 'list', 'member' => [ 'shape' => 'IntegerParameterRange', ], 'max' => 20, 'min' => 0, ], 'JobReferenceCode' => [ 'type' => 'string', 'min' => 1, ], 'JobReferenceCodeContains' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 
], 'KmsKeyId' => [ 'type' => 'string', 'max' => 2048, ], 'LabelAttributeName' => [ 'type' => 'string', 'max' => 127, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'LabelCounter' => [ 'type' => 'integer', 'min' => 0, ], 'LabelCounters' => [ 'type' => 'structure', 'members' => [ 'TotalLabeled' => [ 'shape' => 'LabelCounter', ], 'HumanLabeled' => [ 'shape' => 'LabelCounter', ], 'MachineLabeled' => [ 'shape' => 'LabelCounter', ], 'FailedNonRetryableError' => [ 'shape' => 'LabelCounter', ], 'Unlabeled' => [ 'shape' => 'LabelCounter', ], ], ], 'LabelCountersForWorkteam' => [ 'type' => 'structure', 'members' => [ 'HumanLabeled' => [ 'shape' => 'LabelCounter', ], 'PendingHuman' => [ 'shape' => 'LabelCounter', ], 'Total' => [ 'shape' => 'LabelCounter', ], ], ], 'LabelingJobAlgorithmSpecificationArn' => [ 'type' => 'string', 'max' => 2048, ], 'LabelingJobAlgorithmsConfig' => [ 'type' => 'structure', 'required' => [ 'LabelingJobAlgorithmSpecificationArn', ], 'members' => [ 'LabelingJobAlgorithmSpecificationArn' => [ 'shape' => 'LabelingJobAlgorithmSpecificationArn', ], 'InitialActiveLearningModelArn' => [ 'shape' => 'ModelArn', ], 'LabelingJobResourceConfig' => [ 'shape' => 'LabelingJobResourceConfig', ], ], ], 'LabelingJobArn' => [ 'type' => 'string', 'max' => 2048, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:labeling-job/.*', ], 'LabelingJobDataAttributes' => [ 'type' => 'structure', 'members' => [ 'ContentClassifiers' => [ 'shape' => 'ContentClassifiers', ], ], ], 'LabelingJobDataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataSource', ], 'members' => [ 'S3DataSource' => [ 'shape' => 'LabelingJobS3DataSource', ], ], ], 'LabelingJobForWorkteamSummary' => [ 'type' => 'structure', 'required' => [ 'JobReferenceCode', 'WorkRequesterAccountId', 'CreationTime', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'JobReferenceCode' => [ 'shape' => 'JobReferenceCode', ], 'WorkRequesterAccountId' => [ 'shape' => 'AccountId', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LabelCounters' => [ 'shape' => 'LabelCountersForWorkteam', ], ], ], 'LabelingJobForWorkteamSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'LabelingJobForWorkteamSummary', ], ], 'LabelingJobInputConfig' => [ 'type' => 'structure', 'required' => [ 'DataSource', ], 'members' => [ 'DataSource' => [ 'shape' => 'LabelingJobDataSource', ], 'DataAttributes' => [ 'shape' => 'LabelingJobDataAttributes', ], ], ], 'LabelingJobName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'LabelingJobOutput' => [ 'type' => 'structure', 'required' => [ 'OutputDatasetS3Uri', ], 'members' => [ 'OutputDatasetS3Uri' => [ 'shape' => 'S3Uri', ], 'FinalActiveLearningModelArn' => [ 'shape' => 'ModelArn', ], ], ], 'LabelingJobOutputConfig' => [ 'type' => 'structure', 'required' => [ 'S3OutputPath', ], 'members' => [ 'S3OutputPath' => [ 'shape' => 'S3Uri', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'LabelingJobResourceConfig' => [ 'type' => 'structure', 'members' => [ 'VolumeKmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'LabelingJobS3DataSource' => [ 'type' => 'structure', 'required' => [ 'ManifestS3Uri', ], 'members' => [ 'ManifestS3Uri' => [ 'shape' => 'S3Uri', ], ], ], 'LabelingJobStatus' => [ 'type' => 'string', 'enum' => [ 'InProgress', 'Completed', 'Failed', 'Stopping', 'Stopped', ], ], 'LabelingJobStoppingConditions' => [ 'type' => 'structure', 'members' => [ 'MaxHumanLabeledObjectCount' => [ 'shape' => 
'MaxHumanLabeledObjectCount', ], 'MaxPercentageOfInputDatasetLabeled' => [ 'shape' => 'MaxPercentageOfInputDatasetLabeled', ], ], ], 'LabelingJobSummary' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', 'LabelingJobArn', 'CreationTime', 'LastModifiedTime', 'LabelingJobStatus', 'LabelCounters', 'WorkteamArn', 'PreHumanTaskLambdaArn', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'LabelingJobStatus' => [ 'shape' => 'LabelingJobStatus', ], 'LabelCounters' => [ 'shape' => 'LabelCounters', ], 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'PreHumanTaskLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], 'AnnotationConsolidationLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'LabelingJobOutput' => [ 'shape' => 'LabelingJobOutput', ], 'InputConfig' => [ 'shape' => 'LabelingJobInputConfig', ], ], ], 'LabelingJobSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'LabelingJobSummary', ], ], 'LambdaFunctionArn' => [ 'type' => 'string', 'max' => 2048, 'pattern' => 'arn:aws[a-z\\-]*:lambda:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:function:[a-zA-Z0-9-_\\.]+(:(\\$LATEST|[a-zA-Z0-9-_]+))?', ], 'LastModifiedTime' => [ 'type' => 'timestamp', ], 'ListAlgorithmsInput' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'SortBy' => [ 'shape' => 'AlgorithmSortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListAlgorithmsOutput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmSummaryList', ], 'members' => [ 'AlgorithmSummaryList' => [ 'shape' => 'AlgorithmSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListCodeRepositoriesInput' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'CodeRepositoryNameContains', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'SortBy' => [ 'shape' => 'CodeRepositorySortBy', ], 'SortOrder' => [ 'shape' => 'CodeRepositorySortOrder', ], ], ], 'ListCodeRepositoriesOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositorySummaryList', ], 'members' => [ 'CodeRepositorySummaryList' => [ 'shape' => 'CodeRepositorySummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListCompilationJobsRequest' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'LastModifiedTime', ], 'LastModifiedTimeBefore' => [ 'shape' => 'LastModifiedTime', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'StatusEquals' => [ 'shape' => 'CompilationJobStatus', ], 'SortBy' => [ 'shape' => 'ListCompilationJobsSortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListCompilationJobsResponse' => [ 'type' => 'structure', 'required' => [ 
'CompilationJobSummaries', ], 'members' => [ 'CompilationJobSummaries' => [ 'shape' => 'CompilationJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListCompilationJobsSortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'ListEndpointConfigsInput' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'EndpointConfigSortKey', ], 'SortOrder' => [ 'shape' => 'OrderKey', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'EndpointConfigNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], ], ], 'ListEndpointConfigsOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigs', ], 'members' => [ 'EndpointConfigs' => [ 'shape' => 'EndpointConfigSummaryList', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], ], ], 'ListEndpointsInput' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'EndpointSortKey', ], 'SortOrder' => [ 'shape' => 'OrderKey', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'EndpointNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'StatusEquals' => [ 'shape' => 'EndpointStatus', ], ], ], 'ListEndpointsOutput' => [ 'type' => 'structure', 'required' => [ 'Endpoints', ], 'members' => [ 'Endpoints' => [ 'shape' => 'EndpointSummaryList', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], ], ], 'ListHyperParameterTuningJobsRequest' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], 'SortBy' => [ 'shape' => 'HyperParameterTuningJobSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'StatusEquals' => [ 'shape' => 'HyperParameterTuningJobStatus', ], ], ], 'ListHyperParameterTuningJobsResponse' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobSummaries', ], 'members' => [ 'HyperParameterTuningJobSummaries' => [ 'shape' => 'HyperParameterTuningJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListLabelingJobsForWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'JobReferenceCodeContains' => [ 'shape' => 'JobReferenceCodeContains', ], 'SortBy' => [ 'shape' => 'ListLabelingJobsForWorkteamSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListLabelingJobsForWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'LabelingJobSummaryList', ], 'members' => [ 'LabelingJobSummaryList' => [ 'shape' => 'LabelingJobForWorkteamSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListLabelingJobsForWorkteamSortByOptions' => [ 'type' => 'string', 'enum' => [ 'CreationTime', 
], ], 'ListLabelingJobsRequest' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'SortBy' => [ 'shape' => 'SortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'StatusEquals' => [ 'shape' => 'LabelingJobStatus', ], ], ], 'ListLabelingJobsResponse' => [ 'type' => 'structure', 'members' => [ 'LabelingJobSummaryList' => [ 'shape' => 'LabelingJobSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListModelPackagesInput' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'SortBy' => [ 'shape' => 'ModelPackageSortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListModelPackagesOutput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageSummaryList', ], 'members' => [ 'ModelPackageSummaryList' => [ 'shape' => 'ModelPackageSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListModelsInput' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'ModelSortKey', ], 'SortOrder' => [ 'shape' => 'OrderKey', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'ModelNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], ], ], 'ListModelsOutput' => [ 'type' => 'structure', 'required' => [ 'Models', ], 'members' => [ 'Models' => [ 'shape' => 'ModelSummaryList', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], ], ], 'ListNotebookInstanceLifecycleConfigsInput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'SortBy' => [ 'shape' => 'NotebookInstanceLifecycleConfigSortKey', ], 'SortOrder' => [ 'shape' => 'NotebookInstanceLifecycleConfigSortOrder', ], 'NameContains' => [ 'shape' => 'NotebookInstanceLifecycleConfigNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeBefore' => [ 'shape' => 'LastModifiedTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'LastModifiedTime', ], ], ], 'ListNotebookInstanceLifecycleConfigsOutput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'NotebookInstanceLifecycleConfigs' => [ 'shape' => 'NotebookInstanceLifecycleConfigSummaryList', ], ], ], 'ListNotebookInstancesInput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'SortBy' => [ 'shape' => 'NotebookInstanceSortKey', ], 'SortOrder' => [ 'shape' => 'NotebookInstanceSortOrder', ], 'NameContains' => [ 'shape' => 'NotebookInstanceNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeBefore' => [ 'shape' => 'LastModifiedTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'LastModifiedTime', ], 'StatusEquals' => [ 'shape' => 'NotebookInstanceStatus', ], 
'NotebookInstanceLifecycleConfigNameContains' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DefaultCodeRepositoryContains' => [ 'shape' => 'CodeRepositoryContains', ], 'AdditionalCodeRepositoryEquals' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], ], ], 'ListNotebookInstancesOutput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'NotebookInstances' => [ 'shape' => 'NotebookInstanceSummaryList', ], ], ], 'ListSubscribedWorkteamsRequest' => [ 'type' => 'structure', 'members' => [ 'NameContains' => [ 'shape' => 'WorkteamName', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'ListSubscribedWorkteamsResponse' => [ 'type' => 'structure', 'required' => [ 'SubscribedWorkteams', ], 'members' => [ 'SubscribedWorkteams' => [ 'shape' => 'SubscribedWorkteams', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTagsInput' => [ 'type' => 'structure', 'required' => [ 'ResourceArn', ], 'members' => [ 'ResourceArn' => [ 'shape' => 'ResourceArn', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'ListTagsMaxResults', ], ], ], 'ListTagsMaxResults' => [ 'type' => 'integer', 'min' => 50, ], 'ListTagsOutput' => [ 'type' => 'structure', 'members' => [ 'Tags' => [ 'shape' => 'TagList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTrainingJobsForHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'StatusEquals' => [ 'shape' => 'TrainingJobStatus', ], 'SortBy' => [ 'shape' => 'TrainingJobSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListTrainingJobsForHyperParameterTuningJobResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobSummaries', ], 'members' => [ 'TrainingJobSummaries' => [ 'shape' => 'HyperParameterTrainingJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTrainingJobsRequest' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'StatusEquals' => [ 'shape' => 'TrainingJobStatus', ], 'SortBy' => [ 'shape' => 'SortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListTrainingJobsResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobSummaries', ], 'members' => [ 'TrainingJobSummaries' => [ 'shape' => 'TrainingJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTransformJobsRequest' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'StatusEquals' => [ 'shape' => 'TransformJobStatus', ], 'SortBy' => [ 'shape' => 'SortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 
'ListTransformJobsResponse' => [ 'type' => 'structure', 'required' => [ 'TransformJobSummaries', ], 'members' => [ 'TransformJobSummaries' => [ 'shape' => 'TransformJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListWorkteamsRequest' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'ListWorkteamsSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'NameContains' => [ 'shape' => 'WorkteamName', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'ListWorkteamsResponse' => [ 'type' => 'structure', 'required' => [ 'Workteams', ], 'members' => [ 'Workteams' => [ 'shape' => 'Workteams', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListWorkteamsSortByOptions' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreateDate', ], ], 'MaxConcurrentTaskCount' => [ 'type' => 'integer', 'max' => 1000, 'min' => 1, ], 'MaxConcurrentTransforms' => [ 'type' => 'integer', 'min' => 0, ], 'MaxHumanLabeledObjectCount' => [ 'type' => 'integer', 'min' => 1, ], 'MaxNumberOfTrainingJobs' => [ 'type' => 'integer', 'min' => 1, ], 'MaxParallelTrainingJobs' => [ 'type' => 'integer', 'min' => 1, ], 'MaxPayloadInMB' => [ 'type' => 'integer', 'min' => 0, ], 'MaxPercentageOfInputDatasetLabeled' => [ 'type' => 'integer', 'max' => 100, 'min' => 1, ], 'MaxResults' => [ 'type' => 'integer', 'max' => 100, 'min' => 1, ], 'MaxRuntimeInSeconds' => [ 'type' => 'integer', 'min' => 1, ], 'MemberDefinition' => [ 'type' => 'structure', 'members' => [ 'CognitoMemberDefinition' => [ 'shape' => 'CognitoMemberDefinition', ], ], ], 'MemberDefinitions' => [ 'type' => 'list', 'member' => [ 'shape' => 'MemberDefinition', ], 'max' => 10, 'min' => 1, ], 'MetricData' => [ 'type' => 'structure', 'members' => [ 'MetricName' => [ 'shape' => 'MetricName', ], 'Value' => [ 'shape' => 'Float', ], 'Timestamp' => [ 'shape' => 'Timestamp', ], ], ], 'MetricDefinition' => [ 'type' => 'structure', 'required' => [ 'Name', 'Regex', ], 'members' => [ 'Name' => [ 'shape' => 'MetricName', ], 'Regex' => [ 'shape' => 'MetricRegex', ], ], ], 'MetricDefinitionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'MetricDefinition', ], 'max' => 20, 'min' => 0, ], 'MetricName' => [ 'type' => 'string', 'max' => 255, 'min' => 1, ], 'MetricRegex' => [ 'type' => 'string', 'max' => 500, 'min' => 1, ], 'MetricValue' => [ 'type' => 'float', ], 'ModelArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, ], 'ModelArtifacts' => [ 'type' => 'structure', 'required' => [ 'S3ModelArtifacts', ], 'members' => [ 'S3ModelArtifacts' => [ 'shape' => 'S3Uri', ], ], ], 'ModelName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'ModelNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'ModelPackageArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model-package/.*', ], 'ModelPackageContainerDefinition' => [ 'type' => 'structure', 'required' => [ 'Image', ], 'members' => [ 'ContainerHostname' => [ 'shape' => 'ContainerHostname', ], 'Image' => [ 'shape' => 'Image', ], 'ImageDigest' => [ 'shape' => 'ImageDigest', ], 'ModelDataUrl' => [ 'shape' => 'Url', ], 'ProductId' => [ 'shape' => 'ProductId', ], ], ], 'ModelPackageContainerDefinitionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageContainerDefinition', ], 'max' => 1, 'min' => 1, ], 'ModelPackageSortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], 
], 'ModelPackageStatus' => [ 'type' => 'string', 'enum' => [ 'Pending', 'InProgress', 'Completed', 'Failed', 'Deleting', ], ], 'ModelPackageStatusDetails' => [ 'type' => 'structure', 'required' => [ 'ValidationStatuses', ], 'members' => [ 'ValidationStatuses' => [ 'shape' => 'ModelPackageStatusItemList', ], 'ImageScanStatuses' => [ 'shape' => 'ModelPackageStatusItemList', ], ], ], 'ModelPackageStatusItem' => [ 'type' => 'structure', 'required' => [ 'Name', 'Status', ], 'members' => [ 'Name' => [ 'shape' => 'EntityName', ], 'Status' => [ 'shape' => 'DetailedModelPackageStatus', ], 'FailureReason' => [ 'shape' => 'String', ], ], ], 'ModelPackageStatusItemList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageStatusItem', ], ], 'ModelPackageSummary' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', 'ModelPackageArn', 'CreationTime', 'ModelPackageStatus', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], 'ModelPackageArn' => [ 'shape' => 'ModelPackageArn', ], 'ModelPackageDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'ModelPackageStatus' => [ 'shape' => 'ModelPackageStatus', ], ], ], 'ModelPackageSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageSummary', ], ], 'ModelPackageValidationProfile' => [ 'type' => 'structure', 'required' => [ 'ProfileName', 'TransformJobDefinition', ], 'members' => [ 'ProfileName' => [ 'shape' => 'EntityName', ], 'TransformJobDefinition' => [ 'shape' => 'TransformJobDefinition', ], ], ], 'ModelPackageValidationProfiles' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageValidationProfile', ], 'max' => 1, 'min' => 1, ], 'ModelPackageValidationSpecification' => [ 'type' => 'structure', 'required' => [ 'ValidationRole', 'ValidationProfiles', ], 'members' => [ 'ValidationRole' => [ 'shape' => 'RoleArn', ], 'ValidationProfiles' => [ 'shape' => 'ModelPackageValidationProfiles', ], ], ], 'ModelSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'ModelSummary' => [ 'type' => 'structure', 'required' => [ 'ModelName', 'ModelArn', 'CreationTime', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], 'ModelArn' => [ 'shape' => 'ModelArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], ], ], 'ModelSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelSummary', ], ], 'NameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9\\-]+', ], 'NestedFilters' => [ 'type' => 'structure', 'required' => [ 'NestedPropertyName', 'Filters', ], 'members' => [ 'NestedPropertyName' => [ 'shape' => 'ResourcePropertyName', ], 'Filters' => [ 'shape' => 'FilterList', ], ], ], 'NestedFiltersList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NestedFilters', ], 'max' => 20, 'min' => 1, ], 'NetworkInterfaceId' => [ 'type' => 'string', ], 'NextToken' => [ 'type' => 'string', 'max' => 8192, ], 'NotebookInstanceAcceleratorType' => [ 'type' => 'string', 'enum' => [ 'ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ], ], 'NotebookInstanceAcceleratorTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceAcceleratorType', ], ], 'NotebookInstanceArn' => [ 'type' => 'string', 'max' => 256, ], 'NotebookInstanceLifecycleConfigArn' => [ 'type' => 'string', 'max' => 256, ], 'NotebookInstanceLifecycleConfigContent' => [ 'type' => 'string', 'max' => 16384, 'min' => 1, ], 'NotebookInstanceLifecycleConfigList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceLifecycleHook', ], 'max' 
=> 1, ], 'NotebookInstanceLifecycleConfigName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'NotebookInstanceLifecycleConfigNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'NotebookInstanceLifecycleConfigSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'LastModifiedTime', ], ], 'NotebookInstanceLifecycleConfigSortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'NotebookInstanceLifecycleConfigSummary' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', 'NotebookInstanceLifecycleConfigArn', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'NotebookInstanceLifecycleConfigArn' => [ 'shape' => 'NotebookInstanceLifecycleConfigArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], ], ], 'NotebookInstanceLifecycleConfigSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceLifecycleConfigSummary', ], ], 'NotebookInstanceLifecycleHook' => [ 'type' => 'structure', 'members' => [ 'Content' => [ 'shape' => 'NotebookInstanceLifecycleConfigContent', ], ], ], 'NotebookInstanceName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'NotebookInstanceNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'NotebookInstanceSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'NotebookInstanceSortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'NotebookInstanceStatus' => [ 'type' => 'string', 'enum' => [ 'Pending', 'InService', 'Stopping', 'Stopped', 'Failed', 'Deleting', 'Updating', ], ], 'NotebookInstanceSummary' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', 'NotebookInstanceArn', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'NotebookInstanceArn' => [ 'shape' => 'NotebookInstanceArn', ], 'NotebookInstanceStatus' => [ 'shape' => 'NotebookInstanceStatus', ], 'Url' => [ 'shape' => 'NotebookInstanceUrl', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], ], ], 'NotebookInstanceSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceSummary', ], ], 'NotebookInstanceUrl' => [ 'type' => 'string', ], 'NotebookInstanceVolumeSizeInGB' => [ 'type' => 'integer', 'max' => 16384, 'min' => 5, ], 'NumberOfHumanWorkersPerDataObject' => [ 'type' => 'integer', 'max' => 9, 'min' => 1, ], 'ObjectiveStatus' => [ 'type' => 'string', 'enum' => [ 'Succeeded', 'Pending', 'Failed', ], ], 'ObjectiveStatusCounter' => [ 'type' => 'integer', 'min' => 0, ], 'ObjectiveStatusCounters' => [ 'type' => 'structure', 'members' => [ 'Succeeded' => [ 'shape' => 'ObjectiveStatusCounter', ], 'Pending' => [ 'shape' => 'ObjectiveStatusCounter', ], 'Failed' => [ 'shape' => 'ObjectiveStatusCounter', ], ], ], 'Operator' => [ 'type' => 'string', 'enum' => [ 'Equals', 'NotEquals', 'GreaterThan', 'GreaterThanOrEqualTo', 'LessThan', 'LessThanOrEqualTo', 'Contains', ], ], 'OrderKey' => [ 'type' 
=> 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'OutputConfig' => [ 'type' => 'structure', 'required' => [ 'S3OutputLocation', 'TargetDevice', ], 'members' => [ 'S3OutputLocation' => [ 'shape' => 'S3Uri', ], 'TargetDevice' => [ 'shape' => 'TargetDevice', ], ], ], 'OutputDataConfig' => [ 'type' => 'structure', 'required' => [ 'S3OutputPath', ], 'members' => [ 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'S3OutputPath' => [ 'shape' => 'S3Uri', ], ], ], 'PaginationToken' => [ 'type' => 'string', 'max' => 8192, ], 'ParameterKey' => [ 'type' => 'string', 'max' => 256, ], 'ParameterName' => [ 'type' => 'string', 'max' => 256, ], 'ParameterRange' => [ 'type' => 'structure', 'members' => [ 'IntegerParameterRangeSpecification' => [ 'shape' => 'IntegerParameterRangeSpecification', ], 'ContinuousParameterRangeSpecification' => [ 'shape' => 'ContinuousParameterRangeSpecification', ], 'CategoricalParameterRangeSpecification' => [ 'shape' => 'CategoricalParameterRangeSpecification', ], ], ], 'ParameterRanges' => [ 'type' => 'structure', 'members' => [ 'IntegerParameterRanges' => [ 'shape' => 'IntegerParameterRanges', ], 'ContinuousParameterRanges' => [ 'shape' => 'ContinuousParameterRanges', ], 'CategoricalParameterRanges' => [ 'shape' => 'CategoricalParameterRanges', ], ], ], 'ParameterType' => [ 'type' => 'string', 'enum' => [ 'Integer', 'Continuous', 'Categorical', 'FreeText', ], ], 'ParameterValue' => [ 'type' => 'string', 'max' => 256, ], 'ParameterValues' => [ 'type' => 'list', 'member' => [ 'shape' => 'ParameterValue', ], 'max' => 20, 'min' => 1, ], 'ParentHyperParameterTuningJob' => [ 'type' => 'structure', 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], ], ], 'ParentHyperParameterTuningJobs' => [ 'type' => 'list', 'member' => [ 'shape' => 'ParentHyperParameterTuningJob', ], 'max' => 5, 'min' => 1, ], 'ProductId' => [ 'type' => 'string', ], 'ProductListings' => [ 'type' => 'list', 'member' => [ 'shape' => 'String', ], ], 'ProductionVariant' => [ 'type' => 'structure', 'required' => [ 'VariantName', 'ModelName', 'InitialInstanceCount', 'InstanceType', ], 'members' => [ 'VariantName' => [ 'shape' => 'VariantName', ], 'ModelName' => [ 'shape' => 'ModelName', ], 'InitialInstanceCount' => [ 'shape' => 'TaskCount', ], 'InstanceType' => [ 'shape' => 'ProductionVariantInstanceType', ], 'InitialVariantWeight' => [ 'shape' => 'VariantWeight', ], 'AcceleratorType' => [ 'shape' => 'ProductionVariantAcceleratorType', ], ], ], 'ProductionVariantAcceleratorType' => [ 'type' => 'string', 'enum' => [ 'ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ], ], 'ProductionVariantInstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.t2.medium', 'ml.t2.large', 'ml.t2.xlarge', 'ml.t2.2xlarge', 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.large', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.large', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ], ], 'ProductionVariantList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProductionVariant', ], 'min' => 1, ], 'ProductionVariantSummary' => [ 'type' => 'structure', 'required' => [ 'VariantName', ], 'members' => [ 'VariantName' => [ 'shape' => 'VariantName', ], 'DeployedImages' => [ 'shape' => 
'DeployedImages', ], 'CurrentWeight' => [ 'shape' => 'VariantWeight', ], 'DesiredWeight' => [ 'shape' => 'VariantWeight', ], 'CurrentInstanceCount' => [ 'shape' => 'TaskCount', ], 'DesiredInstanceCount' => [ 'shape' => 'TaskCount', ], ], ], 'ProductionVariantSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProductionVariantSummary', ], 'min' => 1, ], 'PropertyNameHint' => [ 'type' => 'string', 'max' => 100, 'min' => 0, ], 'PropertyNameQuery' => [ 'type' => 'structure', 'required' => [ 'PropertyNameHint', ], 'members' => [ 'PropertyNameHint' => [ 'shape' => 'PropertyNameHint', ], ], ], 'PropertyNameSuggestion' => [ 'type' => 'structure', 'members' => [ 'PropertyName' => [ 'shape' => 'ResourcePropertyName', ], ], ], 'PropertyNameSuggestionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'PropertyNameSuggestion', ], ], 'PublicWorkforceTaskPrice' => [ 'type' => 'structure', 'members' => [ 'AmountInUsd' => [ 'shape' => 'USD', ], ], ], 'RealtimeInferenceInstanceTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProductionVariantInstanceType', ], ], 'RecordWrapper' => [ 'type' => 'string', 'enum' => [ 'None', 'RecordIO', ], ], 'RenderUiTemplateRequest' => [ 'type' => 'structure', 'required' => [ 'UiTemplate', 'Task', 'RoleArn', ], 'members' => [ 'UiTemplate' => [ 'shape' => 'UiTemplate', ], 'Task' => [ 'shape' => 'RenderableTask', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], ], ], 'RenderUiTemplateResponse' => [ 'type' => 'structure', 'required' => [ 'RenderedContent', 'Errors', ], 'members' => [ 'RenderedContent' => [ 'shape' => 'String', ], 'Errors' => [ 'shape' => 'RenderingErrorList', ], ], ], 'RenderableTask' => [ 'type' => 'structure', 'required' => [ 'Input', ], 'members' => [ 'Input' => [ 'shape' => 'TaskInput', ], ], ], 'RenderingError' => [ 'type' => 'structure', 'required' => [ 'Code', 'Message', ], 'members' => [ 'Code' => [ 'shape' => 'String', ], 'Message' => [ 'shape' => 'String', ], ], ], 'RenderingErrorList' => [ 'type' => 'list', 'member' => [ 'shape' => 'RenderingError', ], ], 'ResourceArn' => [ 'type' => 'string', 'max' => 256, ], 'ResourceConfig' => [ 'type' => 'structure', 'required' => [ 'InstanceType', 'InstanceCount', 'VolumeSizeInGB', ], 'members' => [ 'InstanceType' => [ 'shape' => 'TrainingInstanceType', ], 'InstanceCount' => [ 'shape' => 'TrainingInstanceCount', ], 'VolumeSizeInGB' => [ 'shape' => 'VolumeSizeInGB', ], 'VolumeKmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'ResourceInUse' => [ 'type' => 'structure', 'members' => [ 'Message' => [ 'shape' => 'FailureReason', ], ], 'exception' => true, ], 'ResourceLimitExceeded' => [ 'type' => 'structure', 'members' => [ 'Message' => [ 'shape' => 'FailureReason', ], ], 'exception' => true, ], 'ResourceLimits' => [ 'type' => 'structure', 'required' => [ 'MaxNumberOfTrainingJobs', 'MaxParallelTrainingJobs', ], 'members' => [ 'MaxNumberOfTrainingJobs' => [ 'shape' => 'MaxNumberOfTrainingJobs', ], 'MaxParallelTrainingJobs' => [ 'shape' => 'MaxParallelTrainingJobs', ], ], ], 'ResourceNotFound' => [ 'type' => 'structure', 'members' => [ 'Message' => [ 'shape' => 'FailureReason', ], ], 'exception' => true, ], 'ResourcePropertyName' => [ 'type' => 'string', 'max' => 255, 'min' => 1, ], 'ResourceType' => [ 'type' => 'string', 'enum' => [ 'TrainingJob', ], ], 'ResponseMIMEType' => [ 'type' => 'string', 'max' => 1024, ], 'ResponseMIMETypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'ResponseMIMEType', ], ], 'RoleArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, 'pattern' => 
'^arn:aws[a-z\\-]*:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+$', ], 'S3DataDistribution' => [ 'type' => 'string', 'enum' => [ 'FullyReplicated', 'ShardedByS3Key', ], ], 'S3DataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataType', 'S3Uri', ], 'members' => [ 'S3DataType' => [ 'shape' => 'S3DataType', ], 'S3Uri' => [ 'shape' => 'S3Uri', ], 'S3DataDistributionType' => [ 'shape' => 'S3DataDistribution', ], 'AttributeNames' => [ 'shape' => 'AttributeNames', ], ], ], 'S3DataType' => [ 'type' => 'string', 'enum' => [ 'ManifestFile', 'S3Prefix', 'AugmentedManifestFile', ], ], 'S3Uri' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '^(https|s3)://([^/]+)/?(.*)$', ], 'SearchExpression' => [ 'type' => 'structure', 'members' => [ 'Filters' => [ 'shape' => 'FilterList', ], 'NestedFilters' => [ 'shape' => 'NestedFiltersList', ], 'SubExpressions' => [ 'shape' => 'SearchExpressionList', ], 'Operator' => [ 'shape' => 'BooleanOperator', ], ], ], 'SearchExpressionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'SearchExpression', ], 'max' => 20, 'min' => 1, ], 'SearchRecord' => [ 'type' => 'structure', 'members' => [ 'TrainingJob' => [ 'shape' => 'TrainingJob', ], ], ], 'SearchRequest' => [ 'type' => 'structure', 'required' => [ 'Resource', ], 'members' => [ 'Resource' => [ 'shape' => 'ResourceType', ], 'SearchExpression' => [ 'shape' => 'SearchExpression', ], 'SortBy' => [ 'shape' => 'ResourcePropertyName', ], 'SortOrder' => [ 'shape' => 'SearchSortOrder', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'SearchResponse' => [ 'type' => 'structure', 'members' => [ 'Results' => [ 'shape' => 'SearchResultsList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'SearchResultsList' => [ 'type' => 'list', 'member' => [ 'shape' => 'SearchRecord', ], ], 'SearchSortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'SecondaryStatus' => [ 'type' => 'string', 'enum' => [ 'Starting', 'LaunchingMLInstances', 'PreparingTrainingStack', 'Downloading', 'DownloadingTrainingImage', 'Training', 'Uploading', 'Stopping', 'Stopped', 'MaxRuntimeExceeded', 'Completed', 'Failed', ], ], 'SecondaryStatusTransition' => [ 'type' => 'structure', 'required' => [ 'Status', 'StartTime', ], 'members' => [ 'Status' => [ 'shape' => 'SecondaryStatus', ], 'StartTime' => [ 'shape' => 'Timestamp', ], 'EndTime' => [ 'shape' => 'Timestamp', ], 'StatusMessage' => [ 'shape' => 'StatusMessage', ], ], ], 'SecondaryStatusTransitions' => [ 'type' => 'list', 'member' => [ 'shape' => 'SecondaryStatusTransition', ], ], 'SecretArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:secretsmanager:[a-z0-9\\-]*:[0-9]{12}:secret:.*', ], 'SecurityGroupId' => [ 'type' => 'string', 'max' => 32, ], 'SecurityGroupIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'SecurityGroupId', ], 'max' => 5, ], 'Seed' => [ 'type' => 'long', ], 'SessionExpirationDurationInSeconds' => [ 'type' => 'integer', 'max' => 43200, 'min' => 1800, ], 'ShuffleConfig' => [ 'type' => 'structure', 'required' => [ 'Seed', ], 'members' => [ 'Seed' => [ 'shape' => 'Seed', ], ], ], 'SortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'SortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'SourceAlgorithm' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', ], 'members' => [ 'ModelDataUrl' => [ 'shape' => 'Url', ], 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], ], ], 
'SourceAlgorithmList' => [ 'type' => 'list', 'member' => [ 'shape' => 'SourceAlgorithm', ], 'max' => 1, 'min' => 1, ], 'SourceAlgorithmSpecification' => [ 'type' => 'structure', 'required' => [ 'SourceAlgorithms', ], 'members' => [ 'SourceAlgorithms' => [ 'shape' => 'SourceAlgorithmList', ], ], ], 'SplitType' => [ 'type' => 'string', 'enum' => [ 'None', 'Line', 'RecordIO', 'TFRecord', ], ], 'StartNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'StatusMessage' => [ 'type' => 'string', ], 'StopCompilationJobRequest' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], ], ], 'StopHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], ], ], 'StopLabelingJobRequest' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], ], ], 'StopNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'StopTrainingJobRequest' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], ], ], 'StopTransformJobRequest' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], ], ], 'StoppingCondition' => [ 'type' => 'structure', 'members' => [ 'MaxRuntimeInSeconds' => [ 'shape' => 'MaxRuntimeInSeconds', ], ], ], 'String' => [ 'type' => 'string', ], 'String200' => [ 'type' => 'string', 'max' => 200, 'min' => 1, ], 'SubnetId' => [ 'type' => 'string', 'max' => 32, ], 'Subnets' => [ 'type' => 'list', 'member' => [ 'shape' => 'SubnetId', ], 'max' => 16, 'min' => 1, ], 'SubscribedWorkteam' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'MarketplaceTitle' => [ 'shape' => 'String200', ], 'SellerName' => [ 'shape' => 'String', ], 'MarketplaceDescription' => [ 'shape' => 'String200', ], 'ListingId' => [ 'shape' => 'String', ], ], ], 'SubscribedWorkteams' => [ 'type' => 'list', 'member' => [ 'shape' => 'SubscribedWorkteam', ], ], 'Success' => [ 'type' => 'boolean', ], 'SuggestionQuery' => [ 'type' => 'structure', 'members' => [ 'PropertyNameQuery' => [ 'shape' => 'PropertyNameQuery', ], ], ], 'Tag' => [ 'type' => 'structure', 'required' => [ 'Key', 'Value', ], 'members' => [ 'Key' => [ 'shape' => 'TagKey', ], 'Value' => [ 'shape' => 'TagValue', ], ], ], 'TagKey' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]*)$', ], 'TagKeyList' => [ 'type' => 'list', 'member' => [ 'shape' => 'TagKey', ], 'max' => 50, 'min' => 1, ], 'TagList' => [ 'type' => 'list', 'member' => [ 'shape' => 'Tag', ], 'max' => 50, 'min' => 0, ], 'TagValue' => [ 'type' => 'string', 'max' => 256, 'min' => 0, 'pattern' => '^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]*)$', ], 'TargetDevice' => [ 'type' => 'string', 'enum' => [ 'ml_m4', 'ml_m5', 'ml_c4', 'ml_c5', 'ml_p2', 'ml_p3', 'jetson_tx1', 'jetson_tx2', 'rasp3b', 'deeplens', ], ], 'TaskAvailabilityLifetimeInSeconds' => [ 'type' => 'integer', 'max' => 345600, 'min' => 1, ], 
'TaskCount' => [ 'type' => 'integer', 'min' => 1, ], 'TaskDescription' => [ 'type' => 'string', 'max' => 255, 'min' => 1, ], 'TaskInput' => [ 'type' => 'string', 'max' => 128000, 'min' => 2, ], 'TaskKeyword' => [ 'type' => 'string', 'max' => 30, 'min' => 1, 'pattern' => '^[A-Za-z0-9]+( [A-Za-z0-9]+)*$', ], 'TaskKeywords' => [ 'type' => 'list', 'member' => [ 'shape' => 'TaskKeyword', ], 'max' => 5, 'min' => 1, ], 'TaskTimeLimitInSeconds' => [ 'type' => 'integer', 'max' => 3600, 'min' => 1, ], 'TaskTitle' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '^[\\t\\n\\r -\\uD7FF\\uE000-\\uFFFD]*$', ], 'TemplateContent' => [ 'type' => 'string', 'max' => 128000, 'min' => 1, ], 'TenthFractionsOfACent' => [ 'type' => 'integer', 'max' => 9, 'min' => 0, ], 'Timestamp' => [ 'type' => 'timestamp', ], 'TrainingInputMode' => [ 'type' => 'string', 'enum' => [ 'Pipe', 'File', ], ], 'TrainingInstanceCount' => [ 'type' => 'integer', 'min' => 1, ], 'TrainingInstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ], ], 'TrainingInstanceTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'TrainingInstanceType', ], ], 'TrainingJob' => [ 'type' => 'structure', 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'TuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'ModelArtifacts' => [ 'shape' => 'ModelArtifacts', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], 'SecondaryStatus' => [ 'shape' => 'SecondaryStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'AlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingStartTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'SecondaryStatusTransitions' => [ 'shape' => 'SecondaryStatusTransitions', ], 'FinalMetricDataList' => [ 'shape' => 'FinalMetricDataList', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'TrainingJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:training-job/.*', ], 'TrainingJobDefinition' => [ 'type' => 'structure', 'required' => [ 'TrainingInputMode', 'InputDataConfig', 'OutputDataConfig', 'ResourceConfig', 'StoppingCondition', ], 'members' => [ 'TrainingInputMode' => [ 'shape' => 'TrainingInputMode', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 
'ResourceConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], ], ], 'TrainingJobEarlyStoppingType' => [ 'type' => 'string', 'enum' => [ 'Off', 'Auto', ], ], 'TrainingJobName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'TrainingJobSortByOptions' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', 'FinalObjectiveMetricValue', ], ], 'TrainingJobStatus' => [ 'type' => 'string', 'enum' => [ 'InProgress', 'Completed', 'Failed', 'Stopping', 'Stopped', ], ], 'TrainingJobStatusCounter' => [ 'type' => 'integer', 'min' => 0, ], 'TrainingJobStatusCounters' => [ 'type' => 'structure', 'members' => [ 'Completed' => [ 'shape' => 'TrainingJobStatusCounter', ], 'InProgress' => [ 'shape' => 'TrainingJobStatusCounter', ], 'RetryableError' => [ 'shape' => 'TrainingJobStatusCounter', ], 'NonRetryableError' => [ 'shape' => 'TrainingJobStatusCounter', ], 'Stopped' => [ 'shape' => 'TrainingJobStatusCounter', ], ], ], 'TrainingJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'TrainingJobSummary', ], ], 'TrainingJobSummary' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'TrainingJobArn', 'CreationTime', 'TrainingJobStatus', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], ], ], 'TrainingSpecification' => [ 'type' => 'structure', 'required' => [ 'TrainingImage', 'SupportedTrainingInstanceTypes', 'TrainingChannels', ], 'members' => [ 'TrainingImage' => [ 'shape' => 'Image', ], 'TrainingImageDigest' => [ 'shape' => 'ImageDigest', ], 'SupportedHyperParameters' => [ 'shape' => 'HyperParameterSpecifications', ], 'SupportedTrainingInstanceTypes' => [ 'shape' => 'TrainingInstanceTypes', ], 'SupportsDistributedTraining' => [ 'shape' => 'Boolean', ], 'MetricDefinitions' => [ 'shape' => 'MetricDefinitionList', ], 'TrainingChannels' => [ 'shape' => 'ChannelSpecifications', ], 'SupportedTuningJobObjectiveMetrics' => [ 'shape' => 'HyperParameterTuningJobObjectives', ], ], ], 'TransformDataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataSource', ], 'members' => [ 'S3DataSource' => [ 'shape' => 'TransformS3DataSource', ], ], ], 'TransformEnvironmentKey' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[a-zA-Z_][a-zA-Z0-9_]*', ], 'TransformEnvironmentMap' => [ 'type' => 'map', 'key' => [ 'shape' => 'TransformEnvironmentKey', ], 'value' => [ 'shape' => 'TransformEnvironmentValue', ], 'max' => 16, ], 'TransformEnvironmentValue' => [ 'type' => 'string', 'max' => 10240, ], 'TransformInput' => [ 'type' => 'structure', 'required' => [ 'DataSource', ], 'members' => [ 'DataSource' => [ 'shape' => 'TransformDataSource', ], 'ContentType' => [ 'shape' => 'ContentType', ], 'CompressionType' => [ 'shape' => 'CompressionType', ], 'SplitType' => [ 'shape' => 'SplitType', ], ], ], 'TransformInstanceCount' => [ 'type' => 'integer', 'min' => 1, ], 'TransformInstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 
'ml.c5.18xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', ], ], 'TransformInstanceTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'TransformInstanceType', ], 'min' => 1, ], 'TransformJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:transform-job/.*', ], 'TransformJobDefinition' => [ 'type' => 'structure', 'required' => [ 'TransformInput', 'TransformOutput', 'TransformResources', ], 'members' => [ 'MaxConcurrentTransforms' => [ 'shape' => 'MaxConcurrentTransforms', ], 'MaxPayloadInMB' => [ 'shape' => 'MaxPayloadInMB', ], 'BatchStrategy' => [ 'shape' => 'BatchStrategy', ], 'Environment' => [ 'shape' => 'TransformEnvironmentMap', ], 'TransformInput' => [ 'shape' => 'TransformInput', ], 'TransformOutput' => [ 'shape' => 'TransformOutput', ], 'TransformResources' => [ 'shape' => 'TransformResources', ], ], ], 'TransformJobName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'TransformJobStatus' => [ 'type' => 'string', 'enum' => [ 'InProgress', 'Completed', 'Failed', 'Stopping', 'Stopped', ], ], 'TransformJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'TransformJobSummary', ], ], 'TransformJobSummary' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', 'TransformJobArn', 'CreationTime', 'TransformJobStatus', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], 'TransformJobArn' => [ 'shape' => 'TransformJobArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TransformEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TransformJobStatus' => [ 'shape' => 'TransformJobStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], ], ], 'TransformOutput' => [ 'type' => 'structure', 'required' => [ 'S3OutputPath', ], 'members' => [ 'S3OutputPath' => [ 'shape' => 'S3Uri', ], 'Accept' => [ 'shape' => 'Accept', ], 'AssembleWith' => [ 'shape' => 'AssemblyType', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'TransformResources' => [ 'type' => 'structure', 'required' => [ 'InstanceType', 'InstanceCount', ], 'members' => [ 'InstanceType' => [ 'shape' => 'TransformInstanceType', ], 'InstanceCount' => [ 'shape' => 'TransformInstanceCount', ], 'VolumeKmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'TransformS3DataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataType', 'S3Uri', ], 'members' => [ 'S3DataType' => [ 'shape' => 'S3DataType', ], 'S3Uri' => [ 'shape' => 'S3Uri', ], ], ], 'USD' => [ 'type' => 'structure', 'members' => [ 'Dollars' => [ 'shape' => 'Dollars', ], 'Cents' => [ 'shape' => 'Cents', ], 'TenthFractionsOfACent' => [ 'shape' => 'TenthFractionsOfACent', ], ], ], 'UiConfig' => [ 'type' => 'structure', 'required' => [ 'UiTemplateS3Uri', ], 'members' => [ 'UiTemplateS3Uri' => [ 'shape' => 'S3Uri', ], ], ], 'UiTemplate' => [ 'type' => 'structure', 'required' => [ 'Content', ], 'members' => [ 'Content' => [ 'shape' => 'TemplateContent', ], ], ], 'UpdateCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'GitConfig' => [ 'shape' => 'GitConfigForUpdate', ], ], ], 'UpdateCodeRepositoryOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryArn', ], 'members' => [ 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], ], ], 'UpdateEndpointInput' => [ 'type' => 'structure', 'required' => [ 
'EndpointName', 'EndpointConfigName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], ], ], 'UpdateEndpointOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointArn', ], 'members' => [ 'EndpointArn' => [ 'shape' => 'EndpointArn', ], ], ], 'UpdateEndpointWeightsAndCapacitiesInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'DesiredWeightsAndCapacities', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'DesiredWeightsAndCapacities' => [ 'shape' => 'DesiredWeightAndCapacityList', ], ], ], 'UpdateEndpointWeightsAndCapacitiesOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointArn', ], 'members' => [ 'EndpointArn' => [ 'shape' => 'EndpointArn', ], ], ], 'UpdateNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'LifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DisassociateLifecycleConfig' => [ 'shape' => 'DisassociateNotebookInstanceLifecycleConfig', ], 'VolumeSizeInGB' => [ 'shape' => 'NotebookInstanceVolumeSizeInGB', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], 'AcceleratorTypes' => [ 'shape' => 'NotebookInstanceAcceleratorTypes', ], 'DisassociateAcceleratorTypes' => [ 'shape' => 'DisassociateNotebookInstanceAcceleratorTypes', ], 'DisassociateDefaultCodeRepository' => [ 'shape' => 'DisassociateDefaultCodeRepository', ], 'DisassociateAdditionalCodeRepositories' => [ 'shape' => 'DisassociateAdditionalCodeRepositories', ], ], ], 'UpdateNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'OnCreate' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'OnStart' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], ], ], 'UpdateNotebookInstanceLifecycleConfigOutput' => [ 'type' => 'structure', 'members' => [], ], 'UpdateNotebookInstanceOutput' => [ 'type' => 'structure', 'members' => [], ], 'UpdateWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], 'MemberDefinitions' => [ 'shape' => 'MemberDefinitions', ], 'Description' => [ 'shape' => 'String200', ], ], ], 'UpdateWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'Workteam', ], 'members' => [ 'Workteam' => [ 'shape' => 'Workteam', ], ], ], 'Url' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '^(https|s3)://([^/]+)/?(.*)$', ], 'VariantName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'VariantWeight' => [ 'type' => 'float', 'min' => 0, ], 'VolumeSizeInGB' => [ 'type' => 'integer', 'min' => 1, ], 'VpcConfig' => [ 'type' => 'structure', 'required' => [ 'SecurityGroupIds', 'Subnets', ], 'members' => [ 'SecurityGroupIds' => [ 'shape' => 'VpcSecurityGroupIds', ], 'Subnets' => [ 'shape' => 'Subnets', ], ], ], 'VpcSecurityGroupIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'SecurityGroupId', ], 'max' => 5, 'min' => 1, ], 'Workteam' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', 'MemberDefinitions', 'WorkteamArn', 
'Description', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], 'MemberDefinitions' => [ 'shape' => 'MemberDefinitions', ], 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'ProductListingIds' => [ 'shape' => 'ProductListings', ], 'Description' => [ 'shape' => 'String200', ], 'SubDomain' => [ 'shape' => 'String', ], 'CreateDate' => [ 'shape' => 'Timestamp', ], 'LastUpdatedDate' => [ 'shape' => 'Timestamp', ], ], ], 'WorkteamArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:workteam/.*', ], 'WorkteamName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'Workteams' => [ 'type' => 'list', 'member' => [ 'shape' => 'Workteam', ], ], ],];
+return [ 'version' => '2.0', 'metadata' => [ 'apiVersion' => '2017-07-24', 'endpointPrefix' => 'api.sagemaker', 'jsonVersion' => '1.1', 'protocol' => 'json', 'serviceAbbreviation' => 'SageMaker', 'serviceFullName' => 'Amazon SageMaker Service', 'serviceId' => 'SageMaker', 'signatureVersion' => 'v4', 'signingName' => 'sagemaker', 'targetPrefix' => 'SageMaker', 'uid' => 'sagemaker-2017-07-24', ], 'operations' => [ 'AddTags' => [ 'name' => 'AddTags', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'AddTagsInput', ], 'output' => [ 'shape' => 'AddTagsOutput', ], ], 'CreateAlgorithm' => [ 'name' => 'CreateAlgorithm', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateAlgorithmInput', ], 'output' => [ 'shape' => 'CreateAlgorithmOutput', ], ], 'CreateCodeRepository' => [ 'name' => 'CreateCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateCodeRepositoryInput', ], 'output' => [ 'shape' => 'CreateCodeRepositoryOutput', ], ], 'CreateCompilationJob' => [ 'name' => 'CreateCompilationJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateCompilationJobRequest', ], 'output' => [ 'shape' => 'CreateCompilationJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateEndpoint' => [ 'name' => 'CreateEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateEndpointInput', ], 'output' => [ 'shape' => 'CreateEndpointOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateEndpointConfig' => [ 'name' => 'CreateEndpointConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateEndpointConfigInput', ], 'output' => [ 'shape' => 'CreateEndpointConfigOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateHyperParameterTuningJob' => [ 'name' => 'CreateHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateHyperParameterTuningJobRequest', ], 'output' => [ 'shape' => 'CreateHyperParameterTuningJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateLabelingJob' => [ 'name' => 'CreateLabelingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateLabelingJobRequest', ], 'output' => [ 'shape' => 'CreateLabelingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateModel' => [ 'name' => 'CreateModel', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateModelInput', ], 'output' => [
'shape' => 'CreateModelOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateModelPackage' => [ 'name' => 'CreateModelPackage', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateModelPackageInput', ], 'output' => [ 'shape' => 'CreateModelPackageOutput', ], ], 'CreateNotebookInstance' => [ 'name' => 'CreateNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateNotebookInstanceInput', ], 'output' => [ 'shape' => 'CreateNotebookInstanceOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateNotebookInstanceLifecycleConfig' => [ 'name' => 'CreateNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateNotebookInstanceLifecycleConfigInput', ], 'output' => [ 'shape' => 'CreateNotebookInstanceLifecycleConfigOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreatePresignedNotebookInstanceUrl' => [ 'name' => 'CreatePresignedNotebookInstanceUrl', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreatePresignedNotebookInstanceUrlInput', ], 'output' => [ 'shape' => 'CreatePresignedNotebookInstanceUrlOutput', ], ], 'CreateTrainingJob' => [ 'name' => 'CreateTrainingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateTrainingJobRequest', ], 'output' => [ 'shape' => 'CreateTrainingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateTransformJob' => [ 'name' => 'CreateTransformJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateTransformJobRequest', ], 'output' => [ 'shape' => 'CreateTransformJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'CreateWorkteam' => [ 'name' => 'CreateWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'CreateWorkteamRequest', ], 'output' => [ 'shape' => 'CreateWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceInUse', ], [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'DeleteAlgorithm' => [ 'name' => 'DeleteAlgorithm', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteAlgorithmInput', ], ], 'DeleteCodeRepository' => [ 'name' => 'DeleteCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteCodeRepositoryInput', ], ], 'DeleteEndpoint' => [ 'name' => 'DeleteEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteEndpointInput', ], ], 'DeleteEndpointConfig' => [ 'name' => 'DeleteEndpointConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteEndpointConfigInput', ], ], 'DeleteModel' => [ 'name' => 'DeleteModel', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteModelInput', ], ], 'DeleteModelPackage' => [ 'name' => 'DeleteModelPackage', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteModelPackageInput', ], ], 'DeleteNotebookInstance' => [ 'name' => 'DeleteNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteNotebookInstanceInput', ], ], 'DeleteNotebookInstanceLifecycleConfig' => [ 'name' => 'DeleteNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 
'input' => [ 'shape' => 'DeleteNotebookInstanceLifecycleConfigInput', ], ], 'DeleteTags' => [ 'name' => 'DeleteTags', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteTagsInput', ], 'output' => [ 'shape' => 'DeleteTagsOutput', ], ], 'DeleteWorkteam' => [ 'name' => 'DeleteWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DeleteWorkteamRequest', ], 'output' => [ 'shape' => 'DeleteWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'DescribeAlgorithm' => [ 'name' => 'DescribeAlgorithm', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeAlgorithmInput', ], 'output' => [ 'shape' => 'DescribeAlgorithmOutput', ], ], 'DescribeCodeRepository' => [ 'name' => 'DescribeCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeCodeRepositoryInput', ], 'output' => [ 'shape' => 'DescribeCodeRepositoryOutput', ], ], 'DescribeCompilationJob' => [ 'name' => 'DescribeCompilationJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeCompilationJobRequest', ], 'output' => [ 'shape' => 'DescribeCompilationJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeEndpoint' => [ 'name' => 'DescribeEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeEndpointInput', ], 'output' => [ 'shape' => 'DescribeEndpointOutput', ], ], 'DescribeEndpointConfig' => [ 'name' => 'DescribeEndpointConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeEndpointConfigInput', ], 'output' => [ 'shape' => 'DescribeEndpointConfigOutput', ], ], 'DescribeHyperParameterTuningJob' => [ 'name' => 'DescribeHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeHyperParameterTuningJobRequest', ], 'output' => [ 'shape' => 'DescribeHyperParameterTuningJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeLabelingJob' => [ 'name' => 'DescribeLabelingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeLabelingJobRequest', ], 'output' => [ 'shape' => 'DescribeLabelingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeModel' => [ 'name' => 'DescribeModel', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeModelInput', ], 'output' => [ 'shape' => 'DescribeModelOutput', ], ], 'DescribeModelPackage' => [ 'name' => 'DescribeModelPackage', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeModelPackageInput', ], 'output' => [ 'shape' => 'DescribeModelPackageOutput', ], ], 'DescribeNotebookInstance' => [ 'name' => 'DescribeNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeNotebookInstanceInput', ], 'output' => [ 'shape' => 'DescribeNotebookInstanceOutput', ], ], 'DescribeNotebookInstanceLifecycleConfig' => [ 'name' => 'DescribeNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeNotebookInstanceLifecycleConfigInput', ], 'output' => [ 'shape' => 'DescribeNotebookInstanceLifecycleConfigOutput', ], ], 'DescribeSubscribedWorkteam' => [ 'name' => 'DescribeSubscribedWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 
'DescribeSubscribedWorkteamRequest', ], 'output' => [ 'shape' => 'DescribeSubscribedWorkteamResponse', ], ], 'DescribeTrainingJob' => [ 'name' => 'DescribeTrainingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeTrainingJobRequest', ], 'output' => [ 'shape' => 'DescribeTrainingJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeTransformJob' => [ 'name' => 'DescribeTransformJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeTransformJobRequest', ], 'output' => [ 'shape' => 'DescribeTransformJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'DescribeWorkteam' => [ 'name' => 'DescribeWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'DescribeWorkteamRequest', ], 'output' => [ 'shape' => 'DescribeWorkteamResponse', ], ], 'GetSearchSuggestions' => [ 'name' => 'GetSearchSuggestions', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'GetSearchSuggestionsRequest', ], 'output' => [ 'shape' => 'GetSearchSuggestionsResponse', ], ], 'ListAlgorithms' => [ 'name' => 'ListAlgorithms', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListAlgorithmsInput', ], 'output' => [ 'shape' => 'ListAlgorithmsOutput', ], ], 'ListCodeRepositories' => [ 'name' => 'ListCodeRepositories', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListCodeRepositoriesInput', ], 'output' => [ 'shape' => 'ListCodeRepositoriesOutput', ], ], 'ListCompilationJobs' => [ 'name' => 'ListCompilationJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListCompilationJobsRequest', ], 'output' => [ 'shape' => 'ListCompilationJobsResponse', ], ], 'ListEndpointConfigs' => [ 'name' => 'ListEndpointConfigs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListEndpointConfigsInput', ], 'output' => [ 'shape' => 'ListEndpointConfigsOutput', ], ], 'ListEndpoints' => [ 'name' => 'ListEndpoints', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListEndpointsInput', ], 'output' => [ 'shape' => 'ListEndpointsOutput', ], ], 'ListHyperParameterTuningJobs' => [ 'name' => 'ListHyperParameterTuningJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListHyperParameterTuningJobsRequest', ], 'output' => [ 'shape' => 'ListHyperParameterTuningJobsResponse', ], ], 'ListLabelingJobs' => [ 'name' => 'ListLabelingJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListLabelingJobsRequest', ], 'output' => [ 'shape' => 'ListLabelingJobsResponse', ], ], 'ListLabelingJobsForWorkteam' => [ 'name' => 'ListLabelingJobsForWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListLabelingJobsForWorkteamRequest', ], 'output' => [ 'shape' => 'ListLabelingJobsForWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'ListModelPackages' => [ 'name' => 'ListModelPackages', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListModelPackagesInput', ], 'output' => [ 'shape' => 'ListModelPackagesOutput', ], ], 'ListModels' => [ 'name' => 'ListModels', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListModelsInput', ], 'output' => [ 'shape' => 'ListModelsOutput', ], ], 'ListNotebookInstanceLifecycleConfigs' => [ 'name' 
=> 'ListNotebookInstanceLifecycleConfigs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListNotebookInstanceLifecycleConfigsInput', ], 'output' => [ 'shape' => 'ListNotebookInstanceLifecycleConfigsOutput', ], ], 'ListNotebookInstances' => [ 'name' => 'ListNotebookInstances', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListNotebookInstancesInput', ], 'output' => [ 'shape' => 'ListNotebookInstancesOutput', ], ], 'ListSubscribedWorkteams' => [ 'name' => 'ListSubscribedWorkteams', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListSubscribedWorkteamsRequest', ], 'output' => [ 'shape' => 'ListSubscribedWorkteamsResponse', ], ], 'ListTags' => [ 'name' => 'ListTags', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTagsInput', ], 'output' => [ 'shape' => 'ListTagsOutput', ], ], 'ListTrainingJobs' => [ 'name' => 'ListTrainingJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTrainingJobsRequest', ], 'output' => [ 'shape' => 'ListTrainingJobsResponse', ], ], 'ListTrainingJobsForHyperParameterTuningJob' => [ 'name' => 'ListTrainingJobsForHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTrainingJobsForHyperParameterTuningJobRequest', ], 'output' => [ 'shape' => 'ListTrainingJobsForHyperParameterTuningJobResponse', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'ListTransformJobs' => [ 'name' => 'ListTransformJobs', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListTransformJobsRequest', ], 'output' => [ 'shape' => 'ListTransformJobsResponse', ], ], 'ListWorkteams' => [ 'name' => 'ListWorkteams', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'ListWorkteamsRequest', ], 'output' => [ 'shape' => 'ListWorkteamsResponse', ], ], 'RenderUiTemplate' => [ 'name' => 'RenderUiTemplate', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'RenderUiTemplateRequest', ], 'output' => [ 'shape' => 'RenderUiTemplateResponse', ], ], 'Search' => [ 'name' => 'Search', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'SearchRequest', ], 'output' => [ 'shape' => 'SearchResponse', ], ], 'StartNotebookInstance' => [ 'name' => 'StartNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StartNotebookInstanceInput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'StopCompilationJob' => [ 'name' => 'StopCompilationJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopCompilationJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopHyperParameterTuningJob' => [ 'name' => 'StopHyperParameterTuningJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopHyperParameterTuningJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopLabelingJob' => [ 'name' => 'StopLabelingJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopLabelingJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopNotebookInstance' => [ 'name' => 'StopNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopNotebookInstanceInput', ], ], 'StopTrainingJob' => [ 'name' => 'StopTrainingJob', 'http' => [ 
'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopTrainingJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'StopTransformJob' => [ 'name' => 'StopTransformJob', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'StopTransformJobRequest', ], 'errors' => [ [ 'shape' => 'ResourceNotFound', ], ], ], 'UpdateCodeRepository' => [ 'name' => 'UpdateCodeRepository', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateCodeRepositoryInput', ], 'output' => [ 'shape' => 'UpdateCodeRepositoryOutput', ], ], 'UpdateEndpoint' => [ 'name' => 'UpdateEndpoint', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateEndpointInput', ], 'output' => [ 'shape' => 'UpdateEndpointOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateEndpointWeightsAndCapacities' => [ 'name' => 'UpdateEndpointWeightsAndCapacities', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateEndpointWeightsAndCapacitiesInput', ], 'output' => [ 'shape' => 'UpdateEndpointWeightsAndCapacitiesOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateNotebookInstance' => [ 'name' => 'UpdateNotebookInstance', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateNotebookInstanceInput', ], 'output' => [ 'shape' => 'UpdateNotebookInstanceOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateNotebookInstanceLifecycleConfig' => [ 'name' => 'UpdateNotebookInstanceLifecycleConfig', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateNotebookInstanceLifecycleConfigInput', ], 'output' => [ 'shape' => 'UpdateNotebookInstanceLifecycleConfigOutput', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], 'UpdateWorkteam' => [ 'name' => 'UpdateWorkteam', 'http' => [ 'method' => 'POST', 'requestUri' => '/', ], 'input' => [ 'shape' => 'UpdateWorkteamRequest', ], 'output' => [ 'shape' => 'UpdateWorkteamResponse', ], 'errors' => [ [ 'shape' => 'ResourceLimitExceeded', ], ], ], ], 'shapes' => [ 'Accept' => [ 'type' => 'string', 'max' => 256, 'pattern' => '.*', ], 'AccountId' => [ 'type' => 'string', 'pattern' => '^\\d+$', ], 'AddTagsInput' => [ 'type' => 'structure', 'required' => [ 'ResourceArn', 'Tags', ], 'members' => [ 'ResourceArn' => [ 'shape' => 'ResourceArn', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'AddTagsOutput' => [ 'type' => 'structure', 'members' => [ 'Tags' => [ 'shape' => 'TagList', ], ], ], 'AdditionalCodeRepositoryNamesOrUrls' => [ 'type' => 'list', 'member' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'max' => 3, ], 'AlgorithmArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:algorithm/.*', ], 'AlgorithmImage' => [ 'type' => 'string', 'max' => 255, 'pattern' => '.*', ], 'AlgorithmSortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'AlgorithmSpecification' => [ 'type' => 'structure', 'required' => [ 'TrainingInputMode', ], 'members' => [ 'TrainingImage' => [ 'shape' => 'AlgorithmImage', ], 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], 'TrainingInputMode' => [ 'shape' => 'TrainingInputMode', ], 'MetricDefinitions' => [ 'shape' => 'MetricDefinitionList', ], ], ], 'AlgorithmStatus' => [ 'type' => 'string', 'enum' => [ 'Pending', 'InProgress', 'Completed', 'Failed', 'Deleting', ], ], 'AlgorithmStatusDetails' => [ 'type' => 
'structure', 'members' => [ 'ValidationStatuses' => [ 'shape' => 'AlgorithmStatusItemList', ], 'ImageScanStatuses' => [ 'shape' => 'AlgorithmStatusItemList', ], ], ], 'AlgorithmStatusItem' => [ 'type' => 'structure', 'required' => [ 'Name', 'Status', ], 'members' => [ 'Name' => [ 'shape' => 'EntityName', ], 'Status' => [ 'shape' => 'DetailedAlgorithmStatus', ], 'FailureReason' => [ 'shape' => 'String', ], ], ], 'AlgorithmStatusItemList' => [ 'type' => 'list', 'member' => [ 'shape' => 'AlgorithmStatusItem', ], ], 'AlgorithmSummary' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', 'AlgorithmArn', 'CreationTime', 'AlgorithmStatus', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], 'AlgorithmArn' => [ 'shape' => 'AlgorithmArn', ], 'AlgorithmDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'AlgorithmStatus' => [ 'shape' => 'AlgorithmStatus', ], ], ], 'AlgorithmSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'AlgorithmSummary', ], ], 'AlgorithmValidationProfile' => [ 'type' => 'structure', 'required' => [ 'ProfileName', 'TrainingJobDefinition', ], 'members' => [ 'ProfileName' => [ 'shape' => 'EntityName', ], 'TrainingJobDefinition' => [ 'shape' => 'TrainingJobDefinition', ], 'TransformJobDefinition' => [ 'shape' => 'TransformJobDefinition', ], ], ], 'AlgorithmValidationProfiles' => [ 'type' => 'list', 'member' => [ 'shape' => 'AlgorithmValidationProfile', ], 'max' => 1, 'min' => 1, ], 'AlgorithmValidationSpecification' => [ 'type' => 'structure', 'required' => [ 'ValidationRole', 'ValidationProfiles', ], 'members' => [ 'ValidationRole' => [ 'shape' => 'RoleArn', ], 'ValidationProfiles' => [ 'shape' => 'AlgorithmValidationProfiles', ], ], ], 'AnnotationConsolidationConfig' => [ 'type' => 'structure', 'required' => [ 'AnnotationConsolidationLambdaArn', ], 'members' => [ 'AnnotationConsolidationLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], ], ], 'ArnOrName' => [ 'type' => 'string', 'max' => 170, 'min' => 1, 'pattern' => '(arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:[a-z\\-]*\\/)?([a-zA-Z0-9]([a-zA-Z0-9-]){0,62})(?<!-)$', ], 'AssemblyType' =>
[ 'type' => 'string', 'enum' => [ 'None', 'Line', ], ], 'AttributeName' => [ 'type' => 'string', 'max' => 256, 'min' => 1, 'pattern' => '.+', ], 'AttributeNames' => [ 'type' => 'list', 'member' => [ 'shape' => 'AttributeName', ], 'max' => 16, ], 'BatchStrategy' => [ 'type' => 'string', 'enum' => [ 'MultiRecord', 'SingleRecord', ], ], 'Boolean' => [ 'type' => 'boolean', ], 'BooleanOperator' => [ 'type' => 'string', 'enum' => [ 'And', 'Or', ], ], 'Branch' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, 'pattern' => '[^ ~^:?*\\[]+', ], 'CategoricalParameterRange' => [ 'type' => 'structure', 'required' => [ 'Name', 'Values', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterKey', ], 'Values' => [ 'shape' => 'ParameterValues', ], ], ], 'CategoricalParameterRangeSpecification' => [ 'type' => 'structure', 'required' => [ 'Values', ], 'members' => [ 'Values' => [ 'shape' => 'ParameterValues', ], ], ], 'CategoricalParameterRanges' => [ 'type' => 'list', 'member' => [ 'shape' => 'CategoricalParameterRange', ], 'max' => 20, 'min' => 0, ], 'Cents' => [ 'type' => 'integer', 'max' => 99, 'min' => 0, ], 'CertifyForMarketplace' => [ 'type' => 'boolean', ], 'Channel' => [ 'type' => 'structure', 'required' => [ 'ChannelName', 'DataSource', ], 'members' => [ 'ChannelName' => [ 'shape' => 'ChannelName', ], 'DataSource' => [ 'shape' => 'DataSource', ], 'ContentType' => [ 'shape' => 'ContentType', ], 'CompressionType' => [ 'shape' => 'CompressionType', ], 'RecordWrapperType' => [ 'shape' => 'RecordWrapper', ], 'InputMode' => [ 'shape' => 'TrainingInputMode', ], 'ShuffleConfig' => [ 'shape' => 'ShuffleConfig', ], ], ], 'ChannelName' => [ 'type' => 'string', 'max' => 64, 'min' => 1, 'pattern' => '[A-Za-z0-9\\.\\-_]+', ], 'ChannelSpecification' => [ 'type' => 'structure', 'required' => [ 'Name', 'SupportedContentTypes', 'SupportedInputModes', ], 'members' => [ 'Name' => [ 'shape' => 'ChannelName', ], 'Description' => [ 'shape' => 'EntityDescription', ], 'IsRequired' => [ 'shape' => 'Boolean', ], 'SupportedContentTypes' => [ 'shape' => 'ContentTypes', ], 'SupportedCompressionTypes' => [ 'shape' => 'CompressionTypes', ], 'SupportedInputModes' => [ 'shape' => 'InputModes', ], ], ], 'ChannelSpecifications' => [ 'type' => 'list', 'member' => [ 'shape' => 'ChannelSpecification', ], 'max' => 8, 'min' => 1, ], 'CodeRepositoryArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:code-repository/.*', ], 'CodeRepositoryContains' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[a-zA-Z0-9-]+', ], 'CodeRepositoryNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'CodeRepositoryNameOrUrl' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, 'pattern' => '^https://([^/]+)/?(.*)$|^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'CodeRepositorySortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'LastModifiedTime', ], ], 'CodeRepositorySortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'CodeRepositorySummary' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', 'CodeRepositoryArn', 'CreationTime', 'LastModifiedTime', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'GitConfig' => [ 'shape' => 'GitConfig', ], ], ], 'CodeRepositorySummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 
'CodeRepositorySummary', ], ], 'CognitoClientId' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '[\\w+]+', ], 'CognitoMemberDefinition' => [ 'type' => 'structure', 'required' => [ 'UserPool', 'UserGroup', 'ClientId', ], 'members' => [ 'UserPool' => [ 'shape' => 'CognitoUserPool', ], 'UserGroup' => [ 'shape' => 'CognitoUserGroup', ], 'ClientId' => [ 'shape' => 'CognitoClientId', ], ], ], 'CognitoUserGroup' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '[\\p{L}\\p{M}\\p{S}\\p{N}\\p{P}]+', ], 'CognitoUserPool' => [ 'type' => 'string', 'max' => 55, 'min' => 1, 'pattern' => '[\\w-]+_[0-9a-zA-Z]+', ], 'CompilationJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:compilation-job/.*', ], 'CompilationJobStatus' => [ 'type' => 'string', 'enum' => [ 'INPROGRESS', 'COMPLETED', 'FAILED', 'STARTING', 'STOPPING', 'STOPPED', ], ], 'CompilationJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'CompilationJobSummary', ], ], 'CompilationJobSummary' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', 'CompilationJobArn', 'CreationTime', 'CompilationTargetDevice', 'CompilationJobStatus', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], 'CompilationJobArn' => [ 'shape' => 'CompilationJobArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'CompilationStartTime' => [ 'shape' => 'Timestamp', ], 'CompilationEndTime' => [ 'shape' => 'Timestamp', ], 'CompilationTargetDevice' => [ 'shape' => 'TargetDevice', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'CompilationJobStatus' => [ 'shape' => 'CompilationJobStatus', ], ], ], 'CompressionType' => [ 'type' => 'string', 'enum' => [ 'None', 'Gzip', ], ], 'CompressionTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'CompressionType', ], ], 'ContainerDefinition' => [ 'type' => 'structure', 'members' => [ 'ContainerHostname' => [ 'shape' => 'ContainerHostname', ], 'Image' => [ 'shape' => 'Image', ], 'ModelDataUrl' => [ 'shape' => 'Url', ], 'Environment' => [ 'shape' => 'EnvironmentMap', ], 'ModelPackageName' => [ 'shape' => 'ArnOrName', ], ], ], 'ContainerDefinitionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContainerDefinition', ], 'max' => 5, ], 'ContainerHostname' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'ContentClassifier' => [ 'type' => 'string', 'enum' => [ 'FreeOfPersonallyIdentifiableInformation', 'FreeOfAdultContent', ], ], 'ContentClassifiers' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContentClassifier', ], 'max' => 256, ], 'ContentType' => [ 'type' => 'string', 'max' => 256, 'pattern' => '.*', ], 'ContentTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContentType', ], ], 'ContinuousParameterRange' => [ 'type' => 'structure', 'required' => [ 'Name', 'MinValue', 'MaxValue', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterKey', ], 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'ContinuousParameterRangeSpecification' => [ 'type' => 'structure', 'required' => [ 'MinValue', 'MaxValue', ], 'members' => [ 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'ContinuousParameterRanges' => [ 'type' => 'list', 'member' => [ 'shape' => 'ContinuousParameterRange', ], 'max' => 20, 'min' => 0, ], 'CreateAlgorithmInput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', 'TrainingSpecification', ], 'members' => [ 'AlgorithmName' 
=> [ 'shape' => 'EntityName', ], 'AlgorithmDescription' => [ 'shape' => 'EntityDescription', ], 'TrainingSpecification' => [ 'shape' => 'TrainingSpecification', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'ValidationSpecification' => [ 'shape' => 'AlgorithmValidationSpecification', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'CreateAlgorithmOutput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmArn', ], 'members' => [ 'AlgorithmArn' => [ 'shape' => 'AlgorithmArn', ], ], ], 'CreateCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', 'GitConfig', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'GitConfig' => [ 'shape' => 'GitConfig', ], ], ], 'CreateCodeRepositoryOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryArn', ], 'members' => [ 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], ], ], 'CreateCompilationJobRequest' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', 'RoleArn', 'InputConfig', 'OutputConfig', 'StoppingCondition', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputConfig' => [ 'shape' => 'InputConfig', ], 'OutputConfig' => [ 'shape' => 'OutputConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], ], ], 'CreateCompilationJobResponse' => [ 'type' => 'structure', 'required' => [ 'CompilationJobArn', ], 'members' => [ 'CompilationJobArn' => [ 'shape' => 'CompilationJobArn', ], ], ], 'CreateEndpointConfigInput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', 'ProductionVariants', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'ProductionVariants' => [ 'shape' => 'ProductionVariantList', ], 'Tags' => [ 'shape' => 'TagList', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'CreateEndpointConfigOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigArn', ], 'members' => [ 'EndpointConfigArn' => [ 'shape' => 'EndpointConfigArn', ], ], ], 'CreateEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointConfigName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateEndpointOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointArn', ], 'members' => [ 'EndpointArn' => [ 'shape' => 'EndpointArn', ], ], ], 'CreateHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', 'HyperParameterTuningJobConfig', 'TrainingJobDefinition', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'HyperParameterTuningJobConfig' => [ 'shape' => 'HyperParameterTuningJobConfig', ], 'TrainingJobDefinition' => [ 'shape' => 'HyperParameterTrainingJobDefinition', ], 'WarmStartConfig' => [ 'shape' => 'HyperParameterTuningJobWarmStartConfig', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateHyperParameterTuningJobResponse' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobArn', ], 'members' => [ 'HyperParameterTuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], ], ], 'CreateLabelingJobRequest' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', 'LabelAttributeName', 'InputConfig', 'OutputConfig', 'RoleArn', 'HumanTaskConfig', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 
'LabelAttributeName' => [ 'shape' => 'LabelAttributeName', ], 'InputConfig' => [ 'shape' => 'LabelingJobInputConfig', ], 'OutputConfig' => [ 'shape' => 'LabelingJobOutputConfig', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'LabelCategoryConfigS3Uri' => [ 'shape' => 'S3Uri', ], 'StoppingConditions' => [ 'shape' => 'LabelingJobStoppingConditions', ], 'LabelingJobAlgorithmsConfig' => [ 'shape' => 'LabelingJobAlgorithmsConfig', ], 'HumanTaskConfig' => [ 'shape' => 'HumanTaskConfig', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateLabelingJobResponse' => [ 'type' => 'structure', 'required' => [ 'LabelingJobArn', ], 'members' => [ 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], ], ], 'CreateModelInput' => [ 'type' => 'structure', 'required' => [ 'ModelName', 'ExecutionRoleArn', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], 'PrimaryContainer' => [ 'shape' => 'ContainerDefinition', ], 'Containers' => [ 'shape' => 'ContainerDefinitionList', ], 'ExecutionRoleArn' => [ 'shape' => 'RoleArn', ], 'Tags' => [ 'shape' => 'TagList', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], ], ], 'CreateModelOutput' => [ 'type' => 'structure', 'required' => [ 'ModelArn', ], 'members' => [ 'ModelArn' => [ 'shape' => 'ModelArn', ], ], ], 'CreateModelPackageInput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], 'ModelPackageDescription' => [ 'shape' => 'EntityDescription', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'ValidationSpecification' => [ 'shape' => 'ModelPackageValidationSpecification', ], 'SourceAlgorithmSpecification' => [ 'shape' => 'SourceAlgorithmSpecification', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'CreateModelPackageOutput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageArn', ], 'members' => [ 'ModelPackageArn' => [ 'shape' => 'ModelPackageArn', ], ], ], 'CreateNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', 'InstanceType', 'RoleArn', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'SubnetId' => [ 'shape' => 'SubnetId', ], 'SecurityGroupIds' => [ 'shape' => 'SecurityGroupIds', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'Tags' => [ 'shape' => 'TagList', ], 'LifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DirectInternetAccess' => [ 'shape' => 'DirectInternetAccess', ], 'VolumeSizeInGB' => [ 'shape' => 'NotebookInstanceVolumeSizeInGB', ], 'AcceleratorTypes' => [ 'shape' => 'NotebookInstanceAcceleratorTypes', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], 'RootAccess' => [ 'shape' => 'RootAccess', ], ], ], 'CreateNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'OnCreate' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'OnStart' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], ], ], 'CreateNotebookInstanceLifecycleConfigOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceLifecycleConfigArn' => [ 'shape' => 
'NotebookInstanceLifecycleConfigArn', ], ], ], 'CreateNotebookInstanceOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceArn' => [ 'shape' => 'NotebookInstanceArn', ], ], ], 'CreatePresignedNotebookInstanceUrlInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'SessionExpirationDurationInSeconds' => [ 'shape' => 'SessionExpirationDurationInSeconds', ], ], ], 'CreatePresignedNotebookInstanceUrlOutput' => [ 'type' => 'structure', 'members' => [ 'AuthorizedUrl' => [ 'shape' => 'NotebookInstanceUrl', ], ], ], 'CreateTrainingJobRequest' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'AlgorithmSpecification', 'RoleArn', 'OutputDataConfig', 'ResourceConfig', 'StoppingCondition', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'AlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'Tags' => [ 'shape' => 'TagList', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], ], ], 'CreateTrainingJobResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobArn', ], 'members' => [ 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], ], ], 'CreateTransformJobRequest' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', 'ModelName', 'TransformInput', 'TransformOutput', 'TransformResources', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], 'ModelName' => [ 'shape' => 'ModelName', ], 'MaxConcurrentTransforms' => [ 'shape' => 'MaxConcurrentTransforms', ], 'MaxPayloadInMB' => [ 'shape' => 'MaxPayloadInMB', ], 'BatchStrategy' => [ 'shape' => 'BatchStrategy', ], 'Environment' => [ 'shape' => 'TransformEnvironmentMap', ], 'TransformInput' => [ 'shape' => 'TransformInput', ], 'TransformOutput' => [ 'shape' => 'TransformOutput', ], 'TransformResources' => [ 'shape' => 'TransformResources', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateTransformJobResponse' => [ 'type' => 'structure', 'required' => [ 'TransformJobArn', ], 'members' => [ 'TransformJobArn' => [ 'shape' => 'TransformJobArn', ], ], ], 'CreateWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', 'MemberDefinitions', 'Description', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], 'MemberDefinitions' => [ 'shape' => 'MemberDefinitions', ], 'Description' => [ 'shape' => 'String200', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'CreateWorkteamResponse' => [ 'type' => 'structure', 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], ], ], 'CreationTime' => [ 'type' => 'timestamp', ], 'DataInputConfig' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, 'pattern' => '[\\S\\s]+', ], 'DataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataSource', ], 'members' => [ 'S3DataSource' => [ 'shape' => 'S3DataSource', ], ], ], 'DeleteAlgorithmInput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], ], ], 'DeleteCodeRepositoryInput' => [ 'type' => 'structure', 'required' 
=> [ 'CodeRepositoryName', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], ], ], 'DeleteEndpointConfigInput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], ], ], 'DeleteEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], ], ], 'DeleteModelInput' => [ 'type' => 'structure', 'required' => [ 'ModelName', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], ], ], 'DeleteModelPackageInput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], ], ], 'DeleteNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'DeleteNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], ], ], 'DeleteTagsInput' => [ 'type' => 'structure', 'required' => [ 'ResourceArn', 'TagKeys', ], 'members' => [ 'ResourceArn' => [ 'shape' => 'ResourceArn', ], 'TagKeys' => [ 'shape' => 'TagKeyList', ], ], ], 'DeleteTagsOutput' => [ 'type' => 'structure', 'members' => [], ], 'DeleteWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], ], ], 'DeleteWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'Success', ], 'members' => [ 'Success' => [ 'shape' => 'Success', ], ], ], 'DeployedImage' => [ 'type' => 'structure', 'members' => [ 'SpecifiedImage' => [ 'shape' => 'Image', ], 'ResolvedImage' => [ 'shape' => 'Image', ], 'ResolutionTime' => [ 'shape' => 'Timestamp', ], ], ], 'DeployedImages' => [ 'type' => 'list', 'member' => [ 'shape' => 'DeployedImage', ], ], 'DescribeAlgorithmInput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], ], ], 'DescribeAlgorithmOutput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', 'AlgorithmArn', 'CreationTime', 'TrainingSpecification', 'AlgorithmStatus', 'AlgorithmStatusDetails', ], 'members' => [ 'AlgorithmName' => [ 'shape' => 'EntityName', ], 'AlgorithmArn' => [ 'shape' => 'AlgorithmArn', ], 'AlgorithmDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'TrainingSpecification' => [ 'shape' => 'TrainingSpecification', ], 'InferenceSpecification' => [ 'shape' => 'InferenceSpecification', ], 'ValidationSpecification' => [ 'shape' => 'AlgorithmValidationSpecification', ], 'AlgorithmStatus' => [ 'shape' => 'AlgorithmStatus', ], 'AlgorithmStatusDetails' => [ 'shape' => 'AlgorithmStatusDetails', ], 'ProductId' => [ 'shape' => 'ProductId', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'DescribeCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], ], ], 'DescribeCodeRepositoryOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', 'CodeRepositoryArn', 'CreationTime', 'LastModifiedTime', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], 
'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'GitConfig' => [ 'shape' => 'GitConfig', ], ], ], 'DescribeCompilationJobRequest' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], ], ], 'DescribeCompilationJobResponse' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', 'CompilationJobArn', 'CompilationJobStatus', 'StoppingCondition', 'CreationTime', 'LastModifiedTime', 'FailureReason', 'ModelArtifacts', 'RoleArn', 'InputConfig', 'OutputConfig', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], 'CompilationJobArn' => [ 'shape' => 'CompilationJobArn', ], 'CompilationJobStatus' => [ 'shape' => 'CompilationJobStatus', ], 'CompilationStartTime' => [ 'shape' => 'Timestamp', ], 'CompilationEndTime' => [ 'shape' => 'Timestamp', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'ModelArtifacts' => [ 'shape' => 'ModelArtifacts', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputConfig' => [ 'shape' => 'InputConfig', ], 'OutputConfig' => [ 'shape' => 'OutputConfig', ], ], ], 'DescribeEndpointConfigInput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], ], ], 'DescribeEndpointConfigOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', 'EndpointConfigArn', 'ProductionVariants', 'CreationTime', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'EndpointConfigArn' => [ 'shape' => 'EndpointConfigArn', ], 'ProductionVariants' => [ 'shape' => 'ProductionVariantList', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], ], ], 'DescribeEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], ], ], 'DescribeEndpointOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointArn', 'EndpointConfigName', 'EndpointStatus', 'CreationTime', 'LastModifiedTime', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointArn' => [ 'shape' => 'EndpointArn', ], 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'ProductionVariants' => [ 'shape' => 'ProductionVariantSummaryList', ], 'EndpointStatus' => [ 'shape' => 'EndpointStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], ], ], 'DescribeHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], ], ], 'DescribeHyperParameterTuningJobResponse' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', 'HyperParameterTuningJobArn', 'HyperParameterTuningJobConfig', 'TrainingJobDefinition', 'HyperParameterTuningJobStatus', 'CreationTime', 'TrainingJobStatusCounters', 'ObjectiveStatusCounters', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'HyperParameterTuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'HyperParameterTuningJobConfig' => [ 'shape' => 'HyperParameterTuningJobConfig', 
], 'TrainingJobDefinition' => [ 'shape' => 'HyperParameterTrainingJobDefinition', ], 'HyperParameterTuningJobStatus' => [ 'shape' => 'HyperParameterTuningJobStatus', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'HyperParameterTuningEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatusCounters' => [ 'shape' => 'TrainingJobStatusCounters', ], 'ObjectiveStatusCounters' => [ 'shape' => 'ObjectiveStatusCounters', ], 'BestTrainingJob' => [ 'shape' => 'HyperParameterTrainingJobSummary', ], 'OverallBestTrainingJob' => [ 'shape' => 'HyperParameterTrainingJobSummary', ], 'WarmStartConfig' => [ 'shape' => 'HyperParameterTuningJobWarmStartConfig', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], ], ], 'DescribeLabelingJobRequest' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], ], ], 'DescribeLabelingJobResponse' => [ 'type' => 'structure', 'required' => [ 'LabelingJobStatus', 'LabelCounters', 'CreationTime', 'LastModifiedTime', 'JobReferenceCode', 'LabelingJobName', 'LabelingJobArn', 'InputConfig', 'OutputConfig', 'RoleArn', 'HumanTaskConfig', ], 'members' => [ 'LabelingJobStatus' => [ 'shape' => 'LabelingJobStatus', ], 'LabelCounters' => [ 'shape' => 'LabelCounters', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'JobReferenceCode' => [ 'shape' => 'JobReferenceCode', ], 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'LabelAttributeName' => [ 'shape' => 'LabelAttributeName', ], 'InputConfig' => [ 'shape' => 'LabelingJobInputConfig', ], 'OutputConfig' => [ 'shape' => 'LabelingJobOutputConfig', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'LabelCategoryConfigS3Uri' => [ 'shape' => 'S3Uri', ], 'StoppingConditions' => [ 'shape' => 'LabelingJobStoppingConditions', ], 'LabelingJobAlgorithmsConfig' => [ 'shape' => 'LabelingJobAlgorithmsConfig', ], 'HumanTaskConfig' => [ 'shape' => 'HumanTaskConfig', ], 'Tags' => [ 'shape' => 'TagList', ], 'LabelingJobOutput' => [ 'shape' => 'LabelingJobOutput', ], ], ], 'DescribeModelInput' => [ 'type' => 'structure', 'required' => [ 'ModelName', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], ], ], 'DescribeModelOutput' => [ 'type' => 'structure', 'required' => [ 'ModelName', 'ExecutionRoleArn', 'CreationTime', 'ModelArn', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], 'PrimaryContainer' => [ 'shape' => 'ContainerDefinition', ], 'Containers' => [ 'shape' => 'ContainerDefinitionList', ], 'ExecutionRoleArn' => [ 'shape' => 'RoleArn', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'ModelArn' => [ 'shape' => 'ModelArn', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], ], ], 'DescribeModelPackageInput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'ArnOrName', ], ], ], 'DescribeModelPackageOutput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', 'ModelPackageArn', 'CreationTime', 'ModelPackageStatus', 'ModelPackageStatusDetails', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], 'ModelPackageArn' => [ 'shape' => 'ModelPackageArn', ], 'ModelPackageDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'InferenceSpecification' 
=> [ 'shape' => 'InferenceSpecification', ], 'SourceAlgorithmSpecification' => [ 'shape' => 'SourceAlgorithmSpecification', ], 'ValidationSpecification' => [ 'shape' => 'ModelPackageValidationSpecification', ], 'ModelPackageStatus' => [ 'shape' => 'ModelPackageStatus', ], 'ModelPackageStatusDetails' => [ 'shape' => 'ModelPackageStatusDetails', ], 'CertifyForMarketplace' => [ 'shape' => 'CertifyForMarketplace', ], ], ], 'DescribeNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'DescribeNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], ], ], 'DescribeNotebookInstanceLifecycleConfigOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceLifecycleConfigArn' => [ 'shape' => 'NotebookInstanceLifecycleConfigArn', ], 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'OnCreate' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'OnStart' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], ], ], 'DescribeNotebookInstanceOutput' => [ 'type' => 'structure', 'members' => [ 'NotebookInstanceArn' => [ 'shape' => 'NotebookInstanceArn', ], 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'NotebookInstanceStatus' => [ 'shape' => 'NotebookInstanceStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'Url' => [ 'shape' => 'NotebookInstanceUrl', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'SubnetId' => [ 'shape' => 'SubnetId', ], 'SecurityGroups' => [ 'shape' => 'SecurityGroupIds', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'NetworkInterfaceId' => [ 'shape' => 'NetworkInterfaceId', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DirectInternetAccess' => [ 'shape' => 'DirectInternetAccess', ], 'VolumeSizeInGB' => [ 'shape' => 'NotebookInstanceVolumeSizeInGB', ], 'AcceleratorTypes' => [ 'shape' => 'NotebookInstanceAcceleratorTypes', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], 'RootAccess' => [ 'shape' => 'RootAccess', ], ], ], 'DescribeSubscribedWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], ], ], 'DescribeSubscribedWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'SubscribedWorkteam', ], 'members' => [ 'SubscribedWorkteam' => [ 'shape' => 'SubscribedWorkteam', ], ], ], 'DescribeTrainingJobRequest' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], ], ], 'DescribeTrainingJobResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'TrainingJobArn', 'ModelArtifacts', 'TrainingJobStatus', 'SecondaryStatus', 'AlgorithmSpecification', 'ResourceConfig', 'StoppingCondition', 'CreationTime', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' 
=> [ 'shape' => 'TrainingJobArn', ], 'TuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'ModelArtifacts' => [ 'shape' => 'ModelArtifacts', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], 'SecondaryStatus' => [ 'shape' => 'SecondaryStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'AlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingStartTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'SecondaryStatusTransitions' => [ 'shape' => 'SecondaryStatusTransitions', ], 'FinalMetricDataList' => [ 'shape' => 'FinalMetricDataList', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], ], ], 'DescribeTransformJobRequest' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], ], ], 'DescribeTransformJobResponse' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', 'TransformJobArn', 'TransformJobStatus', 'ModelName', 'TransformInput', 'TransformResources', 'CreationTime', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], 'TransformJobArn' => [ 'shape' => 'TransformJobArn', ], 'TransformJobStatus' => [ 'shape' => 'TransformJobStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'ModelName' => [ 'shape' => 'ModelName', ], 'MaxConcurrentTransforms' => [ 'shape' => 'MaxConcurrentTransforms', ], 'MaxPayloadInMB' => [ 'shape' => 'MaxPayloadInMB', ], 'BatchStrategy' => [ 'shape' => 'BatchStrategy', ], 'Environment' => [ 'shape' => 'TransformEnvironmentMap', ], 'TransformInput' => [ 'shape' => 'TransformInput', ], 'TransformOutput' => [ 'shape' => 'TransformOutput', ], 'TransformResources' => [ 'shape' => 'TransformResources', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TransformStartTime' => [ 'shape' => 'Timestamp', ], 'TransformEndTime' => [ 'shape' => 'Timestamp', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], ], ], 'DescribeWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], ], ], 'DescribeWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'Workteam', ], 'members' => [ 'Workteam' => [ 'shape' => 'Workteam', ], ], ], 'DesiredWeightAndCapacity' => [ 'type' => 'structure', 'required' => [ 'VariantName', ], 'members' => [ 'VariantName' => [ 'shape' => 'VariantName', ], 'DesiredWeight' => [ 'shape' => 'VariantWeight', ], 'DesiredInstanceCount' => [ 'shape' => 'TaskCount', ], ], ], 'DesiredWeightAndCapacityList' => [ 'type' => 'list', 'member' => [ 'shape' => 'DesiredWeightAndCapacity', ], 'min' => 1, ], 'DetailedAlgorithmStatus' => [ 'type' => 'string', 'enum' => [ 'NotStarted', 'InProgress', 'Completed', 'Failed', ], ], 'DetailedModelPackageStatus' => [ 'type' => 'string', 'enum' => [ 'NotStarted', 'InProgress', 'Completed', 'Failed', ], ], 'DirectInternetAccess' => [ 
'type' => 'string', 'enum' => [ 'Enabled', 'Disabled', ], ], 'DisassociateAdditionalCodeRepositories' => [ 'type' => 'boolean', ], 'DisassociateDefaultCodeRepository' => [ 'type' => 'boolean', ], 'DisassociateNotebookInstanceAcceleratorTypes' => [ 'type' => 'boolean', ], 'DisassociateNotebookInstanceLifecycleConfig' => [ 'type' => 'boolean', ], 'Dollars' => [ 'type' => 'integer', 'max' => 1, 'min' => 0, ], 'EndpointArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint/.*', ], 'EndpointConfigArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:endpoint-config/.*', ], 'EndpointConfigName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'EndpointConfigNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'EndpointConfigSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'EndpointConfigSummary' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigName', 'EndpointConfigArn', 'CreationTime', ], 'members' => [ 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], 'EndpointConfigArn' => [ 'shape' => 'EndpointConfigArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], ], ], 'EndpointConfigSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'EndpointConfigSummary', ], ], 'EndpointName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'EndpointNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'EndpointSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'EndpointStatus' => [ 'type' => 'string', 'enum' => [ 'OutOfService', 'Creating', 'Updating', 'SystemUpdating', 'RollingBack', 'InService', 'Deleting', 'Failed', ], ], 'EndpointSummary' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointArn', 'CreationTime', 'LastModifiedTime', 'EndpointStatus', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointArn' => [ 'shape' => 'EndpointArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'EndpointStatus' => [ 'shape' => 'EndpointStatus', ], ], ], 'EndpointSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'EndpointSummary', ], ], 'EntityDescription' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*', ], 'EntityName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*$', ], 'EnvironmentKey' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[a-zA-Z_][a-zA-Z0-9_]*', ], 'EnvironmentMap' => [ 'type' => 'map', 'key' => [ 'shape' => 'EnvironmentKey', ], 'value' => [ 'shape' => 'EnvironmentValue', ], 'max' => 16, ], 'EnvironmentValue' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[\\S\\s]*', ], 'FailureReason' => [ 'type' => 'string', 'max' => 1024, ], 'Filter' => [ 'type' => 'structure', 'required' => [ 'Name', ], 'members' => [ 'Name' => [ 'shape' => 'ResourcePropertyName', ], 'Operator' => [ 'shape' => 'Operator', ], 'Value' => [ 'shape' => 'FilterValue', ], ], ], 'FilterList' => [ 'type' => 'list', 'member' => [ 'shape' => 'Filter', ], 'max' => 20, 'min' => 1, ], 'FilterValue' => [ 'type' => 'string', 'max' => 1024, 'min' => 1, 'pattern' => '.+', ], 'FinalHyperParameterTuningJobObjectiveMetric' => [ 'type' => 'structure', 'required' => [ 'MetricName', 
'Value', ], 'members' => [ 'Type' => [ 'shape' => 'HyperParameterTuningJobObjectiveType', ], 'MetricName' => [ 'shape' => 'MetricName', ], 'Value' => [ 'shape' => 'MetricValue', ], ], ], 'FinalMetricDataList' => [ 'type' => 'list', 'member' => [ 'shape' => 'MetricData', ], 'max' => 20, 'min' => 0, ], 'Float' => [ 'type' => 'float', ], 'Framework' => [ 'type' => 'string', 'enum' => [ 'TENSORFLOW', 'MXNET', 'ONNX', 'PYTORCH', 'XGBOOST', ], ], 'GetSearchSuggestionsRequest' => [ 'type' => 'structure', 'required' => [ 'Resource', ], 'members' => [ 'Resource' => [ 'shape' => 'ResourceType', ], 'SuggestionQuery' => [ 'shape' => 'SuggestionQuery', ], ], ], 'GetSearchSuggestionsResponse' => [ 'type' => 'structure', 'members' => [ 'PropertyNameSuggestions' => [ 'shape' => 'PropertyNameSuggestionList', ], ], ], 'GitConfig' => [ 'type' => 'structure', 'required' => [ 'RepositoryUrl', ], 'members' => [ 'RepositoryUrl' => [ 'shape' => 'GitConfigUrl', ], 'Branch' => [ 'shape' => 'Branch', ], 'SecretArn' => [ 'shape' => 'SecretArn', ], ], ], 'GitConfigForUpdate' => [ 'type' => 'structure', 'members' => [ 'SecretArn' => [ 'shape' => 'SecretArn', ], ], ], 'GitConfigUrl' => [ 'type' => 'string', 'pattern' => '^https://([^/]+)/?(.*)$', ], 'HumanTaskConfig' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', 'UiConfig', 'PreHumanTaskLambdaArn', 'TaskTitle', 'TaskDescription', 'NumberOfHumanWorkersPerDataObject', 'TaskTimeLimitInSeconds', 'AnnotationConsolidationConfig', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'UiConfig' => [ 'shape' => 'UiConfig', ], 'PreHumanTaskLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], 'TaskKeywords' => [ 'shape' => 'TaskKeywords', ], 'TaskTitle' => [ 'shape' => 'TaskTitle', ], 'TaskDescription' => [ 'shape' => 'TaskDescription', ], 'NumberOfHumanWorkersPerDataObject' => [ 'shape' => 'NumberOfHumanWorkersPerDataObject', ], 'TaskTimeLimitInSeconds' => [ 'shape' => 'TaskTimeLimitInSeconds', ], 'TaskAvailabilityLifetimeInSeconds' => [ 'shape' => 'TaskAvailabilityLifetimeInSeconds', ], 'MaxConcurrentTaskCount' => [ 'shape' => 'MaxConcurrentTaskCount', ], 'AnnotationConsolidationConfig' => [ 'shape' => 'AnnotationConsolidationConfig', ], 'PublicWorkforceTaskPrice' => [ 'shape' => 'PublicWorkforceTaskPrice', ], ], ], 'HyperParameterAlgorithmSpecification' => [ 'type' => 'structure', 'required' => [ 'TrainingInputMode', ], 'members' => [ 'TrainingImage' => [ 'shape' => 'AlgorithmImage', ], 'TrainingInputMode' => [ 'shape' => 'TrainingInputMode', ], 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], 'MetricDefinitions' => [ 'shape' => 'MetricDefinitionList', ], ], ], 'HyperParameterSpecification' => [ 'type' => 'structure', 'required' => [ 'Name', 'Type', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterName', ], 'Description' => [ 'shape' => 'EntityDescription', ], 'Type' => [ 'shape' => 'ParameterType', ], 'Range' => [ 'shape' => 'ParameterRange', ], 'IsTunable' => [ 'shape' => 'Boolean', ], 'IsRequired' => [ 'shape' => 'Boolean', ], 'DefaultValue' => [ 'shape' => 'ParameterValue', ], ], ], 'HyperParameterSpecifications' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterSpecification', ], 'max' => 100, 'min' => 0, ], 'HyperParameterTrainingJobDefinition' => [ 'type' => 'structure', 'required' => [ 'AlgorithmSpecification', 'RoleArn', 'OutputDataConfig', 'ResourceConfig', 'StoppingCondition', ], 'members' => [ 'StaticHyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 
'HyperParameterAlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], ], ], 'HyperParameterTrainingJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterTrainingJobSummary', ], ], 'HyperParameterTrainingJobSummary' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'TrainingJobArn', 'CreationTime', 'TrainingJobStatus', 'TunedHyperParameters', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'TuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingStartTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], 'TunedHyperParameters' => [ 'shape' => 'HyperParameters', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'FinalHyperParameterTuningJobObjectiveMetric' => [ 'shape' => 'FinalHyperParameterTuningJobObjectiveMetric', ], 'ObjectiveStatus' => [ 'shape' => 'ObjectiveStatus', ], ], ], 'HyperParameterTuningJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:hyper-parameter-tuning-job/.*', ], 'HyperParameterTuningJobConfig' => [ 'type' => 'structure', 'required' => [ 'Strategy', 'HyperParameterTuningJobObjective', 'ResourceLimits', 'ParameterRanges', ], 'members' => [ 'Strategy' => [ 'shape' => 'HyperParameterTuningJobStrategyType', ], 'HyperParameterTuningJobObjective' => [ 'shape' => 'HyperParameterTuningJobObjective', ], 'ResourceLimits' => [ 'shape' => 'ResourceLimits', ], 'ParameterRanges' => [ 'shape' => 'ParameterRanges', ], 'TrainingJobEarlyStoppingType' => [ 'shape' => 'TrainingJobEarlyStoppingType', ], ], ], 'HyperParameterTuningJobName' => [ 'type' => 'string', 'max' => 32, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'HyperParameterTuningJobObjective' => [ 'type' => 'structure', 'required' => [ 'Type', 'MetricName', ], 'members' => [ 'Type' => [ 'shape' => 'HyperParameterTuningJobObjectiveType', ], 'MetricName' => [ 'shape' => 'MetricName', ], ], ], 'HyperParameterTuningJobObjectiveType' => [ 'type' => 'string', 'enum' => [ 'Maximize', 'Minimize', ], ], 'HyperParameterTuningJobObjectives' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterTuningJobObjective', ], ], 'HyperParameterTuningJobSortByOptions' => [ 'type' => 'string', 'enum' => [ 'Name', 'Status', 'CreationTime', ], ], 'HyperParameterTuningJobStatus' => [ 'type' => 'string', 'enum' => [ 'Completed', 'InProgress', 'Failed', 'Stopped', 'Stopping', ], ], 'HyperParameterTuningJobStrategyType' => [ 'type' => 'string', 'enum' => [ 'Bayesian', ], ], 'HyperParameterTuningJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'HyperParameterTuningJobSummary', ], ], 'HyperParameterTuningJobSummary' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', 'HyperParameterTuningJobArn', 'HyperParameterTuningJobStatus', 'Strategy', 'CreationTime', 'TrainingJobStatusCounters', 'ObjectiveStatusCounters', ], 'members' => [ 'HyperParameterTuningJobName' => [ 
'shape' => 'HyperParameterTuningJobName', ], 'HyperParameterTuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'HyperParameterTuningJobStatus' => [ 'shape' => 'HyperParameterTuningJobStatus', ], 'Strategy' => [ 'shape' => 'HyperParameterTuningJobStrategyType', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'HyperParameterTuningEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatusCounters' => [ 'shape' => 'TrainingJobStatusCounters', ], 'ObjectiveStatusCounters' => [ 'shape' => 'ObjectiveStatusCounters', ], 'ResourceLimits' => [ 'shape' => 'ResourceLimits', ], ], ], 'HyperParameterTuningJobWarmStartConfig' => [ 'type' => 'structure', 'required' => [ 'ParentHyperParameterTuningJobs', 'WarmStartType', ], 'members' => [ 'ParentHyperParameterTuningJobs' => [ 'shape' => 'ParentHyperParameterTuningJobs', ], 'WarmStartType' => [ 'shape' => 'HyperParameterTuningJobWarmStartType', ], ], ], 'HyperParameterTuningJobWarmStartType' => [ 'type' => 'string', 'enum' => [ 'IdenticalDataAndAlgorithm', 'TransferLearning', ], ], 'HyperParameters' => [ 'type' => 'map', 'key' => [ 'shape' => 'ParameterKey', ], 'value' => [ 'shape' => 'ParameterValue', ], 'max' => 100, 'min' => 0, ], 'Image' => [ 'type' => 'string', 'max' => 255, 'pattern' => '[\\S]+', ], 'ImageDigest' => [ 'type' => 'string', 'max' => 72, 'pattern' => '^[Ss][Hh][Aa]256:[0-9a-fA-F]{64}$', ], 'InferenceSpecification' => [ 'type' => 'structure', 'required' => [ 'Containers', 'SupportedTransformInstanceTypes', 'SupportedRealtimeInferenceInstanceTypes', 'SupportedContentTypes', 'SupportedResponseMIMETypes', ], 'members' => [ 'Containers' => [ 'shape' => 'ModelPackageContainerDefinitionList', ], 'SupportedTransformInstanceTypes' => [ 'shape' => 'TransformInstanceTypes', ], 'SupportedRealtimeInferenceInstanceTypes' => [ 'shape' => 'RealtimeInferenceInstanceTypes', ], 'SupportedContentTypes' => [ 'shape' => 'ContentTypes', ], 'SupportedResponseMIMETypes' => [ 'shape' => 'ResponseMIMETypes', ], ], ], 'InputConfig' => [ 'type' => 'structure', 'required' => [ 'S3Uri', 'DataInputConfig', 'Framework', ], 'members' => [ 'S3Uri' => [ 'shape' => 'S3Uri', ], 'DataInputConfig' => [ 'shape' => 'DataInputConfig', ], 'Framework' => [ 'shape' => 'Framework', ], ], ], 'InputDataConfig' => [ 'type' => 'list', 'member' => [ 'shape' => 'Channel', ], 'max' => 8, 'min' => 1, ], 'InputModes' => [ 'type' => 'list', 'member' => [ 'shape' => 'TrainingInputMode', ], 'min' => 1, ], 'InstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.t2.medium', 'ml.t2.large', 'ml.t2.xlarge', 'ml.t2.2xlarge', 'ml.t3.medium', 'ml.t3.large', 'ml.t3.xlarge', 'ml.t3.2xlarge', 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', 'ml.c5d.xlarge', 'ml.c5d.2xlarge', 'ml.c5d.4xlarge', 'ml.c5d.9xlarge', 'ml.c5d.18xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', ], ], 'IntegerParameterRange' => [ 'type' => 'structure', 'required' => [ 'Name', 'MinValue', 'MaxValue', ], 'members' => [ 'Name' => [ 'shape' => 'ParameterKey', ], 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'IntegerParameterRangeSpecification' => [ 'type' => 'structure', 'required' => [ 
'MinValue', 'MaxValue', ], 'members' => [ 'MinValue' => [ 'shape' => 'ParameterValue', ], 'MaxValue' => [ 'shape' => 'ParameterValue', ], ], ], 'IntegerParameterRanges' => [ 'type' => 'list', 'member' => [ 'shape' => 'IntegerParameterRange', ], 'max' => 20, 'min' => 0, ], 'JobReferenceCode' => [ 'type' => 'string', 'min' => 1, 'pattern' => '.+', ], 'JobReferenceCodeContains' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 'pattern' => '.+', ], 'KmsKeyId' => [ 'type' => 'string', 'max' => 2048, 'pattern' => '.*', ], 'LabelAttributeName' => [ 'type' => 'string', 'max' => 127, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'LabelCounter' => [ 'type' => 'integer', 'min' => 0, ], 'LabelCounters' => [ 'type' => 'structure', 'members' => [ 'TotalLabeled' => [ 'shape' => 'LabelCounter', ], 'HumanLabeled' => [ 'shape' => 'LabelCounter', ], 'MachineLabeled' => [ 'shape' => 'LabelCounter', ], 'FailedNonRetryableError' => [ 'shape' => 'LabelCounter', ], 'Unlabeled' => [ 'shape' => 'LabelCounter', ], ], ], 'LabelCountersForWorkteam' => [ 'type' => 'structure', 'members' => [ 'HumanLabeled' => [ 'shape' => 'LabelCounter', ], 'PendingHuman' => [ 'shape' => 'LabelCounter', ], 'Total' => [ 'shape' => 'LabelCounter', ], ], ], 'LabelingJobAlgorithmSpecificationArn' => [ 'type' => 'string', 'max' => 2048, 'pattern' => 'arn:.*', ], 'LabelingJobAlgorithmsConfig' => [ 'type' => 'structure', 'required' => [ 'LabelingJobAlgorithmSpecificationArn', ], 'members' => [ 'LabelingJobAlgorithmSpecificationArn' => [ 'shape' => 'LabelingJobAlgorithmSpecificationArn', ], 'InitialActiveLearningModelArn' => [ 'shape' => 'ModelArn', ], 'LabelingJobResourceConfig' => [ 'shape' => 'LabelingJobResourceConfig', ], ], ], 'LabelingJobArn' => [ 'type' => 'string', 'max' => 2048, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:labeling-job/.*', ], 'LabelingJobDataAttributes' => [ 'type' => 'structure', 'members' => [ 'ContentClassifiers' => [ 'shape' => 'ContentClassifiers', ], ], ], 'LabelingJobDataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataSource', ], 'members' => [ 'S3DataSource' => [ 'shape' => 'LabelingJobS3DataSource', ], ], ], 'LabelingJobForWorkteamSummary' => [ 'type' => 'structure', 'required' => [ 'JobReferenceCode', 'WorkRequesterAccountId', 'CreationTime', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'JobReferenceCode' => [ 'shape' => 'JobReferenceCode', ], 'WorkRequesterAccountId' => [ 'shape' => 'AccountId', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LabelCounters' => [ 'shape' => 'LabelCountersForWorkteam', ], ], ], 'LabelingJobForWorkteamSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'LabelingJobForWorkteamSummary', ], ], 'LabelingJobInputConfig' => [ 'type' => 'structure', 'required' => [ 'DataSource', ], 'members' => [ 'DataSource' => [ 'shape' => 'LabelingJobDataSource', ], 'DataAttributes' => [ 'shape' => 'LabelingJobDataAttributes', ], ], ], 'LabelingJobName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'LabelingJobOutput' => [ 'type' => 'structure', 'required' => [ 'OutputDatasetS3Uri', ], 'members' => [ 'OutputDatasetS3Uri' => [ 'shape' => 'S3Uri', ], 'FinalActiveLearningModelArn' => [ 'shape' => 'ModelArn', ], ], ], 'LabelingJobOutputConfig' => [ 'type' => 'structure', 'required' => [ 'S3OutputPath', ], 'members' => [ 'S3OutputPath' => [ 'shape' => 'S3Uri', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'LabelingJobResourceConfig' => [ 'type' => 
'structure', 'members' => [ 'VolumeKmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'LabelingJobS3DataSource' => [ 'type' => 'structure', 'required' => [ 'ManifestS3Uri', ], 'members' => [ 'ManifestS3Uri' => [ 'shape' => 'S3Uri', ], ], ], 'LabelingJobStatus' => [ 'type' => 'string', 'enum' => [ 'InProgress', 'Completed', 'Failed', 'Stopping', 'Stopped', ], ], 'LabelingJobStoppingConditions' => [ 'type' => 'structure', 'members' => [ 'MaxHumanLabeledObjectCount' => [ 'shape' => 'MaxHumanLabeledObjectCount', ], 'MaxPercentageOfInputDatasetLabeled' => [ 'shape' => 'MaxPercentageOfInputDatasetLabeled', ], ], ], 'LabelingJobSummary' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', 'LabelingJobArn', 'CreationTime', 'LastModifiedTime', 'LabelingJobStatus', 'LabelCounters', 'WorkteamArn', 'PreHumanTaskLambdaArn', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'LabelingJobStatus' => [ 'shape' => 'LabelingJobStatus', ], 'LabelCounters' => [ 'shape' => 'LabelCounters', ], 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'PreHumanTaskLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], 'AnnotationConsolidationLambdaArn' => [ 'shape' => 'LambdaFunctionArn', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'LabelingJobOutput' => [ 'shape' => 'LabelingJobOutput', ], 'InputConfig' => [ 'shape' => 'LabelingJobInputConfig', ], ], ], 'LabelingJobSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'LabelingJobSummary', ], ], 'LambdaFunctionArn' => [ 'type' => 'string', 'max' => 2048, 'pattern' => 'arn:aws[a-z\\-]*:lambda:[a-z]{2}-[a-z]+-\\d{1}:\\d{12}:function:[a-zA-Z0-9-_\\.]+(:(\\$LATEST|[a-zA-Z0-9-_]+))?', ], 'LastModifiedTime' => [ 'type' => 'timestamp', ], 'ListAlgorithmsInput' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'SortBy' => [ 'shape' => 'AlgorithmSortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListAlgorithmsOutput' => [ 'type' => 'structure', 'required' => [ 'AlgorithmSummaryList', ], 'members' => [ 'AlgorithmSummaryList' => [ 'shape' => 'AlgorithmSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListCodeRepositoriesInput' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'CodeRepositoryNameContains', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'SortBy' => [ 'shape' => 'CodeRepositorySortBy', ], 'SortOrder' => [ 'shape' => 'CodeRepositorySortOrder', ], ], ], 'ListCodeRepositoriesOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositorySummaryList', ], 'members' => [ 'CodeRepositorySummaryList' => [ 'shape' => 'CodeRepositorySummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListCompilationJobsRequest' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 
'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'LastModifiedTime', ], 'LastModifiedTimeBefore' => [ 'shape' => 'LastModifiedTime', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'StatusEquals' => [ 'shape' => 'CompilationJobStatus', ], 'SortBy' => [ 'shape' => 'ListCompilationJobsSortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListCompilationJobsResponse' => [ 'type' => 'structure', 'required' => [ 'CompilationJobSummaries', ], 'members' => [ 'CompilationJobSummaries' => [ 'shape' => 'CompilationJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListCompilationJobsSortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'ListEndpointConfigsInput' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'EndpointConfigSortKey', ], 'SortOrder' => [ 'shape' => 'OrderKey', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'EndpointConfigNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], ], ], 'ListEndpointConfigsOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointConfigs', ], 'members' => [ 'EndpointConfigs' => [ 'shape' => 'EndpointConfigSummaryList', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], ], ], 'ListEndpointsInput' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'EndpointSortKey', ], 'SortOrder' => [ 'shape' => 'OrderKey', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'EndpointNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'StatusEquals' => [ 'shape' => 'EndpointStatus', ], ], ], 'ListEndpointsOutput' => [ 'type' => 'structure', 'required' => [ 'Endpoints', ], 'members' => [ 'Endpoints' => [ 'shape' => 'EndpointSummaryList', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], ], ], 'ListHyperParameterTuningJobsRequest' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], 'SortBy' => [ 'shape' => 'HyperParameterTuningJobSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'StatusEquals' => [ 'shape' => 'HyperParameterTuningJobStatus', ], ], ], 'ListHyperParameterTuningJobsResponse' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobSummaries', ], 'members' => [ 'HyperParameterTuningJobSummaries' => [ 'shape' => 'HyperParameterTuningJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListLabelingJobsForWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'JobReferenceCodeContains' => [ 'shape' => 'JobReferenceCodeContains', ], 
'SortBy' => [ 'shape' => 'ListLabelingJobsForWorkteamSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListLabelingJobsForWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'LabelingJobSummaryList', ], 'members' => [ 'LabelingJobSummaryList' => [ 'shape' => 'LabelingJobForWorkteamSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListLabelingJobsForWorkteamSortByOptions' => [ 'type' => 'string', 'enum' => [ 'CreationTime', ], ], 'ListLabelingJobsRequest' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'SortBy' => [ 'shape' => 'SortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'StatusEquals' => [ 'shape' => 'LabelingJobStatus', ], ], ], 'ListLabelingJobsResponse' => [ 'type' => 'structure', 'members' => [ 'LabelingJobSummaryList' => [ 'shape' => 'LabelingJobSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListModelPackagesInput' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'SortBy' => [ 'shape' => 'ModelPackageSortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListModelPackagesOutput' => [ 'type' => 'structure', 'required' => [ 'ModelPackageSummaryList', ], 'members' => [ 'ModelPackageSummaryList' => [ 'shape' => 'ModelPackageSummaryList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListModelsInput' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'ModelSortKey', ], 'SortOrder' => [ 'shape' => 'OrderKey', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'NameContains' => [ 'shape' => 'ModelNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], ], ], 'ListModelsOutput' => [ 'type' => 'structure', 'required' => [ 'Models', ], 'members' => [ 'Models' => [ 'shape' => 'ModelSummaryList', ], 'NextToken' => [ 'shape' => 'PaginationToken', ], ], ], 'ListNotebookInstanceLifecycleConfigsInput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'SortBy' => [ 'shape' => 'NotebookInstanceLifecycleConfigSortKey', ], 'SortOrder' => [ 'shape' => 'NotebookInstanceLifecycleConfigSortOrder', ], 'NameContains' => [ 'shape' => 'NotebookInstanceLifecycleConfigNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeBefore' => [ 'shape' => 'LastModifiedTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'LastModifiedTime', ], ], ], 'ListNotebookInstanceLifecycleConfigsOutput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'NotebookInstanceLifecycleConfigs' => [ 'shape' => 'NotebookInstanceLifecycleConfigSummaryList', ], ], ], 'ListNotebookInstancesInput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 
'SortBy' => [ 'shape' => 'NotebookInstanceSortKey', ], 'SortOrder' => [ 'shape' => 'NotebookInstanceSortOrder', ], 'NameContains' => [ 'shape' => 'NotebookInstanceNameContains', ], 'CreationTimeBefore' => [ 'shape' => 'CreationTime', ], 'CreationTimeAfter' => [ 'shape' => 'CreationTime', ], 'LastModifiedTimeBefore' => [ 'shape' => 'LastModifiedTime', ], 'LastModifiedTimeAfter' => [ 'shape' => 'LastModifiedTime', ], 'StatusEquals' => [ 'shape' => 'NotebookInstanceStatus', ], 'NotebookInstanceLifecycleConfigNameContains' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DefaultCodeRepositoryContains' => [ 'shape' => 'CodeRepositoryContains', ], 'AdditionalCodeRepositoryEquals' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], ], ], 'ListNotebookInstancesOutput' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'NotebookInstances' => [ 'shape' => 'NotebookInstanceSummaryList', ], ], ], 'ListSubscribedWorkteamsRequest' => [ 'type' => 'structure', 'members' => [ 'NameContains' => [ 'shape' => 'WorkteamName', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'ListSubscribedWorkteamsResponse' => [ 'type' => 'structure', 'required' => [ 'SubscribedWorkteams', ], 'members' => [ 'SubscribedWorkteams' => [ 'shape' => 'SubscribedWorkteams', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTagsInput' => [ 'type' => 'structure', 'required' => [ 'ResourceArn', ], 'members' => [ 'ResourceArn' => [ 'shape' => 'ResourceArn', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'ListTagsMaxResults', ], ], ], 'ListTagsMaxResults' => [ 'type' => 'integer', 'min' => 50, ], 'ListTagsOutput' => [ 'type' => 'structure', 'members' => [ 'Tags' => [ 'shape' => 'TagList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTrainingJobsForHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', ], 'StatusEquals' => [ 'shape' => 'TrainingJobStatus', ], 'SortBy' => [ 'shape' => 'TrainingJobSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListTrainingJobsForHyperParameterTuningJobResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobSummaries', ], 'members' => [ 'TrainingJobSummaries' => [ 'shape' => 'HyperParameterTrainingJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTrainingJobsRequest' => [ 'type' => 'structure', 'members' => [ 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'StatusEquals' => [ 'shape' => 'TrainingJobStatus', ], 'SortBy' => [ 'shape' => 'SortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], ], ], 'ListTrainingJobsResponse' => [ 'type' => 'structure', 'required' => [ 'TrainingJobSummaries', ], 'members' => [ 'TrainingJobSummaries' => [ 'shape' => 'TrainingJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListTransformJobsRequest' => [ 'type' => 'structure', 'members' => [ 'CreationTimeAfter' => [ 'shape' => 'Timestamp', ], 
'CreationTimeBefore' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeAfter' => [ 'shape' => 'Timestamp', ], 'LastModifiedTimeBefore' => [ 'shape' => 'Timestamp', ], 'NameContains' => [ 'shape' => 'NameContains', ], 'StatusEquals' => [ 'shape' => 'TransformJobStatus', ], 'SortBy' => [ 'shape' => 'SortBy', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'ListTransformJobsResponse' => [ 'type' => 'structure', 'required' => [ 'TransformJobSummaries', ], 'members' => [ 'TransformJobSummaries' => [ 'shape' => 'TransformJobSummaries', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListWorkteamsRequest' => [ 'type' => 'structure', 'members' => [ 'SortBy' => [ 'shape' => 'ListWorkteamsSortByOptions', ], 'SortOrder' => [ 'shape' => 'SortOrder', ], 'NameContains' => [ 'shape' => 'WorkteamName', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'ListWorkteamsResponse' => [ 'type' => 'structure', 'required' => [ 'Workteams', ], 'members' => [ 'Workteams' => [ 'shape' => 'Workteams', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'ListWorkteamsSortByOptions' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreateDate', ], ], 'MaxConcurrentTaskCount' => [ 'type' => 'integer', 'max' => 1000, 'min' => 1, ], 'MaxConcurrentTransforms' => [ 'type' => 'integer', 'min' => 0, ], 'MaxHumanLabeledObjectCount' => [ 'type' => 'integer', 'min' => 1, ], 'MaxNumberOfTrainingJobs' => [ 'type' => 'integer', 'min' => 1, ], 'MaxParallelTrainingJobs' => [ 'type' => 'integer', 'min' => 1, ], 'MaxPayloadInMB' => [ 'type' => 'integer', 'min' => 0, ], 'MaxPercentageOfInputDatasetLabeled' => [ 'type' => 'integer', 'max' => 100, 'min' => 1, ], 'MaxResults' => [ 'type' => 'integer', 'max' => 100, 'min' => 1, ], 'MaxRuntimeInSeconds' => [ 'type' => 'integer', 'min' => 1, ], 'MemberDefinition' => [ 'type' => 'structure', 'members' => [ 'CognitoMemberDefinition' => [ 'shape' => 'CognitoMemberDefinition', ], ], ], 'MemberDefinitions' => [ 'type' => 'list', 'member' => [ 'shape' => 'MemberDefinition', ], 'max' => 10, 'min' => 1, ], 'MetricData' => [ 'type' => 'structure', 'members' => [ 'MetricName' => [ 'shape' => 'MetricName', ], 'Value' => [ 'shape' => 'Float', ], 'Timestamp' => [ 'shape' => 'Timestamp', ], ], ], 'MetricDefinition' => [ 'type' => 'structure', 'required' => [ 'Name', 'Regex', ], 'members' => [ 'Name' => [ 'shape' => 'MetricName', ], 'Regex' => [ 'shape' => 'MetricRegex', ], ], ], 'MetricDefinitionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'MetricDefinition', ], 'max' => 20, 'min' => 0, ], 'MetricName' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 'pattern' => '.+', ], 'MetricRegex' => [ 'type' => 'string', 'max' => 500, 'min' => 1, ], 'MetricValue' => [ 'type' => 'float', ], 'ModelArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model/.*', ], 'ModelArtifacts' => [ 'type' => 'structure', 'required' => [ 'S3ModelArtifacts', ], 'members' => [ 'S3ModelArtifacts' => [ 'shape' => 'S3Uri', ], ], ], 'ModelName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'ModelNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'ModelPackageArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:model-package/.*', ], 
'ModelPackageContainerDefinition' => [ 'type' => 'structure', 'required' => [ 'Image', ], 'members' => [ 'ContainerHostname' => [ 'shape' => 'ContainerHostname', ], 'Image' => [ 'shape' => 'Image', ], 'ImageDigest' => [ 'shape' => 'ImageDigest', ], 'ModelDataUrl' => [ 'shape' => 'Url', ], 'ProductId' => [ 'shape' => 'ProductId', ], ], ], 'ModelPackageContainerDefinitionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageContainerDefinition', ], 'max' => 1, 'min' => 1, ], 'ModelPackageSortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'ModelPackageStatus' => [ 'type' => 'string', 'enum' => [ 'Pending', 'InProgress', 'Completed', 'Failed', 'Deleting', ], ], 'ModelPackageStatusDetails' => [ 'type' => 'structure', 'required' => [ 'ValidationStatuses', ], 'members' => [ 'ValidationStatuses' => [ 'shape' => 'ModelPackageStatusItemList', ], 'ImageScanStatuses' => [ 'shape' => 'ModelPackageStatusItemList', ], ], ], 'ModelPackageStatusItem' => [ 'type' => 'structure', 'required' => [ 'Name', 'Status', ], 'members' => [ 'Name' => [ 'shape' => 'EntityName', ], 'Status' => [ 'shape' => 'DetailedModelPackageStatus', ], 'FailureReason' => [ 'shape' => 'String', ], ], ], 'ModelPackageStatusItemList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageStatusItem', ], ], 'ModelPackageSummary' => [ 'type' => 'structure', 'required' => [ 'ModelPackageName', 'ModelPackageArn', 'CreationTime', 'ModelPackageStatus', ], 'members' => [ 'ModelPackageName' => [ 'shape' => 'EntityName', ], 'ModelPackageArn' => [ 'shape' => 'ModelPackageArn', ], 'ModelPackageDescription' => [ 'shape' => 'EntityDescription', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'ModelPackageStatus' => [ 'shape' => 'ModelPackageStatus', ], ], ], 'ModelPackageSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageSummary', ], ], 'ModelPackageValidationProfile' => [ 'type' => 'structure', 'required' => [ 'ProfileName', 'TransformJobDefinition', ], 'members' => [ 'ProfileName' => [ 'shape' => 'EntityName', ], 'TransformJobDefinition' => [ 'shape' => 'TransformJobDefinition', ], ], ], 'ModelPackageValidationProfiles' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelPackageValidationProfile', ], 'max' => 1, 'min' => 1, ], 'ModelPackageValidationSpecification' => [ 'type' => 'structure', 'required' => [ 'ValidationRole', 'ValidationProfiles', ], 'members' => [ 'ValidationRole' => [ 'shape' => 'RoleArn', ], 'ValidationProfiles' => [ 'shape' => 'ModelPackageValidationProfiles', ], ], ], 'ModelSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', ], ], 'ModelSummary' => [ 'type' => 'structure', 'required' => [ 'ModelName', 'ModelArn', 'CreationTime', ], 'members' => [ 'ModelName' => [ 'shape' => 'ModelName', ], 'ModelArn' => [ 'shape' => 'ModelArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], ], ], 'ModelSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ModelSummary', ], ], 'NameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9\\-]+', ], 'NestedFilters' => [ 'type' => 'structure', 'required' => [ 'NestedPropertyName', 'Filters', ], 'members' => [ 'NestedPropertyName' => [ 'shape' => 'ResourcePropertyName', ], 'Filters' => [ 'shape' => 'FilterList', ], ], ], 'NestedFiltersList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NestedFilters', ], 'max' => 20, 'min' => 1, ], 'NetworkInterfaceId' => [ 'type' => 'string', ], 'NextToken' => [ 'type' => 'string', 'max' => 8192, 'pattern' => '.*', ], 
'NotebookInstanceAcceleratorType' => [ 'type' => 'string', 'enum' => [ 'ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ], ], 'NotebookInstanceAcceleratorTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceAcceleratorType', ], ], 'NotebookInstanceArn' => [ 'type' => 'string', 'max' => 256, ], 'NotebookInstanceLifecycleConfigArn' => [ 'type' => 'string', 'max' => 256, ], 'NotebookInstanceLifecycleConfigContent' => [ 'type' => 'string', 'max' => 16384, 'min' => 1, 'pattern' => '[\\S\\s]+', ], 'NotebookInstanceLifecycleConfigList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceLifecycleHook', ], 'max' => 1, ], 'NotebookInstanceLifecycleConfigName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'NotebookInstanceLifecycleConfigNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'NotebookInstanceLifecycleConfigSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'LastModifiedTime', ], ], 'NotebookInstanceLifecycleConfigSortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'NotebookInstanceLifecycleConfigSummary' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', 'NotebookInstanceLifecycleConfigArn', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'NotebookInstanceLifecycleConfigArn' => [ 'shape' => 'NotebookInstanceLifecycleConfigArn', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], ], ], 'NotebookInstanceLifecycleConfigSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceLifecycleConfigSummary', ], ], 'NotebookInstanceLifecycleHook' => [ 'type' => 'structure', 'members' => [ 'Content' => [ 'shape' => 'NotebookInstanceLifecycleConfigContent', ], ], ], 'NotebookInstanceName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'NotebookInstanceNameContains' => [ 'type' => 'string', 'max' => 63, 'pattern' => '[a-zA-Z0-9-]+', ], 'NotebookInstanceSortKey' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'NotebookInstanceSortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'NotebookInstanceStatus' => [ 'type' => 'string', 'enum' => [ 'Pending', 'InService', 'Stopping', 'Stopped', 'Failed', 'Deleting', 'Updating', ], ], 'NotebookInstanceSummary' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', 'NotebookInstanceArn', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'NotebookInstanceArn' => [ 'shape' => 'NotebookInstanceArn', ], 'NotebookInstanceStatus' => [ 'shape' => 'NotebookInstanceStatus', ], 'Url' => [ 'shape' => 'NotebookInstanceUrl', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'CreationTime' => [ 'shape' => 'CreationTime', ], 'LastModifiedTime' => [ 'shape' => 'LastModifiedTime', ], 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], ], ], 'NotebookInstanceSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'NotebookInstanceSummary', ], ], 'NotebookInstanceUrl' => [ 'type' => 'string', ], 'NotebookInstanceVolumeSizeInGB' => [ 'type' => 'integer', 'max' => 16384, 'min' => 5, ], 
'NumberOfHumanWorkersPerDataObject' => [ 'type' => 'integer', 'max' => 9, 'min' => 1, ], 'ObjectiveStatus' => [ 'type' => 'string', 'enum' => [ 'Succeeded', 'Pending', 'Failed', ], ], 'ObjectiveStatusCounter' => [ 'type' => 'integer', 'min' => 0, ], 'ObjectiveStatusCounters' => [ 'type' => 'structure', 'members' => [ 'Succeeded' => [ 'shape' => 'ObjectiveStatusCounter', ], 'Pending' => [ 'shape' => 'ObjectiveStatusCounter', ], 'Failed' => [ 'shape' => 'ObjectiveStatusCounter', ], ], ], 'Operator' => [ 'type' => 'string', 'enum' => [ 'Equals', 'NotEquals', 'GreaterThan', 'GreaterThanOrEqualTo', 'LessThan', 'LessThanOrEqualTo', 'Contains', ], ], 'OrderKey' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'OutputConfig' => [ 'type' => 'structure', 'required' => [ 'S3OutputLocation', 'TargetDevice', ], 'members' => [ 'S3OutputLocation' => [ 'shape' => 'S3Uri', ], 'TargetDevice' => [ 'shape' => 'TargetDevice', ], ], ], 'OutputDataConfig' => [ 'type' => 'structure', 'required' => [ 'S3OutputPath', ], 'members' => [ 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], 'S3OutputPath' => [ 'shape' => 'S3Uri', ], ], ], 'PaginationToken' => [ 'type' => 'string', 'max' => 8192, 'pattern' => '.*', ], 'ParameterKey' => [ 'type' => 'string', 'max' => 256, 'pattern' => '.*', ], 'ParameterName' => [ 'type' => 'string', 'max' => 256, 'pattern' => '[\\p{L}\\p{M}\\p{Z}\\p{S}\\p{N}\\p{P}]*', ], 'ParameterRange' => [ 'type' => 'structure', 'members' => [ 'IntegerParameterRangeSpecification' => [ 'shape' => 'IntegerParameterRangeSpecification', ], 'ContinuousParameterRangeSpecification' => [ 'shape' => 'ContinuousParameterRangeSpecification', ], 'CategoricalParameterRangeSpecification' => [ 'shape' => 'CategoricalParameterRangeSpecification', ], ], ], 'ParameterRanges' => [ 'type' => 'structure', 'members' => [ 'IntegerParameterRanges' => [ 'shape' => 'IntegerParameterRanges', ], 'ContinuousParameterRanges' => [ 'shape' => 'ContinuousParameterRanges', ], 'CategoricalParameterRanges' => [ 'shape' => 'CategoricalParameterRanges', ], ], ], 'ParameterType' => [ 'type' => 'string', 'enum' => [ 'Integer', 'Continuous', 'Categorical', 'FreeText', ], ], 'ParameterValue' => [ 'type' => 'string', 'max' => 256, 'pattern' => '.*', ], 'ParameterValues' => [ 'type' => 'list', 'member' => [ 'shape' => 'ParameterValue', ], 'max' => 20, 'min' => 1, ], 'ParentHyperParameterTuningJob' => [ 'type' => 'structure', 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], ], ], 'ParentHyperParameterTuningJobs' => [ 'type' => 'list', 'member' => [ 'shape' => 'ParentHyperParameterTuningJob', ], 'max' => 5, 'min' => 1, ], 'ProductId' => [ 'type' => 'string', 'max' => 256, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*$', ], 'ProductListings' => [ 'type' => 'list', 'member' => [ 'shape' => 'String', ], ], 'ProductionVariant' => [ 'type' => 'structure', 'required' => [ 'VariantName', 'ModelName', 'InitialInstanceCount', 'InstanceType', ], 'members' => [ 'VariantName' => [ 'shape' => 'VariantName', ], 'ModelName' => [ 'shape' => 'ModelName', ], 'InitialInstanceCount' => [ 'shape' => 'TaskCount', ], 'InstanceType' => [ 'shape' => 'ProductionVariantInstanceType', ], 'InitialVariantWeight' => [ 'shape' => 'VariantWeight', ], 'AcceleratorType' => [ 'shape' => 'ProductionVariantAcceleratorType', ], ], ], 'ProductionVariantAcceleratorType' => [ 'type' => 'string', 'enum' => [ 'ml.eia1.medium', 'ml.eia1.large', 'ml.eia1.xlarge', ], ], 'ProductionVariantInstanceType' => [ 'type' => 'string', 'enum' => [ 
'ml.t2.medium', 'ml.t2.large', 'ml.t2.xlarge', 'ml.t2.2xlarge', 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.large', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.large', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ], ], 'ProductionVariantList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProductionVariant', ], 'min' => 1, ], 'ProductionVariantSummary' => [ 'type' => 'structure', 'required' => [ 'VariantName', ], 'members' => [ 'VariantName' => [ 'shape' => 'VariantName', ], 'DeployedImages' => [ 'shape' => 'DeployedImages', ], 'CurrentWeight' => [ 'shape' => 'VariantWeight', ], 'DesiredWeight' => [ 'shape' => 'VariantWeight', ], 'CurrentInstanceCount' => [ 'shape' => 'TaskCount', ], 'DesiredInstanceCount' => [ 'shape' => 'TaskCount', ], ], ], 'ProductionVariantSummaryList' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProductionVariantSummary', ], 'min' => 1, ], 'PropertyNameHint' => [ 'type' => 'string', 'max' => 100, 'min' => 0, 'pattern' => '.*', ], 'PropertyNameQuery' => [ 'type' => 'structure', 'required' => [ 'PropertyNameHint', ], 'members' => [ 'PropertyNameHint' => [ 'shape' => 'PropertyNameHint', ], ], ], 'PropertyNameSuggestion' => [ 'type' => 'structure', 'members' => [ 'PropertyName' => [ 'shape' => 'ResourcePropertyName', ], ], ], 'PropertyNameSuggestionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'PropertyNameSuggestion', ], ], 'PublicWorkforceTaskPrice' => [ 'type' => 'structure', 'members' => [ 'AmountInUsd' => [ 'shape' => 'USD', ], ], ], 'RealtimeInferenceInstanceTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'ProductionVariantInstanceType', ], ], 'RecordWrapper' => [ 'type' => 'string', 'enum' => [ 'None', 'RecordIO', ], ], 'RenderUiTemplateRequest' => [ 'type' => 'structure', 'required' => [ 'UiTemplate', 'Task', 'RoleArn', ], 'members' => [ 'UiTemplate' => [ 'shape' => 'UiTemplate', ], 'Task' => [ 'shape' => 'RenderableTask', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], ], ], 'RenderUiTemplateResponse' => [ 'type' => 'structure', 'required' => [ 'RenderedContent', 'Errors', ], 'members' => [ 'RenderedContent' => [ 'shape' => 'String', ], 'Errors' => [ 'shape' => 'RenderingErrorList', ], ], ], 'RenderableTask' => [ 'type' => 'structure', 'required' => [ 'Input', ], 'members' => [ 'Input' => [ 'shape' => 'TaskInput', ], ], ], 'RenderingError' => [ 'type' => 'structure', 'required' => [ 'Code', 'Message', ], 'members' => [ 'Code' => [ 'shape' => 'String', ], 'Message' => [ 'shape' => 'String', ], ], ], 'RenderingErrorList' => [ 'type' => 'list', 'member' => [ 'shape' => 'RenderingError', ], ], 'ResourceArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:.*', ], 'ResourceConfig' => [ 'type' => 'structure', 'required' => [ 'InstanceType', 'InstanceCount', 'VolumeSizeInGB', ], 'members' => [ 'InstanceType' => [ 'shape' => 'TrainingInstanceType', ], 'InstanceCount' => [ 'shape' => 'TrainingInstanceCount', ], 'VolumeSizeInGB' => [ 'shape' => 'VolumeSizeInGB', ], 'VolumeKmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'ResourceInUse' => [ 'type' => 'structure', 'members' => [ 'Message' => [ 'shape' => 'FailureReason', ], ], 'exception' => true, ], 'ResourceLimitExceeded' => [ 'type' => 'structure', 'members' => [ 'Message' => [ 
'shape' => 'FailureReason', ], ], 'exception' => true, ], 'ResourceLimits' => [ 'type' => 'structure', 'required' => [ 'MaxNumberOfTrainingJobs', 'MaxParallelTrainingJobs', ], 'members' => [ 'MaxNumberOfTrainingJobs' => [ 'shape' => 'MaxNumberOfTrainingJobs', ], 'MaxParallelTrainingJobs' => [ 'shape' => 'MaxParallelTrainingJobs', ], ], ], 'ResourceNotFound' => [ 'type' => 'structure', 'members' => [ 'Message' => [ 'shape' => 'FailureReason', ], ], 'exception' => true, ], 'ResourcePropertyName' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 'pattern' => '.+', ], 'ResourceType' => [ 'type' => 'string', 'enum' => [ 'TrainingJob', ], ], 'ResponseMIMEType' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '^[-\\w]+\\/.+$', ], 'ResponseMIMETypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'ResponseMIMEType', ], ], 'RoleArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 20, 'pattern' => '^arn:aws[a-z\\-]*:iam::\\d{12}:role/?[a-zA-Z_0-9+=,.@\\-_/]+$', ], 'RootAccess' => [ 'type' => 'string', 'enum' => [ 'Enabled', 'Disabled', ], ], 'S3DataDistribution' => [ 'type' => 'string', 'enum' => [ 'FullyReplicated', 'ShardedByS3Key', ], ], 'S3DataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataType', 'S3Uri', ], 'members' => [ 'S3DataType' => [ 'shape' => 'S3DataType', ], 'S3Uri' => [ 'shape' => 'S3Uri', ], 'S3DataDistributionType' => [ 'shape' => 'S3DataDistribution', ], 'AttributeNames' => [ 'shape' => 'AttributeNames', ], ], ], 'S3DataType' => [ 'type' => 'string', 'enum' => [ 'ManifestFile', 'S3Prefix', 'AugmentedManifestFile', ], ], 'S3Uri' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '^(https|s3)://([^/]+)/?(.*)$', ], 'SearchExpression' => [ 'type' => 'structure', 'members' => [ 'Filters' => [ 'shape' => 'FilterList', ], 'NestedFilters' => [ 'shape' => 'NestedFiltersList', ], 'SubExpressions' => [ 'shape' => 'SearchExpressionList', ], 'Operator' => [ 'shape' => 'BooleanOperator', ], ], ], 'SearchExpressionList' => [ 'type' => 'list', 'member' => [ 'shape' => 'SearchExpression', ], 'max' => 20, 'min' => 1, ], 'SearchRecord' => [ 'type' => 'structure', 'members' => [ 'TrainingJob' => [ 'shape' => 'TrainingJob', ], ], ], 'SearchRequest' => [ 'type' => 'structure', 'required' => [ 'Resource', ], 'members' => [ 'Resource' => [ 'shape' => 'ResourceType', ], 'SearchExpression' => [ 'shape' => 'SearchExpression', ], 'SortBy' => [ 'shape' => 'ResourcePropertyName', ], 'SortOrder' => [ 'shape' => 'SearchSortOrder', ], 'NextToken' => [ 'shape' => 'NextToken', ], 'MaxResults' => [ 'shape' => 'MaxResults', 'box' => true, ], ], ], 'SearchResponse' => [ 'type' => 'structure', 'members' => [ 'Results' => [ 'shape' => 'SearchResultsList', ], 'NextToken' => [ 'shape' => 'NextToken', ], ], ], 'SearchResultsList' => [ 'type' => 'list', 'member' => [ 'shape' => 'SearchRecord', ], ], 'SearchSortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'SecondaryStatus' => [ 'type' => 'string', 'enum' => [ 'Starting', 'LaunchingMLInstances', 'PreparingTrainingStack', 'Downloading', 'DownloadingTrainingImage', 'Training', 'Uploading', 'Stopping', 'Stopped', 'MaxRuntimeExceeded', 'Completed', 'Failed', ], ], 'SecondaryStatusTransition' => [ 'type' => 'structure', 'required' => [ 'Status', 'StartTime', ], 'members' => [ 'Status' => [ 'shape' => 'SecondaryStatus', ], 'StartTime' => [ 'shape' => 'Timestamp', ], 'EndTime' => [ 'shape' => 'Timestamp', ], 'StatusMessage' => [ 'shape' => 'StatusMessage', ], ], ], 'SecondaryStatusTransitions' => [ 'type' => 'list', 'member' => [ 
'shape' => 'SecondaryStatusTransition', ], ], 'SecretArn' => [ 'type' => 'string', 'max' => 2048, 'min' => 1, 'pattern' => 'arn:aws[a-z\\-]*:secretsmanager:[a-z0-9\\-]*:[0-9]{12}:secret:.*', ], 'SecurityGroupId' => [ 'type' => 'string', 'max' => 32, 'pattern' => '[-0-9a-zA-Z]+', ], 'SecurityGroupIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'SecurityGroupId', ], 'max' => 5, ], 'Seed' => [ 'type' => 'long', ], 'SessionExpirationDurationInSeconds' => [ 'type' => 'integer', 'max' => 43200, 'min' => 1800, ], 'ShuffleConfig' => [ 'type' => 'structure', 'required' => [ 'Seed', ], 'members' => [ 'Seed' => [ 'shape' => 'Seed', ], ], ], 'SortBy' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', ], ], 'SortOrder' => [ 'type' => 'string', 'enum' => [ 'Ascending', 'Descending', ], ], 'SourceAlgorithm' => [ 'type' => 'structure', 'required' => [ 'AlgorithmName', ], 'members' => [ 'ModelDataUrl' => [ 'shape' => 'Url', ], 'AlgorithmName' => [ 'shape' => 'ArnOrName', ], ], ], 'SourceAlgorithmList' => [ 'type' => 'list', 'member' => [ 'shape' => 'SourceAlgorithm', ], 'max' => 1, 'min' => 1, ], 'SourceAlgorithmSpecification' => [ 'type' => 'structure', 'required' => [ 'SourceAlgorithms', ], 'members' => [ 'SourceAlgorithms' => [ 'shape' => 'SourceAlgorithmList', ], ], ], 'SplitType' => [ 'type' => 'string', 'enum' => [ 'None', 'Line', 'RecordIO', 'TFRecord', ], ], 'StartNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'StatusMessage' => [ 'type' => 'string', ], 'StopCompilationJobRequest' => [ 'type' => 'structure', 'required' => [ 'CompilationJobName', ], 'members' => [ 'CompilationJobName' => [ 'shape' => 'EntityName', ], ], ], 'StopHyperParameterTuningJobRequest' => [ 'type' => 'structure', 'required' => [ 'HyperParameterTuningJobName', ], 'members' => [ 'HyperParameterTuningJobName' => [ 'shape' => 'HyperParameterTuningJobName', ], ], ], 'StopLabelingJobRequest' => [ 'type' => 'structure', 'required' => [ 'LabelingJobName', ], 'members' => [ 'LabelingJobName' => [ 'shape' => 'LabelingJobName', ], ], ], 'StopNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], ], ], 'StopTrainingJobRequest' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], ], ], 'StopTransformJobRequest' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], ], ], 'StoppingCondition' => [ 'type' => 'structure', 'members' => [ 'MaxRuntimeInSeconds' => [ 'shape' => 'MaxRuntimeInSeconds', ], ], ], 'String' => [ 'type' => 'string', ], 'String200' => [ 'type' => 'string', 'max' => 200, 'min' => 1, 'pattern' => '.+', ], 'SubnetId' => [ 'type' => 'string', 'max' => 32, 'pattern' => '[-0-9a-zA-Z]+', ], 'Subnets' => [ 'type' => 'list', 'member' => [ 'shape' => 'SubnetId', ], 'max' => 16, 'min' => 1, ], 'SubscribedWorkteam' => [ 'type' => 'structure', 'required' => [ 'WorkteamArn', ], 'members' => [ 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'MarketplaceTitle' => [ 'shape' => 'String200', ], 'SellerName' => [ 'shape' => 'String', ], 'MarketplaceDescription' => [ 'shape' => 'String200', ], 'ListingId' => [ 'shape' => 'String', ], ], ], 'SubscribedWorkteams' => [ 'type' => 'list', 'member' => [ 
'shape' => 'SubscribedWorkteam', ], ], 'Success' => [ 'type' => 'boolean', ], 'SuggestionQuery' => [ 'type' => 'structure', 'members' => [ 'PropertyNameQuery' => [ 'shape' => 'PropertyNameQuery', ], ], ], 'Tag' => [ 'type' => 'structure', 'required' => [ 'Key', 'Value', ], 'members' => [ 'Key' => [ 'shape' => 'TagKey', ], 'Value' => [ 'shape' => 'TagValue', ], ], ], 'TagKey' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]*)$', ], 'TagKeyList' => [ 'type' => 'list', 'member' => [ 'shape' => 'TagKey', ], 'max' => 50, 'min' => 1, ], 'TagList' => [ 'type' => 'list', 'member' => [ 'shape' => 'Tag', ], 'max' => 50, 'min' => 0, ], 'TagValue' => [ 'type' => 'string', 'max' => 256, 'min' => 0, 'pattern' => '^([\\p{L}\\p{Z}\\p{N}_.:/=+\\-@]*)$', ], 'TargetDevice' => [ 'type' => 'string', 'enum' => [ 'ml_m4', 'ml_m5', 'ml_c4', 'ml_c5', 'ml_p2', 'ml_p3', 'jetson_tx1', 'jetson_tx2', 'rasp3b', 'deeplens', 'rk3399', 'rk3288', ], ], 'TaskAvailabilityLifetimeInSeconds' => [ 'type' => 'integer', 'max' => 864000, 'min' => 1, ], 'TaskCount' => [ 'type' => 'integer', 'min' => 1, ], 'TaskDescription' => [ 'type' => 'string', 'max' => 255, 'min' => 1, 'pattern' => '.+', ], 'TaskInput' => [ 'type' => 'string', 'max' => 128000, 'min' => 2, 'pattern' => '[\\S\\s]+', ], 'TaskKeyword' => [ 'type' => 'string', 'max' => 30, 'min' => 1, 'pattern' => '^[A-Za-z0-9]+( [A-Za-z0-9]+)*$', ], 'TaskKeywords' => [ 'type' => 'list', 'member' => [ 'shape' => 'TaskKeyword', ], 'max' => 5, 'min' => 1, ], 'TaskTimeLimitInSeconds' => [ 'type' => 'integer', 'max' => 3600, 'min' => 1, ], 'TaskTitle' => [ 'type' => 'string', 'max' => 128, 'min' => 1, 'pattern' => '^[\\t\\n\\r -\\uD7FF\\uE000-\\uFFFD]*$', ], 'TemplateContent' => [ 'type' => 'string', 'max' => 128000, 'min' => 1, 'pattern' => '[\\S\\s]+', ], 'TenthFractionsOfACent' => [ 'type' => 'integer', 'max' => 9, 'min' => 0, ], 'Timestamp' => [ 'type' => 'timestamp', ], 'TrainingInputMode' => [ 'type' => 'string', 'enum' => [ 'Pipe', 'File', ], ], 'TrainingInstanceCount' => [ 'type' => 'integer', 'min' => 1, ], 'TrainingInstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', ], ], 'TrainingInstanceTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'TrainingInstanceType', ], ], 'TrainingJob' => [ 'type' => 'structure', 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'TuningJobArn' => [ 'shape' => 'HyperParameterTuningJobArn', ], 'LabelingJobArn' => [ 'shape' => 'LabelingJobArn', ], 'ModelArtifacts' => [ 'shape' => 'ModelArtifacts', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], 'SecondaryStatus' => [ 'shape' => 'SecondaryStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'AlgorithmSpecification' => [ 'shape' => 'AlgorithmSpecification', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 
'VpcConfig' => [ 'shape' => 'VpcConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingStartTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'SecondaryStatusTransitions' => [ 'shape' => 'SecondaryStatusTransitions', ], 'FinalMetricDataList' => [ 'shape' => 'FinalMetricDataList', ], 'EnableNetworkIsolation' => [ 'shape' => 'Boolean', ], 'EnableInterContainerTrafficEncryption' => [ 'shape' => 'Boolean', ], 'Tags' => [ 'shape' => 'TagList', ], ], ], 'TrainingJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:training-job/.*', ], 'TrainingJobDefinition' => [ 'type' => 'structure', 'required' => [ 'TrainingInputMode', 'InputDataConfig', 'OutputDataConfig', 'ResourceConfig', 'StoppingCondition', ], 'members' => [ 'TrainingInputMode' => [ 'shape' => 'TrainingInputMode', ], 'HyperParameters' => [ 'shape' => 'HyperParameters', ], 'InputDataConfig' => [ 'shape' => 'InputDataConfig', ], 'OutputDataConfig' => [ 'shape' => 'OutputDataConfig', ], 'ResourceConfig' => [ 'shape' => 'ResourceConfig', ], 'StoppingCondition' => [ 'shape' => 'StoppingCondition', ], ], ], 'TrainingJobEarlyStoppingType' => [ 'type' => 'string', 'enum' => [ 'Off', 'Auto', ], ], 'TrainingJobName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'TrainingJobSortByOptions' => [ 'type' => 'string', 'enum' => [ 'Name', 'CreationTime', 'Status', 'FinalObjectiveMetricValue', ], ], 'TrainingJobStatus' => [ 'type' => 'string', 'enum' => [ 'InProgress', 'Completed', 'Failed', 'Stopping', 'Stopped', ], ], 'TrainingJobStatusCounter' => [ 'type' => 'integer', 'min' => 0, ], 'TrainingJobStatusCounters' => [ 'type' => 'structure', 'members' => [ 'Completed' => [ 'shape' => 'TrainingJobStatusCounter', ], 'InProgress' => [ 'shape' => 'TrainingJobStatusCounter', ], 'RetryableError' => [ 'shape' => 'TrainingJobStatusCounter', ], 'NonRetryableError' => [ 'shape' => 'TrainingJobStatusCounter', ], 'Stopped' => [ 'shape' => 'TrainingJobStatusCounter', ], ], ], 'TrainingJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'TrainingJobSummary', ], ], 'TrainingJobSummary' => [ 'type' => 'structure', 'required' => [ 'TrainingJobName', 'TrainingJobArn', 'CreationTime', 'TrainingJobStatus', ], 'members' => [ 'TrainingJobName' => [ 'shape' => 'TrainingJobName', ], 'TrainingJobArn' => [ 'shape' => 'TrainingJobArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TrainingEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TrainingJobStatus' => [ 'shape' => 'TrainingJobStatus', ], ], ], 'TrainingSpecification' => [ 'type' => 'structure', 'required' => [ 'TrainingImage', 'SupportedTrainingInstanceTypes', 'TrainingChannels', ], 'members' => [ 'TrainingImage' => [ 'shape' => 'Image', ], 'TrainingImageDigest' => [ 'shape' => 'ImageDigest', ], 'SupportedHyperParameters' => [ 'shape' => 'HyperParameterSpecifications', ], 'SupportedTrainingInstanceTypes' => [ 'shape' => 'TrainingInstanceTypes', ], 'SupportsDistributedTraining' => [ 'shape' => 'Boolean', ], 'MetricDefinitions' => [ 'shape' => 'MetricDefinitionList', ], 'TrainingChannels' => [ 'shape' => 'ChannelSpecifications', ], 'SupportedTuningJobObjectiveMetrics' => [ 'shape' => 'HyperParameterTuningJobObjectives', ], ], ], 'TransformDataSource' => [ 'type' => 'structure', 'required' => [ 
'S3DataSource', ], 'members' => [ 'S3DataSource' => [ 'shape' => 'TransformS3DataSource', ], ], ], 'TransformEnvironmentKey' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '[a-zA-Z_][a-zA-Z0-9_]*', ], 'TransformEnvironmentMap' => [ 'type' => 'map', 'key' => [ 'shape' => 'TransformEnvironmentKey', ], 'value' => [ 'shape' => 'TransformEnvironmentValue', ], 'max' => 16, ], 'TransformEnvironmentValue' => [ 'type' => 'string', 'max' => 10240, 'pattern' => '[\\S\\s]*', ], 'TransformInput' => [ 'type' => 'structure', 'required' => [ 'DataSource', ], 'members' => [ 'DataSource' => [ 'shape' => 'TransformDataSource', ], 'ContentType' => [ 'shape' => 'ContentType', ], 'CompressionType' => [ 'shape' => 'CompressionType', ], 'SplitType' => [ 'shape' => 'SplitType', ], ], ], 'TransformInstanceCount' => [ 'type' => 'integer', 'min' => 1, ], 'TransformInstanceType' => [ 'type' => 'string', 'enum' => [ 'ml.m4.xlarge', 'ml.m4.2xlarge', 'ml.m4.4xlarge', 'ml.m4.10xlarge', 'ml.m4.16xlarge', 'ml.c4.xlarge', 'ml.c4.2xlarge', 'ml.c4.4xlarge', 'ml.c4.8xlarge', 'ml.p2.xlarge', 'ml.p2.8xlarge', 'ml.p2.16xlarge', 'ml.p3.2xlarge', 'ml.p3.8xlarge', 'ml.p3.16xlarge', 'ml.c5.xlarge', 'ml.c5.2xlarge', 'ml.c5.4xlarge', 'ml.c5.9xlarge', 'ml.c5.18xlarge', 'ml.m5.large', 'ml.m5.xlarge', 'ml.m5.2xlarge', 'ml.m5.4xlarge', 'ml.m5.12xlarge', 'ml.m5.24xlarge', ], ], 'TransformInstanceTypes' => [ 'type' => 'list', 'member' => [ 'shape' => 'TransformInstanceType', ], 'min' => 1, ], 'TransformJobArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:transform-job/.*', ], 'TransformJobDefinition' => [ 'type' => 'structure', 'required' => [ 'TransformInput', 'TransformOutput', 'TransformResources', ], 'members' => [ 'MaxConcurrentTransforms' => [ 'shape' => 'MaxConcurrentTransforms', ], 'MaxPayloadInMB' => [ 'shape' => 'MaxPayloadInMB', ], 'BatchStrategy' => [ 'shape' => 'BatchStrategy', ], 'Environment' => [ 'shape' => 'TransformEnvironmentMap', ], 'TransformInput' => [ 'shape' => 'TransformInput', ], 'TransformOutput' => [ 'shape' => 'TransformOutput', ], 'TransformResources' => [ 'shape' => 'TransformResources', ], ], ], 'TransformJobName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'TransformJobStatus' => [ 'type' => 'string', 'enum' => [ 'InProgress', 'Completed', 'Failed', 'Stopping', 'Stopped', ], ], 'TransformJobSummaries' => [ 'type' => 'list', 'member' => [ 'shape' => 'TransformJobSummary', ], ], 'TransformJobSummary' => [ 'type' => 'structure', 'required' => [ 'TransformJobName', 'TransformJobArn', 'CreationTime', 'TransformJobStatus', ], 'members' => [ 'TransformJobName' => [ 'shape' => 'TransformJobName', ], 'TransformJobArn' => [ 'shape' => 'TransformJobArn', ], 'CreationTime' => [ 'shape' => 'Timestamp', ], 'TransformEndTime' => [ 'shape' => 'Timestamp', ], 'LastModifiedTime' => [ 'shape' => 'Timestamp', ], 'TransformJobStatus' => [ 'shape' => 'TransformJobStatus', ], 'FailureReason' => [ 'shape' => 'FailureReason', ], ], ], 'TransformOutput' => [ 'type' => 'structure', 'required' => [ 'S3OutputPath', ], 'members' => [ 'S3OutputPath' => [ 'shape' => 'S3Uri', ], 'Accept' => [ 'shape' => 'Accept', ], 'AssembleWith' => [ 'shape' => 'AssemblyType', ], 'KmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'TransformResources' => [ 'type' => 'structure', 'required' => [ 'InstanceType', 'InstanceCount', ], 'members' => [ 'InstanceType' => [ 'shape' => 'TransformInstanceType', ], 'InstanceCount' => [ 'shape' => 
'TransformInstanceCount', ], 'VolumeKmsKeyId' => [ 'shape' => 'KmsKeyId', ], ], ], 'TransformS3DataSource' => [ 'type' => 'structure', 'required' => [ 'S3DataType', 'S3Uri', ], 'members' => [ 'S3DataType' => [ 'shape' => 'S3DataType', ], 'S3Uri' => [ 'shape' => 'S3Uri', ], ], ], 'USD' => [ 'type' => 'structure', 'members' => [ 'Dollars' => [ 'shape' => 'Dollars', ], 'Cents' => [ 'shape' => 'Cents', ], 'TenthFractionsOfACent' => [ 'shape' => 'TenthFractionsOfACent', ], ], ], 'UiConfig' => [ 'type' => 'structure', 'required' => [ 'UiTemplateS3Uri', ], 'members' => [ 'UiTemplateS3Uri' => [ 'shape' => 'S3Uri', ], ], ], 'UiTemplate' => [ 'type' => 'structure', 'required' => [ 'Content', ], 'members' => [ 'Content' => [ 'shape' => 'TemplateContent', ], ], ], 'UpdateCodeRepositoryInput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryName', ], 'members' => [ 'CodeRepositoryName' => [ 'shape' => 'EntityName', ], 'GitConfig' => [ 'shape' => 'GitConfigForUpdate', ], ], ], 'UpdateCodeRepositoryOutput' => [ 'type' => 'structure', 'required' => [ 'CodeRepositoryArn', ], 'members' => [ 'CodeRepositoryArn' => [ 'shape' => 'CodeRepositoryArn', ], ], ], 'UpdateEndpointInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'EndpointConfigName', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'EndpointConfigName' => [ 'shape' => 'EndpointConfigName', ], ], ], 'UpdateEndpointOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointArn', ], 'members' => [ 'EndpointArn' => [ 'shape' => 'EndpointArn', ], ], ], 'UpdateEndpointWeightsAndCapacitiesInput' => [ 'type' => 'structure', 'required' => [ 'EndpointName', 'DesiredWeightsAndCapacities', ], 'members' => [ 'EndpointName' => [ 'shape' => 'EndpointName', ], 'DesiredWeightsAndCapacities' => [ 'shape' => 'DesiredWeightAndCapacityList', ], ], ], 'UpdateEndpointWeightsAndCapacitiesOutput' => [ 'type' => 'structure', 'required' => [ 'EndpointArn', ], 'members' => [ 'EndpointArn' => [ 'shape' => 'EndpointArn', ], ], ], 'UpdateNotebookInstanceInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceName', ], 'members' => [ 'NotebookInstanceName' => [ 'shape' => 'NotebookInstanceName', ], 'InstanceType' => [ 'shape' => 'InstanceType', ], 'RoleArn' => [ 'shape' => 'RoleArn', ], 'LifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'DisassociateLifecycleConfig' => [ 'shape' => 'DisassociateNotebookInstanceLifecycleConfig', ], 'VolumeSizeInGB' => [ 'shape' => 'NotebookInstanceVolumeSizeInGB', ], 'DefaultCodeRepository' => [ 'shape' => 'CodeRepositoryNameOrUrl', ], 'AdditionalCodeRepositories' => [ 'shape' => 'AdditionalCodeRepositoryNamesOrUrls', ], 'AcceleratorTypes' => [ 'shape' => 'NotebookInstanceAcceleratorTypes', ], 'DisassociateAcceleratorTypes' => [ 'shape' => 'DisassociateNotebookInstanceAcceleratorTypes', ], 'DisassociateDefaultCodeRepository' => [ 'shape' => 'DisassociateDefaultCodeRepository', ], 'DisassociateAdditionalCodeRepositories' => [ 'shape' => 'DisassociateAdditionalCodeRepositories', ], 'RootAccess' => [ 'shape' => 'RootAccess', ], ], ], 'UpdateNotebookInstanceLifecycleConfigInput' => [ 'type' => 'structure', 'required' => [ 'NotebookInstanceLifecycleConfigName', ], 'members' => [ 'NotebookInstanceLifecycleConfigName' => [ 'shape' => 'NotebookInstanceLifecycleConfigName', ], 'OnCreate' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], 'OnStart' => [ 'shape' => 'NotebookInstanceLifecycleConfigList', ], ], ], 'UpdateNotebookInstanceLifecycleConfigOutput' => [ 
'type' => 'structure', 'members' => [], ], 'UpdateNotebookInstanceOutput' => [ 'type' => 'structure', 'members' => [], ], 'UpdateWorkteamRequest' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], 'MemberDefinitions' => [ 'shape' => 'MemberDefinitions', ], 'Description' => [ 'shape' => 'String200', ], ], ], 'UpdateWorkteamResponse' => [ 'type' => 'structure', 'required' => [ 'Workteam', ], 'members' => [ 'Workteam' => [ 'shape' => 'Workteam', ], ], ], 'Url' => [ 'type' => 'string', 'max' => 1024, 'pattern' => '^(https|s3)://([^/]+)/?(.*)$', ], 'VariantName' => [ 'type' => 'string', 'max' => 63, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'VariantWeight' => [ 'type' => 'float', 'min' => 0, ], 'VolumeSizeInGB' => [ 'type' => 'integer', 'min' => 1, ], 'VpcConfig' => [ 'type' => 'structure', 'required' => [ 'SecurityGroupIds', 'Subnets', ], 'members' => [ 'SecurityGroupIds' => [ 'shape' => 'VpcSecurityGroupIds', ], 'Subnets' => [ 'shape' => 'Subnets', ], ], ], 'VpcSecurityGroupIds' => [ 'type' => 'list', 'member' => [ 'shape' => 'SecurityGroupId', ], 'max' => 5, 'min' => 1, ], 'Workteam' => [ 'type' => 'structure', 'required' => [ 'WorkteamName', 'MemberDefinitions', 'WorkteamArn', 'Description', ], 'members' => [ 'WorkteamName' => [ 'shape' => 'WorkteamName', ], 'MemberDefinitions' => [ 'shape' => 'MemberDefinitions', ], 'WorkteamArn' => [ 'shape' => 'WorkteamArn', ], 'ProductListingIds' => [ 'shape' => 'ProductListings', ], 'Description' => [ 'shape' => 'String200', ], 'SubDomain' => [ 'shape' => 'String', ], 'CreateDate' => [ 'shape' => 'Timestamp', ], 'LastUpdatedDate' => [ 'shape' => 'Timestamp', ], ], ], 'WorkteamArn' => [ 'type' => 'string', 'max' => 256, 'pattern' => 'arn:aws[a-z\\-]*:sagemaker:[a-z0-9\\-]*:[0-9]{12}:workteam/.*', ], 'WorkteamName' => [ 'type' => 'string', 'max' => 63, 'min' => 1, 'pattern' => '^[a-zA-Z0-9](-*[a-zA-Z0-9])*', ], 'Workteams' => [ 'type' => 'list', 'member' => [ 'shape' => 'Workteam', ], ], ],]; diff --git a/src/data/sagemaker/2017-07-24/docs-2.json b/src/data/sagemaker/2017-07-24/docs-2.json index 5bd3e912a8..28f624e9da 100644 --- a/src/data/sagemaker/2017-07-24/docs-2.json +++ b/src/data/sagemaker/2017-07-24/docs-2.json @@ -1,28 +1,28 @@ { "version": "2.0", - "service": "Definition of the public APIs exposed by SageMaker", + "service": "

Provides APIs for creating and managing Amazon SageMaker resources.

", "operations": { "AddTags": "

Adds or overwrites one or more tags for the specified Amazon SageMaker resource. You can add tags to notebook instances, training jobs, hyperparameter tuning jobs, models, endpoint configurations, and endpoints.

Each tag consists of a key and an optional value. Tag keys must be unique per resource. For more information about tags, see AWS Tagging Strategies.

Tags that you add to a hyperparameter tuning job by calling this API are also added to any training jobs that the hyperparameter tuning job launches after you call this API, but not to training jobs that the hyperparameter tuning job launched before you called this API. To make sure that the tags associated with a hyperparameter tuning job are also added to all training jobs that the hyperparameter tuning job launches, add the tags when you first create the tuning job by specifying them in the Tags parameter of CreateHyperParameterTuningJob.
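As a minimal illustration of the operation described above, here is a hedged sketch using the AWS SDK for PHP's SageMakerClient; the resource ARN and tag values are hypothetical placeholders, not values from this changeset.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Placeholder ARN and tags. Per the note above, tag a tuning job at
// creation time if the tags must propagate to the training jobs it launches.
$client->addTags([
    'ResourceArn' => 'arn:aws:sagemaker:us-east-1:123456789012:training-job/my-training-job',
    'Tags'        => [
        ['Key' => 'project', 'Value' => 'churn-model'],
        ['Key' => 'team',    'Value' => 'data-science'],
    ],
]);
```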

", "CreateAlgorithm": "

Create a machine learning algorithm that you can use in Amazon SageMaker and list in the AWS Marketplace.

", "CreateCodeRepository": "

Creates a Git repository as a resource in your Amazon SageMaker account. You can associate the repository with notebook instances so that you can use Git source control for the notebooks you create. The Git repository is a resource in your Amazon SageMaker account, so it can be associated with more than one notebook instance, and it persists independently from the lifecycle of any notebook instances it is associated with.

The repository can be hosted either in AWS CodeCommit or in any other Git repository.

", "CreateCompilationJob": "

Starts a model compilation job. After the model has been compiled, Amazon SageMaker saves the resulting model artifacts to an Amazon Simple Storage Service (Amazon S3) bucket that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts with AWS IoT Greengrass. In that case, deploy them as an ML resource.

In the request body, you provide the following:

You can also provide a Tag to track the model compilation job's resource use and costs. The response body contains the CompilationJobArn for the compiled job.

To stop a model compilation job, use StopCompilationJob. To get information about a particular model compilation job, use DescribeCompilationJob. To get information about multiple model compilation jobs, use ListCompilationJobs.

", - "CreateEndpoint": "

Creates an endpoint using the endpoint configuration specified in the request. Amazon SageMaker uses the endpoint to provision resources and deploy models. You create the endpoint configuration with the CreateEndpointConfig API.

Use this API only for hosting models using Amazon SageMaker hosting services.

The endpoint name must be unique within an AWS Region in your AWS account.

When it receives the request, Amazon SageMaker creates the endpoint, launches the resources (ML compute instances), and deploys the model(s) on them.

When Amazon SageMaker receives the request, it sets the endpoint status to Creating. After it creates the endpoint, it sets the status to InService. Amazon SageMaker can then process incoming requests for inferences. To check the status of an endpoint, use the DescribeEndpoint API.

For an example, see Exercise 1: Using the K-Means Algorithm Provided by Amazon SageMaker.

If any of the models hosted at this endpoint get model data from an Amazon S3 location, Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you provided. AWS STS is activated in your IAM user account by default. If you previously deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For more information, see Activating and Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User Guide.

", - "CreateEndpointConfig": "

Creates an endpoint configuration that Amazon SageMaker hosting services uses to deploy models. In the configuration, you identify one or more models, created using the CreateModel API, to deploy and the resources that you want Amazon SageMaker to provision. Then you call the CreateEndpoint API.

Use this API only if you want to use Amazon SageMaker hosting services to deploy models into production.

In the request, you define one or more ProductionVariants, each of which identifies a model. Each ProductionVariant parameter also describes the resources that you want Amazon SageMaker to provision. This includes the number and type of ML compute instances to deploy.

If you are hosting multiple models, you also assign a VariantWeight to specify how much traffic you want to allocate to each model. For example, suppose that you want to host two models, A and B, and you assign traffic weight 2 for model A and 1 for model B. Amazon SageMaker distributes two-thirds of the traffic to Model A, and one-third to model B.

", + "CreateEndpoint": "

Creates an endpoint using the endpoint configuration specified in the request. Amazon SageMaker uses the endpoint to provision resources and deploy models. You create the endpoint configuration with the CreateEndpointConfig API.

Use this API only for hosting models using Amazon SageMaker hosting services.

The endpoint name must be unique within an AWS Region in your AWS account.

When it receives the request, Amazon SageMaker creates the endpoint, launches the resources (ML compute instances), and deploys the model(s) on them.

When Amazon SageMaker receives the request, it sets the endpoint status to Creating. After it creates the endpoint, it sets the status to InService. Amazon SageMaker can then process incoming requests for inferences. To check the status of an endpoint, use the DescribeEndpoint API.

For an example, see Exercise 1: Using the K-Means Algorithm Provided by Amazon SageMaker.

If any of the models hosted at this endpoint get model data from an Amazon S3 location, Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you provided. AWS STS is activated in your IAM user account by default. If you previously deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For more information, see Activating and Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User Guide.

", + "CreateEndpointConfig": "

Creates an endpoint configuration that Amazon SageMaker hosting services uses to deploy models. In the configuration, you identify one or more models, created using the CreateModel API, to deploy and the resources that you want Amazon SageMaker to provision. Then you call the CreateEndpoint API.

Use this API only if you want to use Amazon SageMaker hosting services to deploy models into production.

In the request, you define one or more ProductionVariants, each of which identifies a model. Each ProductionVariant parameter also describes the resources that you want Amazon SageMaker to provision. This includes the number and type of ML compute instances to deploy.

If you are hosting multiple models, you also assign a VariantWeight to specify how much traffic you want to allocate to each model. For example, suppose that you want to host two models, A and B, and you assign traffic weight 2 for model A and 1 for model B. Amazon SageMaker distributes two-thirds of the traffic to Model A, and one-third to model B.
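To make the weighted-variant example concrete, here is a minimal sketch that creates a two-variant configuration and then the endpoint; every identifier (config, endpoint, model, and variant names, plus the instance type) is a placeholder, not a value from this changeset.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Weight 2 vs. weight 1: variant-a receives two-thirds of the traffic.
$client->createEndpointConfig([
    'EndpointConfigName' => 'my-endpoint-config',
    'ProductionVariants' => [
        [
            'VariantName'          => 'variant-a',
            'ModelName'            => 'model-a',
            'InstanceType'         => 'ml.m5.large',
            'InitialInstanceCount' => 1,
            'InitialVariantWeight' => 2.0,
        ],
        [
            'VariantName'          => 'variant-b',
            'ModelName'            => 'model-b',
            'InstanceType'         => 'ml.m5.large',
            'InitialInstanceCount' => 1,
            'InitialVariantWeight' => 1.0,
        ],
    ],
]);

// The endpoint name must be unique within the Region for this account.
$client->createEndpoint([
    'EndpointName'       => 'my-endpoint',
    'EndpointConfigName' => 'my-endpoint-config',
]);
```

Because a deployed endpoint cannot be updated with its current EndpointConfig (see UpdateEndpoint below), later traffic changes go through a new config or through UpdateEndpointWeightsAndCapacities.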

", "CreateHyperParameterTuningJob": "

Starts a hyperparameter tuning job. A hyperparameter tuning job finds the best version of a model by running many training jobs on your dataset using the algorithm you choose and values for hyperparameters within ranges that you specify. It then chooses the hyperparameter values that result in a model that performs the best, as measured by an objective metric that you choose.

", "CreateLabelingJob": "

Creates a job that uses workers to label the data objects in your input dataset. You can use the labeled data to train machine learning models.

You can select your workforce from one of three providers:

You can also use automated data labeling to reduce the number of data objects that need to be labeled by a human. Automated data labeling uses active learning to determine if a data object can be labeled by machine or if it needs to be sent to a human worker. For more information, see Using Automated Data Labeling.

The data objects to be labeled are contained in an Amazon S3 bucket. You create a manifest file that describes the location of each object. For more information, see Using Input and Output Data.

The output can be used as the manifest file for another labeling job or as training data for your machine learning models.

", "CreateModel": "

Creates a model in Amazon SageMaker. In the request, you name the model and describe a primary container. For the primary container, you specify the docker image containing inference code, artifacts (from prior training), and custom environment map that the inference code uses when you deploy the model for predictions.

Use this API to create a model if you want to use Amazon SageMaker hosting services or run a batch transform job.

To host your model, you create an endpoint configuration with the CreateEndpointConfig API, and then create an endpoint with the CreateEndpoint API. Amazon SageMaker then deploys all of the containers that you defined for the model in the hosting environment.

To run a batch transform using your model, you start a job with the CreateTransformJob API. Amazon SageMaker uses your model and your dataset to get inferences which are then saved to a specified S3 location.

In the CreateModel request, you must define a container with the PrimaryContainer parameter.

In the request, you also provide an IAM role that Amazon SageMaker can assume to access model artifacts and the docker image for deployment on ML compute hosting instances or for batch transform jobs. In addition, you also use the IAM role to manage permissions the inference code needs. For example, if the inference code accesses any other AWS resources, you grant necessary permissions via this role.
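A minimal CreateModel sketch, assuming a hypothetical ECR image, S3 artifact path, and execution role:

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Image path, model data URL, and role ARN are invented placeholders.
$client->createModel([
    'ModelName'        => 'my-model',
    'PrimaryContainer' => [
        'Image'        => '123456789012.dkr.ecr.us-east-1.amazonaws.com/my-inference-image:latest',
        'ModelDataUrl' => 's3://my-bucket/model/model.tar.gz',
        'Environment'  => ['LOG_LEVEL' => 'info'],
    ],
    // Role Amazon SageMaker assumes to pull the image and artifacts.
    'ExecutionRoleArn' => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
]);
```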

", "CreateModelPackage": "

Creates a model package that you can use to create Amazon SageMaker models or list on AWS Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

To create a model package by specifying a Docker container that contains your inference code and the Amazon S3 location of your model artifacts, provide values for InferenceSpecification. To create a model from an algorithm resource that you created or subscribed to in AWS Marketplace, provide a value for SourceAlgorithmSpecification.

", - "CreateNotebookInstance": "

Creates an Amazon SageMaker notebook instance. A notebook instance is a machine learning (ML) compute instance running the Jupyter Notebook App.

In a CreateNotebookInstance request, specify the type of ML compute instance that you want to run. Amazon SageMaker launches the instance, installs common libraries that you can use to explore datasets for model training, and attaches an ML storage volume to the notebook instance.

Amazon SageMaker also provides a set of example notebooks. Each notebook demonstrates how to use Amazon SageMaker with a specific algorithm or with a machine learning framework.

After receiving the request, Amazon SageMaker does the following:

  1. Creates a network interface in the Amazon SageMaker VPC.

  2. (Optional) If you specified SubnetId, Amazon SageMaker creates a network interface in your own VPC, which is inferred from the subnet ID that you provide in the input. When creating this network interface, Amazon SageMaker attaches the security group that you specified in the request to the network interface that it creates in your VPC.

  3. Launches an EC2 instance of the type specified in the request in the Amazon SageMaker VPC. If you specified SubnetId of your VPC, Amazon SageMaker specifies both network interfaces when launching this instance. This enables inbound traffic from your own VPC to the notebook instance, assuming that the security groups allow it.

After creating the notebook instance, Amazon SageMaker returns its Amazon Resource Name (ARN).

After Amazon SageMaker creates the notebook instance, you can connect to the Jupyter server and work in Jupyter notebooks. For example, you can write code to explore a dataset that you can use for model training, train a model, host models by creating Amazon SageMaker endpoints, and validate hosted models.

For more information, see How It Works.

", - "CreateNotebookInstanceLifecycleConfig": "

Creates a lifecycle configuration that you can associate with a notebook instance. A lifecycle configuration is a collection of shell scripts that run when you create or start a notebook instance.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:/bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", - "CreatePresignedNotebookInstanceUrl": "

Returns a URL that you can use to connect to the Jupyter server from a notebook instance. In the Amazon SageMaker console, when you choose Open next to a notebook instance, Amazon SageMaker opens a new tab showing the Jupyter server home page from the notebook instance. The console uses this API to get the URL and show the page.

You can restrict access to this API and to the URL that it returns to a list of IP addresses that you specify. To restrict access, attach an IAM policy that denies access to this API unless the call comes from an IP address in the specified list to every AWS Identity and Access Management user, group, or role used to access the notebook instance. Use the NotIpAddress condition operator and the aws:SourceIP condition context key to specify the list of IP addresses that you want to have access to the notebook instance. For more information, see Limit Access to a Notebook Instance by IP Address.

", - "CreateTrainingJob": "

Starts a model training job. After training completes, Amazon SageMaker saves the resulting model artifacts to an Amazon S3 location that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts in a deep learning service other than Amazon SageMaker, provided that you know how to use them for inferences.

In the request body, you provide the following:

For more information about Amazon SageMaker, see How It Works.

", - "CreateTransformJob": "

Starts a transform job. A transform job uses a trained model to get inferences on a dataset and saves these results to an Amazon S3 location that you specify.

To perform batch transformations, you create a transform job and use the data that you have readily available.

In the request body, you provide the following:

For more information about how batch transformation works in Amazon SageMaker, see How It Works.

", + "CreateNotebookInstance": "

Creates an Amazon SageMaker notebook instance. A notebook instance is a machine learning (ML) compute instance running the Jupyter Notebook App.

In a CreateNotebookInstance request, specify the type of ML compute instance that you want to run. Amazon SageMaker launches the instance, installs common libraries that you can use to explore datasets for model training, and attaches an ML storage volume to the notebook instance.

Amazon SageMaker also provides a set of example notebooks. Each notebook demonstrates how to use Amazon SageMaker with a specific algorithm or with a machine learning framework.

After receiving the request, Amazon SageMaker does the following:

  1. Creates a network interface in the Amazon SageMaker VPC.

  2. (Optional) If you specified SubnetId, Amazon SageMaker creates a network interface in your own VPC, which is inferred from the subnet ID that you provide in the input. When creating this network interface, Amazon SageMaker attaches the security group that you specified in the request to the network interface that it creates in your VPC.

  3. Launches an EC2 instance of the type specified in the request in the Amazon SageMaker VPC. If you specified SubnetId of your VPC, Amazon SageMaker specifies both network interfaces when launching this instance. This enables inbound traffic from your own VPC to the notebook instance, assuming that the security groups allow it.

After creating the notebook instance, Amazon SageMaker returns its Amazon Resource Name (ARN).

After Amazon SageMaker creates the notebook instance, you can connect to the Jupyter server and work in Jupyter notebooks. For example, you can write code to explore a dataset that you can use for model training, train a model, host models by creating Amazon SageMaker endpoints, and validate hosted models.

For more information, see How It Works.
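A short sketch of the request described above; the subnet, security group, and role ARN are placeholders for illustration only, and the optional VPC parameters can be omitted to stay in the SageMaker VPC.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$result = $client->createNotebookInstance([
    'NotebookInstanceName' => 'my-notebook',
    'InstanceType'         => 'ml.t2.medium',
    'RoleArn'              => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
    // Optional: place the second network interface in your own VPC.
    'SubnetId'             => 'subnet-0123456789abcdef0',
    'SecurityGroupIds'     => ['sg-0123456789abcdef0'],
    'VolumeSizeInGB'       => 10,
]);

echo $result['NotebookInstanceArn'], PHP_EOL; // ARN returned after creation
```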

", + "CreateNotebookInstanceLifecycleConfig": "

Creates a lifecycle configuration that you can associate with a notebook instance. A lifecycle configuration is a collection of shell scripts that run when you create or start a notebook instance.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:/bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.
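Each hook carries a base64-encoded shell script, so a sketch might look like the following; the config name and script body are invented, and the script must respect the 5-minute limit noted above.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Hypothetical OnStart script; it runs every time the instance starts.
$onStart = <<<'SH'
#!/bin/bash
set -e
echo "notebook instance started at $(date)" >> /home/ec2-user/SageMaker/start.log
SH;

$client->createNotebookInstanceLifecycleConfig([
    'NotebookInstanceLifecycleConfigName' => 'my-lifecycle-config',
    'OnStart' => [
        // Content is the base64-encoded script text.
        ['Content' => base64_encode($onStart)],
    ],
]);
```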

", + "CreatePresignedNotebookInstanceUrl": "

Returns a URL that you can use to connect to the Jupyter server from a notebook instance. In the Amazon SageMaker console, when you choose Open next to a notebook instance, Amazon SageMaker opens a new tab showing the Jupyter server home page from the notebook instance. The console uses this API to get the URL and show the page.

You can restrict access to this API and to the URL that it returns to a list of IP addresses that you specify. To restrict access, attach an IAM policy that denies access to this API unless the call comes from an IP address in the specified list to every AWS Identity and Access Management user, group, or role used to access the notebook instance. Use the NotIpAddress condition operator and the aws:SourceIP condition context key to specify the list of IP addresses that you want to have access to the notebook instance. For more information, see Limit Access to a Notebook Instance by IP Address.
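A minimal call sketch; the notebook name is a placeholder, and the 1800-second session duration is an assumption chosen for illustration.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$result = $client->createPresignedNotebookInstanceUrl([
    'NotebookInstanceName'               => 'my-notebook',
    // Optional: how long the login session stays valid, in seconds.
    'SessionExpirationDurationInSeconds' => 1800,
]);

echo $result['AuthorizedUrl'], PHP_EOL;
```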

", + "CreateTrainingJob": "

Starts a model training job. After training completes, Amazon SageMaker saves the resulting model artifacts to an Amazon S3 location that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts in a machine learning service other than Amazon SageMaker, provided that you know how to use them for inferences.

In the request body, you provide the following:

For more information about Amazon SageMaker, see How It Works.
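The request body sketched below names the usual pieces (algorithm image, role, input and output locations, compute resources, and a stopping condition); all identifiers are placeholders, not values from this changeset.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$client->createTrainingJob([
    'TrainingJobName'        => 'my-training-job',
    'AlgorithmSpecification' => [
        'TrainingImage'     => '123456789012.dkr.ecr.us-east-1.amazonaws.com/my-training-image:latest',
        'TrainingInputMode' => 'File',
    ],
    'RoleArn'          => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
    'InputDataConfig'  => [
        [
            'ChannelName' => 'train',
            'DataSource'  => [
                'S3DataSource' => [
                    'S3DataType' => 'S3Prefix',
                    'S3Uri'      => 's3://my-bucket/train/',
                ],
            ],
        ],
    ],
    'OutputDataConfig' => ['S3OutputPath' => 's3://my-bucket/output/'],
    'ResourceConfig'   => [
        'InstanceType'   => 'ml.m4.xlarge',
        'InstanceCount'  => 1,
        'VolumeSizeInGB' => 50,
    ],
    'StoppingCondition' => ['MaxRuntimeInSeconds' => 3600],
    // Optional: encrypt traffic between instances in distributed training.
    'EnableInterContainerTrafficEncryption' => true,
]);
```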

", + "CreateTransformJob": "

Starts a transform job. A transform job uses a trained model to get inferences on a dataset and saves these results to an Amazon S3 location that you specify.

To perform batch transformations, you create a transform job and use the data that you have readily available.

In the request body, you provide the following:

For more information about how batch transformation works in Amazon SageMaker, see How It Works.
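A hedged sketch tying together the transform shapes defined earlier in this file (TransformInput, TransformOutput, TransformResources); the model name and S3 paths are placeholders.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$client->createTransformJob([
    'TransformJobName' => 'my-transform-job',
    'ModelName'        => 'my-model',
    // One line of the CSV input is one record; pack as many records per
    // HTTP request as fit within MaxPayloadInMB.
    'BatchStrategy'    => 'MultiRecord',
    'MaxPayloadInMB'   => 6,
    'TransformInput'   => [
        'DataSource'  => [
            'S3DataSource' => [
                'S3DataType' => 'S3Prefix',
                'S3Uri'      => 's3://my-bucket/batch-input/',
            ],
        ],
        'ContentType' => 'text/csv',
        'SplitType'   => 'Line',
    ],
    'TransformOutput'    => ['S3OutputPath' => 's3://my-bucket/batch-output/'],
    'TransformResources' => [
        'InstanceType'  => 'ml.m4.xlarge',
        'InstanceCount' => 1,
    ],
]);
```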

", "CreateWorkteam": "

Creates a new work team for labeling your data. A work team is defined by one or more Amazon Cognito user pools. You must first create the user pools before you can create a work team.

You cannot create more than 25 work teams in an account and region.
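A minimal sketch, assuming an existing Amazon Cognito user pool, group, and app client; all IDs below are invented.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$client->createWorkteam([
    'WorkteamName'      => 'my-labeling-team',
    'Description'       => 'Private work team for image labeling',
    'MemberDefinitions' => [
        [
            // Placeholder Cognito identifiers; the pool and group must exist.
            'CognitoMemberDefinition' => [
                'UserPool'  => 'us-east-1_AbCdEfGhI',
                'UserGroup' => 'labelers',
                'ClientId'  => '1h57kf5cpq17m0eml12EXAMPLE',
            ],
        ],
    ],
]);
```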

", "DeleteAlgorithm": "

Removes the specified algorithm from your account.

", "DeleteCodeRepository": "

Deletes the specified Git repository from your account.

", "DeleteEndpoint": "

Deletes an endpoint. Amazon SageMaker frees up all of the resources that were deployed when the endpoint was created.

Amazon SageMaker retires any custom KMS key grants associated with the endpoint, meaning you don't need to use the RevokeGrant API call.

", "DeleteEndpointConfig": "

Deletes an endpoint configuration. The DeleteEndpointConfig API deletes only the specified configuration. It does not delete endpoints created using the configuration.

", - "DeleteModel": "

Deletes a model. The DeleteModel API deletes only the model entry that was created in Amazon SageMaker when you called the CreateModel API. It does not delete model artifacts, inference code, or the IAM role that you specified when creating the model.

", + "DeleteModel": "

Deletes a model. The DeleteModel API deletes only the model entry that was created in Amazon SageMaker when you called the CreateModel API. It does not delete model artifacts, inference code, or the IAM role that you specified when creating the model.

", "DeleteModelPackage": "

Deletes a model package.

A model package is used to create Amazon SageMaker models or list on AWS Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

", "DeleteNotebookInstance": "

Deletes an Amazon SageMaker notebook instance. Before you can delete a notebook instance, you must call the StopNotebookInstance API.

When you delete a notebook instance, you lose all of your data. Amazon SageMaker removes the ML compute instance, and deletes the ML storage volume and the network interface associated with the notebook instance.

", "DeleteNotebookInstanceLifecycleConfig": "

Deletes a notebook instance lifecycle configuration.

", @@ -38,7 +38,7 @@ "DescribeModel": "

Describes a model that you created using the CreateModel API.

", "DescribeModelPackage": "

Returns a description of the specified model package, which is used to create Amazon SageMaker models or list them on AWS Marketplace.

To create models in Amazon SageMaker, buyers can subscribe to model packages listed on AWS Marketplace.

", "DescribeNotebookInstance": "

Returns information about a notebook instance.

", - "DescribeNotebookInstanceLifecycleConfig": "

Returns a description of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", + "DescribeNotebookInstanceLifecycleConfig": "

Returns a description of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", "DescribeSubscribedWorkteam": "

Gets information about a work team provided by a vendor. It returns details about the subscription with a vendor in the AWS Marketplace.

", "DescribeTrainingJob": "

Returns information about a training job.

", "DescribeTransformJob": "

Returns information about a transform job.

", @@ -53,7 +53,7 @@ "ListLabelingJobs": "

Gets a list of labeling jobs.

", "ListLabelingJobsForWorkteam": "

Gets a list of labeling jobs assigned to a specified work team.

", "ListModelPackages": "

Lists the model packages that have been created.

", - "ListModels": "

Lists models created with the CreateModel API.

", + "ListModels": "

Lists models created with the CreateModel API.

", "ListNotebookInstanceLifecycleConfigs": "

Lists notebook instance lifecycle configurations created with the CreateNotebookInstanceLifecycleConfig API.

", "ListNotebookInstances": "

Returns a list of the Amazon SageMaker notebook instances in the requester's account in an AWS Region.

", "ListSubscribedWorkteams": "

Gets a list of the work teams that you are subscribed to in the AWS Marketplace. The list may be empty if no work team satisfies the filter specified in the NameContains parameter.

", @@ -69,11 +69,11 @@ "StopHyperParameterTuningJob": "

Stops a running hyperparameter tuning job and all running training jobs that the tuning job launched.

All model artifacts output from the training jobs are stored in Amazon Simple Storage Service (Amazon S3). All data that the training jobs write to Amazon CloudWatch Logs are still available in CloudWatch. After the tuning job moves to the Stopped state, it releases all reserved resources for the tuning job.

", "StopLabelingJob": "

Stops a running labeling job. A job that is stopped cannot be restarted. Any results obtained before the job is stopped are placed in the Amazon S3 output bucket.

", "StopNotebookInstance": "

Terminates the ML compute instance. Before terminating the instance, Amazon SageMaker disconnects the ML storage volume from it. Amazon SageMaker preserves the ML storage volume.

To access data on the ML storage volume for a notebook instance that has been terminated, call the StartNotebookInstance API. StartNotebookInstance launches another ML compute instance, configures it, and attaches the preserved ML storage volume so you can continue your work.

", - "StopTrainingJob": "

Stops a training job. To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts, so the results of the training are not lost.

Training algorithms provided by Amazon SageMaker save the intermediate results of a model training job. This intermediate data is a valid model artifact. You can use the model artifacts that are saved when Amazon SageMaker stops a training job to create a model.

When it receives a StopTrainingJob request, Amazon SageMaker changes the status of the job to Stopping. After Amazon SageMaker stops the job, it sets the status to Stopped.

", + "StopTrainingJob": "

Stops a training job. To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts, so the results of the training are not lost.

When it receives a StopTrainingJob request, Amazon SageMaker changes the status of the job to Stopping. After Amazon SageMaker stops the job, it sets the status to Stopped.
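A small sketch of the stop-and-poll pattern this paragraph describes; the job name is a placeholder.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$client->stopTrainingJob(['TrainingJobName' => 'my-training-job']);

// The job moves to Stopping, then Stopped; poll until it settles.
do {
    sleep(30);
    $status = $client->describeTrainingJob([
        'TrainingJobName' => 'my-training-job',
    ])['TrainingJobStatus'];
} while ($status === 'Stopping');

echo $status, PHP_EOL; // Expected: Stopped
```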

", "StopTransformJob": "

Stops a transform job.

When Amazon SageMaker receives a StopTransformJob request, the status of the job changes to Stopping. After Amazon SageMaker stops the job, the status is set to Stopped. When you stop a transform job before it is completed, Amazon SageMaker doesn't store the job's output in Amazon S3.

", "UpdateCodeRepository": "

Updates the specified Git repository with the specified values.

", - "UpdateEndpoint": "

Deploys the new EndpointConfig specified in the request, switches to using the newly created endpoint, and then deletes resources provisioned for the endpoint using the previous EndpointConfig (there is no availability loss).

When Amazon SageMaker receives the request, it sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.

You cannot update an endpoint with the current EndpointConfig. To update an endpoint, you must create a new EndpointConfig.

", - "UpdateEndpointWeightsAndCapacities": "

Updates variant weight of one or more variants associated with an existing endpoint, or capacity of one variant associated with an existing endpoint. When it receives the request, Amazon SageMaker sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.

", + "UpdateEndpoint": "

Deploys the new EndpointConfig specified in the request, switches to using the newly created endpoint, and then deletes resources provisioned for the endpoint using the previous EndpointConfig (there is no availability loss).

When Amazon SageMaker receives the request, it sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.

You cannot update an endpoint with the current EndpointConfig. To update an endpoint, you must create a new EndpointConfig.

", + "UpdateEndpointWeightsAndCapacities": "

Updates variant weight of one or more variants associated with an existing endpoint, or capacity of one variant associated with an existing endpoint. When it receives the request, Amazon SageMaker sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.
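Using the DesiredWeightAndCapacityList shape defined earlier in this changeset, here is a sketch that shifts traffic between two hypothetical variants on a live endpoint.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Placeholder endpoint and variant names; variant-b now gets 3/4 of traffic.
$client->updateEndpointWeightsAndCapacities([
    'EndpointName'                => 'my-endpoint',
    'DesiredWeightsAndCapacities' => [
        ['VariantName' => 'variant-a', 'DesiredWeight' => 1.0],
        ['VariantName' => 'variant-b', 'DesiredWeight' => 3.0],
    ],
]);
```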

", "UpdateNotebookInstance": "

Updates a notebook instance. NotebookInstance updates include upgrading or downgrading the ML compute instance used for your notebook instance to accommodate changes in your workload requirements. You can also update the VPC security groups.
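The UpdateNotebookInstanceInput shape earlier in this changeset now includes a RootAccess member; a sketch of using it follows. The instance name and type are placeholders, and the 'Disabled' value is an assumption inferred from the release note about enabling or disabling root access.

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// The notebook instance generally must be stopped before these updates apply.
$client->updateNotebookInstance([
    'NotebookInstanceName' => 'my-notebook',
    'InstanceType'         => 'ml.m5.xlarge',
    // New in this API revision: turn off root access for notebook users.
    'RootAccess'           => 'Disabled',
]);
```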

", "UpdateNotebookInstanceLifecycleConfig": "

Updates a notebook instance lifecycle configuration created with the CreateNotebookInstanceLifecycleConfig API.

", "UpdateWorkteam": "

Updates an existing work team with new member definitions or description.

" @@ -107,7 +107,7 @@ "CreateNotebookInstanceInput$AdditionalCodeRepositories": "

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

", "DescribeNotebookInstanceOutput$AdditionalCodeRepositories": "

An array of up to three Git repositories associated with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

", "NotebookInstanceSummary$AdditionalCodeRepositories": "

An array of up to three Git repositories associated with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

", - "UpdateNotebookInstanceInput$AdditionalCodeRepositories": "

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository.. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

" + "UpdateNotebookInstanceInput$AdditionalCodeRepositories": "

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

" } }, "AlgorithmArn": { @@ -121,8 +121,8 @@ "AlgorithmImage": { "base": null, "refs": { - "AlgorithmSpecification$TrainingImage": "

The registry path of the Docker image that contains the training algorithm. For information about docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters.

", - "HyperParameterAlgorithmSpecification$TrainingImage": "

The registry path of the Docker image that contains the training algorithm. For information about Docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters.

" + "AlgorithmSpecification$TrainingImage": "

The registry path of the Docker image that contains the training algorithm. For information about docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

", + "HyperParameterAlgorithmSpecification$TrainingImage": "

The registry path of the Docker image that contains the training algorithm. For information about Docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

" } }, "AlgorithmSortBy": { @@ -132,9 +132,9 @@ } }, "AlgorithmSpecification": { - "base": "

Specifies the training algorithm to use in a CreateTrainingJob request.

For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about using your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

", + "base": "

Specifies the training algorithm to use in a CreateTrainingJob request.

For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about using your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

", "refs": { - "CreateTrainingJobRequest$AlgorithmSpecification": "

The registry path of the Docker image that contains the training algorithm and algorithm-specific metadata, including the input mode. For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about providing your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

", + "CreateTrainingJobRequest$AlgorithmSpecification": "

The registry path of the Docker image that contains the training algorithm and algorithm-specific metadata, including the input mode. For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about providing your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

", "DescribeTrainingJobResponse$AlgorithmSpecification": "

Information about the algorithm used for training, and algorithm metadata.

", "TrainingJob$AlgorithmSpecification": "

Information about the algorithm used for training, and algorithm metadata.

" } @@ -234,8 +234,8 @@ "BatchStrategy": { "base": null, "refs": { - "CreateTransformJobRequest$BatchStrategy": "

Determines the number of records to include in a mini-batch. If you want to include only one record in a mini-batch, specify SingleRecord.. If you want mini-batches to contain a maximum of the number of records specified in the MaxPayloadInMB parameter, specify MultiRecord.

If you set SplitType to Line and BatchStrategy to MultiRecord, a batch transform automatically splits your input data into the specified payload size. There's no need to split the dataset into smaller files or to use larger payload sizes unless the records in your dataset are very large.

", - "DescribeTransformJobResponse$BatchStrategy": "

If you want to include only one record in a batch, specify SingleRecord.. If you want batches to contain a maximum of the number of records specified in the MaxPayloadInMB parameter, specify MultiRecord.S

", + "CreateTransformJobRequest$BatchStrategy": "

Specifies the number of records to include in a mini-batch for an HTTP inference request. A record is a single unit of input data that inference can be made on. For example, a single line in a CSV file is a record.

To enable the batch strategy, you must set SplitType to Line, RecordIO, or TFRecord.

To use only one record when making an HTTP invocation request to a container, set BatchStrategy to SingleRecord and SplitType to Line.

To fit as many records in a mini-batch as can fit within the MaxPayloadInMB limit, set BatchStrategy to MultiRecord and SplitType to Line.

", + "DescribeTransformJobResponse$BatchStrategy": "

Specifies the number of records to include in a mini-batch for an HTTP inference request. A record is a single unit of input data that inference can be made on. For example, a single line in a CSV file is a record.

To enable the batch strategy, you must set SplitType to Line, RecordIO, or TFRecord.

", "TransformJobDefinition$BatchStrategy": "

A string that determines the number of records included in a single mini-batch.

SingleRecord means only one record is used per mini-batch. MultiRecord means a mini-batch is set to contain as many records that can fit within the MaxPayloadInMB limit.

" } }, @@ -245,15 +245,16 @@ "ChannelSpecification$IsRequired": "

Indicates whether the channel is required by the algorithm.

", "CreateModelInput$EnableNetworkIsolation": "

Isolates the model container. No inbound or outbound network calls can be made to or from the model container.

The Semantic Segmentation built-in algorithm does not support network isolation.

", "CreateTrainingJobRequest$EnableNetworkIsolation": "

Isolates the training container. No inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

", - "CreateTrainingJobRequest$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, choose True,. Encryption provides greater security for distributed training, but training can take longer because of additional communications between ML compute instances.

", + "CreateTrainingJobRequest$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training. For more information, see Protect Communications Between ML Compute Instances in a Distributed Training Job.

", "DescribeModelOutput$EnableNetworkIsolation": "

If True, no inbound or outbound network calls can be made to or from the model container.

The Semantic Segmentation built-in algorithm does not support network isolation.

", "DescribeTrainingJobResponse$EnableNetworkIsolation": "

If you want to allow inbound or outbound network calls, except for calls between peers within a training cluster for distributed training, choose True. If you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

", - "DescribeTrainingJobResponse$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, specify True. Encryption provides greater security for distributed training, but training take longer because of the additional communications between ML compute instances.

", + "DescribeTrainingJobResponse$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training.

", "HyperParameterSpecification$IsTunable": "

Indicates whether this hyperparameter is tunable in a hyperparameter tuning job.

", "HyperParameterSpecification$IsRequired": "

Indicates whether this hyperparameter is required.

", "HyperParameterTrainingJobDefinition$EnableNetworkIsolation": "

Isolates the training container. No inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If network isolation is used for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

", - "HyperParameterTrainingJobDefinition$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, specify True. Encryption provides greater security for distributed training, but training take longer because of the additional communications between ML compute instances.

", + "HyperParameterTrainingJobDefinition$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training.

", "TrainingJob$EnableNetworkIsolation": "

If the TrainingJob was created with network isolation, the value is set to true. If network isolation is enabled, nodes can't communicate beyond the VPC they run in.

", + "TrainingJob$EnableInterContainerTrafficEncryption": "

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training.

", "TrainingSpecification$SupportsDistributedTraining": "

Indicates whether the algorithm supports distributed training. If set to false, buyers can’t request more than one instance during training.

" } }, @@ -467,7 +468,7 @@ "ContainerHostname": { "base": null, "refs": { - "ContainerDefinition$ContainerHostname": "

The DNS host name for the container after Amazon SageMaker deploys it.

", + "ContainerDefinition$ContainerHostname": "

This parameter is ignored.

", "ModelPackageContainerDefinition$ContainerHostname": "

The DNS host name for the Docker container.

" } }, @@ -956,8 +957,8 @@ "DirectInternetAccess": { "base": null, "refs": { - "CreateNotebookInstanceInput$DirectInternetAccess": "

Sets whether Amazon SageMaker provides internet access to the notebook instance. If you set this to Disabled, this notebook instance will be able to access resources only in your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless you configure a NAT Gateway in your VPC.

For more information, see Notebook Instances Are Internet-Enabled by Default. You can set the value of this parameter to Disabled only if you set a value for the SubnetId parameter.

", - "DescribeNotebookInstanceOutput$DirectInternetAccess": "

Describes whether Amazon SageMaker provides internet access to the notebook instance. If this value is set to Disabled, he notebook instance does not have internet access, and cannot connect to Amazon SageMaker training and endpoint services.

For more information, see Notebook Instances Are Internet-Enabled by Default.

" + "CreateNotebookInstanceInput$DirectInternetAccess": "

Sets whether Amazon SageMaker provides internet access to the notebook instance. If you set this to Disabled, this notebook instance will be able to access resources only in your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless you configure a NAT Gateway in your VPC.

For more information, see Notebook Instances Are Internet-Enabled by Default. You can set the value of this parameter to Disabled only if you set a value for the SubnetId parameter.

", + "DescribeNotebookInstanceOutput$DirectInternetAccess": "

Describes whether Amazon SageMaker provides internet access to the notebook instance. If this value is set to Disabled, the notebook instance does not have internet access, and cannot connect to Amazon SageMaker training and endpoint services.

For more information, see Notebook Instances Are Internet-Enabled by Default.

" } }, "DisassociateAdditionalCodeRepositories": { @@ -1011,8 +1012,8 @@ "EndpointConfigName": { "base": null, "refs": { - "CreateEndpointConfigInput$EndpointConfigName": "

The name of the endpoint configuration. You specify this name in a CreateEndpoint request.

", - "CreateEndpointInput$EndpointConfigName": "

The name of an endpoint configuration. For more information, see CreateEndpointConfig.

", + "CreateEndpointConfigInput$EndpointConfigName": "

The name of the endpoint configuration. You specify this name in a CreateEndpoint request.

", + "CreateEndpointInput$EndpointConfigName": "

The name of an endpoint configuration. For more information, see CreateEndpointConfig.

", "DeleteEndpointConfigInput$EndpointConfigName": "

The name of the endpoint configuration that you want to delete.

", "DescribeEndpointConfigInput$EndpointConfigName": "

The name of the endpoint configuration.

", "DescribeEndpointConfigOutput$EndpointConfigName": "

Name of the Amazon SageMaker endpoint configuration.

", @@ -1073,7 +1074,7 @@ "base": null, "refs": { "DescribeEndpointOutput$EndpointStatus": "

The status of the endpoint.

", - "EndpointSummary$EndpointStatus": "

The status of the endpoint.

To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

", + "EndpointSummary$EndpointStatus": "

The status of the endpoint.

To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

", "ListEndpointsInput$StatusEquals": "

A filter that returns only endpoints with the specified status.

" } }, @@ -1157,7 +1158,7 @@ "DescribeLabelingJobResponse$FailureReason": "

If the job failed, the reason that it failed.

", "DescribeNotebookInstanceOutput$FailureReason": "

If status is Failed, the reason it failed.

", "DescribeTrainingJobResponse$FailureReason": "

If the training job failed, the reason it failed.

", - "DescribeTransformJobResponse$FailureReason": "

If the transform job failed, the reason that it failed.

", + "DescribeTransformJobResponse$FailureReason": "

If the transform job failed, FailureReason describes why it failed. A transform job creates a log file, which includes error messages, and stores it as an Amazon S3 object. For more information, see Log Amazon SageMaker Events with Amazon CloudWatch.

", "HyperParameterTrainingJobSummary$FailureReason": "

The reason that the training job failed.

", "LabelingJobSummary$FailureReason": "

If the LabelingJobStatus field is Failed, this field contains a description of the error.

", "ResourceInUse$Message": null, @@ -1386,7 +1387,7 @@ "HyperParameters": { "base": null, "refs": { - "CreateTrainingJobRequest$HyperParameters": "

Algorithm-specific parameters that influence the quality of the model. You set hyperparameters before you start the learning process. For a list of hyperparameters for each training algorithm provided by Amazon SageMaker, see Algorithms.

You can specify a maximum of 100 hyperparameters. Each hyperparameter is a key-value pair. Each key and value is limited to 256 characters, as specified by the Length Constraint.

", + "CreateTrainingJobRequest$HyperParameters": "

Algorithm-specific parameters that influence the quality of the model. You set hyperparameters before you start the learning process. For a list of hyperparameters for each training algorithm provided by Amazon SageMaker, see Algorithms.

You can specify a maximum of 100 hyperparameters. Each hyperparameter is a key-value pair. Each key and value is limited to 256 characters, as specified by the Length Constraint.

", "DescribeTrainingJobResponse$HyperParameters": "

Algorithm-specific parameters.

", "HyperParameterTrainingJobDefinition$StaticHyperParameters": "

Specifies the values of hyperparameters that do not change for the tuning job.

", "HyperParameterTrainingJobSummary$TunedHyperParameters": "

A list of the hyperparameters for which you specified ranges to search.

", @@ -1397,10 +1398,10 @@ "Image": { "base": null, "refs": { - "ContainerDefinition$Image": "

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored. If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker

", + "ContainerDefinition$Image": "

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored. If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker

", "DeployedImage$SpecifiedImage": "

The image path you specified when you created the model.

", "DeployedImage$ResolvedImage": "

The specific digest path of the image hosted in this ProductionVariant.

", - "ModelPackageContainerDefinition$Image": "

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

", + "ModelPackageContainerDefinition$Image": "

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

", "TrainingSpecification$TrainingImage": "

The Amazon ECR registry path of the Docker image that contains the training algorithm.

" } }, @@ -1892,7 +1893,7 @@ "MaxConcurrentTransforms": { "base": null, "refs": { - "CreateTransformJobRequest$MaxConcurrentTransforms": "

The maximum number of parallel requests that can be sent to an algorithm container on an instance. This is good for algorithms that implement multiple workers on larger instances . The default value is 1. To allow Amazon SageMaker to determine the appropriate number for MaxConcurrentTransforms, do not set the value in the API.

", + "CreateTransformJobRequest$MaxConcurrentTransforms": "

The maximum number of parallel requests that can be sent to each instance in a transform job. The default value is 1. To allow Amazon SageMaker to determine the appropriate number for MaxConcurrentTransforms, set the value to 0.

", "DescribeTransformJobResponse$MaxConcurrentTransforms": "

The maximum number of parallel requests on each instance node that can be launched in a transform job. The default value is 1.

", "TransformJobDefinition$MaxConcurrentTransforms": "

The maximum number of parallel requests that can be sent to each instance in a transform job. The default value is 1.

" } @@ -1918,7 +1919,7 @@ "MaxPayloadInMB": { "base": null, "refs": { - "CreateTransformJobRequest$MaxPayloadInMB": "

The maximum payload size allowed, in MB. A payload is the data portion of a record (without metadata). The value in MaxPayloadInMB must be greater or equal to the size of a single record. You can approximate the size of a record by dividing the size of your dataset by the number of records. Then multiply this value by the number of records you want in a mini-batch. We recommend to enter a slightly larger value than this to ensure the records fit within the maximum payload size. The default value is 6 MB.

For cases where the payload might be arbitrarily large and is transmitted using HTTP chunked encoding, set the value to 0. This feature only works in supported algorithms. Currently, Amazon SageMaker built-in algorithms do not support this feature.

", + "CreateTransformJobRequest$MaxPayloadInMB": "

The maximum allowed size of the payload, in MB. A payload is the data portion of a record (without metadata). The value in MaxPayloadInMB must be greater than, or equal to, the size of a single record. To estimate the size of a record in MB, divide the size of your dataset by the number of records. To ensure that the records fit within the maximum payload size, we recommend using a slightly larger value. The default value is 6 MB.

For cases where the payload might be arbitrarily large and is transmitted using HTTP chunked encoding, set the value to 0. This feature works only in supported algorithms. Currently, Amazon SageMaker built-in algorithms do not support HTTP chunked encoding.
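As a hedged aside, the sizing guidance above reduces to simple arithmetic; this sketch uses hypothetical dataset numbers and adds roughly 10% headroom, as the description recommends.

    <?php
    // Estimate MaxPayloadInMB per the guidance above (hypothetical numbers).
    $datasetSizeMb   = 2048;    // assumed total dataset size: 2 GB
    $recordCount     = 400000;  // assumed number of records
    $recordsPerBatch = 1000;    // desired records per mini-batch

    $recordSizeMb   = $datasetSizeMb / $recordCount;                       // ~0.005 MB per record
    $maxPayloadInMb = (int) ceil($recordSizeMb * $recordsPerBatch * 1.1);  // slight headroom

    echo $maxPayloadInMb;  // 6 here, which matches the 6 MB default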

", "DescribeTransformJobResponse$MaxPayloadInMB": "

The maximum payload size, in MB, used in the transform job.

", "TransformJobDefinition$MaxPayloadInMB": "

The maximum payload size allowed, in MB. A payload is the data portion of a record (without metadata).

" } @@ -2004,7 +2005,7 @@ "MetricRegex": { "base": null, "refs": { - "MetricDefinition$Regex": "

A regular expression that searches the output of a training job and gets the value of the metric. For more information about using regular expressions to define metrics, see Defining Objective Metrics.

" + "MetricDefinition$Regex": "

A regular expression that searches the output of a training job and gets the value of the metric. For more information about using regular expressions to define metrics, see Defining Objective Metrics.

" } }, "MetricValue": { @@ -2268,16 +2269,16 @@ "NotebookInstanceLifecycleConfigName": { "base": null, "refs": { - "CreateNotebookInstanceInput$LifecycleConfigName": "

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", + "CreateNotebookInstanceInput$LifecycleConfigName": "

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", "CreateNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName": "

The name of the lifecycle configuration.

", "DeleteNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName": "

The name of the lifecycle configuration to delete.

", "DescribeNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName": "

The name of the lifecycle configuration to describe.

", "DescribeNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigName": "

The name of the lifecycle configuration.

", - "DescribeNotebookInstanceOutput$NotebookInstanceLifecycleConfigName": "

Returns the name of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", + "DescribeNotebookInstanceOutput$NotebookInstanceLifecycleConfigName": "

Returns the name of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", "ListNotebookInstancesInput$NotebookInstanceLifecycleConfigNameContains": "

A string in the name of a notebook instance lifecycle configuration associated with this notebook instance. This filter returns only notebook instances associated with a lifecycle configuration with a name that contains the specified string.

", "NotebookInstanceLifecycleConfigSummary$NotebookInstanceLifecycleConfigName": "

The name of the lifecycle configuration.

", - "NotebookInstanceSummary$NotebookInstanceLifecycleConfigName": "

The name of a notebook instance lifecycle configuration associated with this notebook instance.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", - "UpdateNotebookInstanceInput$LifecycleConfigName": "

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", + "NotebookInstanceSummary$NotebookInstanceLifecycleConfigName": "

The name of a notebook instance lifecycle configuration associated with this notebook instance.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", + "UpdateNotebookInstanceInput$LifecycleConfigName": "

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", "UpdateNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName": "

The name of the lifecycle configuration.

" } }, @@ -2312,7 +2313,7 @@ } }, "NotebookInstanceLifecycleHook": { - "base": "

Contains the notebook instance lifecycle configuration script.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

", + "base": "

Contains the notebook instance lifecycle configuration script.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.
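To make the hook shape concrete, here is a hedged AWS SDK for PHP sketch that creates a lifecycle configuration whose OnStart script is passed as base64-encoded Content; the configuration name and script body are placeholders.

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['version' => '2017-07-24', 'region' => 'us-east-1']);

    // The script must be base64-encoded, stay under the 16384-character limit,
    // and finish within 5 minutes, per the description above.
    $onStart = base64_encode("#!/bin/bash\nset -e\necho \"notebook started\" >> /tmp/lifecycle.log\n");

    $client->createNotebookInstanceLifecycleConfig([
        'NotebookInstanceLifecycleConfigName' => 'example-config',   // placeholder
        'OnStart' => [['Content' => $onStart]],
    ]);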

", "refs": { "NotebookInstanceLifecycleConfigList$member": null } @@ -2421,9 +2422,9 @@ "OrderKey": { "base": null, "refs": { - "ListEndpointConfigsInput$SortOrder": "

The sort order for results. The default is Ascending.

", - "ListEndpointsInput$SortOrder": "

The sort order for results. The default is Ascending.

", - "ListModelsInput$SortOrder": "

The sort order for results. The default is Ascending.

" + "ListEndpointConfigsInput$SortOrder": "

The sort order for results. The default is Descending.

", + "ListEndpointsInput$SortOrder": "

The sort order for results. The default is Descending.

", + "ListModelsInput$SortOrder": "

The sort order for results. The default is Descending.
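A small AWS SDK for PHP sketch of the new default in action, explicitly requesting Descending order so the newest endpoints come back first; the region and result contents depend on the account.

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['version' => '2017-07-24', 'region' => 'us-east-1']);

    $result = $client->listEndpoints([
        'SortBy'     => 'CreationTime',
        'SortOrder'  => 'Descending',   // newest endpoints first
        'MaxResults' => 10,
    ]);

    foreach ($result['Endpoints'] as $endpoint) {
        echo $endpoint['EndpointName'], "\n";
    }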

" } }, "OutputConfig": { @@ -2519,7 +2520,7 @@ "ParentHyperParameterTuningJobs": { "base": null, "refs": { - "HyperParameterTuningJobWarmStartConfig$ParentHyperParameterTuningJobs": "

An array of hyperparameter tuning jobs that are used as the starting point for the new hyperparameter tuning job. For more information about warm starting a hyperparameter tuning job, see Using a Previous Hyperparameter Tuning Job as a Starting Point.

Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent jobs for warm start tuning jobs.

" + "HyperParameterTuningJobWarmStartConfig$ParentHyperParameterTuningJobs": "

An array of hyperparameter tuning jobs that are used as the starting point for the new hyperparameter tuning job. For more information about warm starting a hyperparameter tuning job, see Using a Previous Hyperparameter Tuning Job as a Starting Point.

Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent jobs for warm start tuning jobs.

" } }, "ProductId": { @@ -2717,9 +2718,9 @@ "AlgorithmValidationSpecification$ValidationRole": "

The IAM roles that Amazon SageMaker uses to run the training jobs.

", "CreateCompilationJobRequest$RoleArn": "

The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

During model compilation, Amazon SageMaker needs your permission to:

You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For more information, see Amazon SageMaker Roles.

", "CreateLabelingJobRequest$RoleArn": "

The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf during data labeling. You must grant this role the necessary permissions so that Amazon SageMaker can successfully complete data labeling.

", - "CreateModelInput$ExecutionRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access model artifacts and the Docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

", - "CreateNotebookInstanceInput$RoleArn": "

When you send any requests to AWS resources from the notebook instance, Amazon SageMaker assumes this role to perform tasks on your behalf. You must grant this role necessary permissions so Amazon SageMaker can perform these tasks. The policy must allow the Amazon SageMaker service principal (sagemaker.amazonaws.com) permissions to assume this role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

", - "CreateTrainingJobRequest$RoleArn": "

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

During model training, Amazon SageMaker needs your permission to read input data from an S3 bucket, download a Docker image that contains training code, write model artifacts to an S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

", + "CreateModelInput$ExecutionRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access model artifacts and the Docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

", + "CreateNotebookInstanceInput$RoleArn": "

When you send any requests to AWS resources from the notebook instance, Amazon SageMaker assumes this role to perform tasks on your behalf. You must grant this role necessary permissions so Amazon SageMaker can perform these tasks. The policy must allow the Amazon SageMaker service principal (sagemaker.amazonaws.com) permissions to assume this role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

", + "CreateTrainingJobRequest$RoleArn": "

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

During model training, Amazon SageMaker needs your permission to read input data from an S3 bucket, download a Docker image that contains training code, write model artifacts to an S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

", "DescribeCompilationJobResponse$RoleArn": "

The Amazon Resource Name (ARN) of the model compilation job.

", "DescribeLabelingJobResponse$RoleArn": "

The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf during data labeling.

", "DescribeModelOutput$ExecutionRoleArn": "

The Amazon Resource Name (ARN) of the IAM role that you specified for the model.

", @@ -2729,7 +2730,15 @@ "ModelPackageValidationSpecification$ValidationRole": "

The IAM roles to be used for the validation of the model package.

", "RenderUiTemplateRequest$RoleArn": "

The Amazon Resource Name (ARN) that has access to the S3 objects that are used by the template.

", "TrainingJob$RoleArn": "

The AWS Identity and Access Management (IAM) role configured for the training job.

", - "UpdateNotebookInstanceInput$RoleArn": "

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access the notebook instance. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

" + "UpdateNotebookInstanceInput$RoleArn": "

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access the notebook instance. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

" + } + }, + "RootAccess": { + "base": null, + "refs": { + "CreateNotebookInstanceInput$RootAccess": "

Whether root access is enabled or disabled for users of the notebook instance. The default value is Enabled.

Lifecycle configurations need root access to be able to set up a notebook instance. Because of this, lifecycle configurations associated with a notebook instance always run with root access even if you disable root access for users.

", + "DescribeNotebookInstanceOutput$RootAccess": "

Whether root access is enabled or disabled for users of the notebook instance.

Lifecycle configurations need root access to be able to set up a notebook instance. Because of this, lifecycle configurations associated with a notebook instance always run with root access even if you disable root access for users.

", + "UpdateNotebookInstanceInput$RootAccess": "

Whether root access is enabled or disabled for users of the notebook instance. The default value is Enabled.

If you set this to Disabled, users don't have root access on the notebook instance, but lifecycle configuration scripts still run with root permissions.
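A hedged AWS SDK for PHP sketch of disabling root access on an existing notebook instance; the instance name is a placeholder, and the instance generally needs to be stopped before it can be updated.

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['version' => '2017-07-24', 'region' => 'us-east-1']);

    $client->updateNotebookInstance([
        'NotebookInstanceName' => 'example-notebook',   // placeholder
        'RootAccess'           => 'Disabled',           // lifecycle scripts still run as root
    ]);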

" } }, "S3DataDistribution": { @@ -2764,7 +2773,7 @@ "OutputConfig$S3OutputLocation": "

Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For example, s3://bucket-name/key-name-prefix.

", "OutputDataConfig$S3OutputPath": "

Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For example, s3://bucket-name/key-name-prefix.

", "S3DataSource$S3Uri": "

Depending on the value specified for the S3DataType, identifies either a key name prefix or a manifest. For example:

", - "TransformOutput$S3OutputPath": "

The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job. For example, s3://bucket-name/key-name-prefix.

For every S3 object used as input for the transform job, the transformed data is stored in a corresponding subfolder in the location under the output prefix. For example, for the input data s3://bucket-name/input-name-prefix/dataset01/data.csv the transformed data is stored at s3://bucket-name/key-name-prefix/dataset01/. This is based on the original name, as a series of .part files (.part0001, part0002, etc.).

", + "TransformOutput$S3OutputPath": "

The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job. For example, s3://bucket-name/key-name-prefix.

For every S3 object used as input for the transform job, batch transform stores the transformed data with an .out suffix in a corresponding subfolder in the location under the output prefix. For example, for the input data stored at s3://bucket-name/input-name-prefix/dataset01/data.csv, batch transform stores the transformed data at s3://bucket-name/output-name-prefix/input-name-prefix/data.csv.out. Batch transform doesn't upload partially processed objects. For an input S3 object that contains multiple records, it creates an .out file only if the transform job succeeds on the entire file. When the input contains multiple S3 objects, the batch transform job processes the listed S3 objects and uploads only the output for successfully processed objects. If any object fails in the transform job, batch transform marks the job as failed, prompting investigation.

", "TransformS3DataSource$S3Uri": "

Depending on the value specified for the S3DataType, identifies either a key name prefix or a manifest. For example:

", "UiConfig$UiTemplateS3Uri": "

The Amazon S3 bucket location of the UI template. For more information about the contents of a UI template, see Creating Your Custom Labeling Task Template.

" } @@ -2915,7 +2924,7 @@ "SplitType": { "base": null, "refs": { - "TransformInput$SplitType": "

The method to use to split the transform job's data into smaller batches. If you don't want to split the data, specify None. If you want to split records on a newline character boundary, specify Line. To split records according to the RecordIO format, specify RecordIO. The default value is None.

Amazon SageMaker sends the maximum number of records per batch in each request up to the MaxPayloadInMB limit. For more information, see RecordIO data format.

For information about the RecordIO format, see Data Format.

" + "TransformInput$SplitType": "

The method to use to split the transform job's data files into smaller batches. Splitting is necessary when the total size of each object is too large to fit in a single request. You can also use data splitting to improve performance by processing multiple concurrent mini-batches. The default value for SplitType is None, which indicates that input data files are not split, and request payloads contain the entire contents of an input object. Set the value of this parameter to Line to split records on a newline character boundary. SplitType also supports a number of record-oriented binary data formats.

When splitting is enabled, the size of a mini-batch depends on the values of the BatchStrategy and MaxPayloadInMB parameters. When the value of BatchStrategy is MultiRecord, Amazon SageMaker sends the maximum number of records in each request, up to the MaxPayloadInMB limit. If the value of BatchStrategy is SingleRecord, Amazon SageMaker sends individual records in each request.

Some data formats represent a record as a binary payload wrapped with extra padding bytes. When splitting is applied to a binary data format, padding is removed if the value of BatchStrategy is set to SingleRecord. Padding is not removed if the value of BatchStrategy is set to MultiRecord.

For more information about the RecordIO format, see Data Format in the MXNet documentation. For more information about the TFRecord format, see Consuming TFRecord data in the TensorFlow documentation.
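As an illustrative fragment (the bucket URI is a placeholder), a TransformInput that splits CSV records on newline boundaries; combined with a BatchStrategy of MultiRecord, each request is then packed with records up to the MaxPayloadInMB limit, per the description above.

    <?php
    // TransformInput fragment for a CreateTransformJob request (placeholder URI).
    $transformInput = [
        'DataSource' => ['S3DataSource' => [
            'S3DataType' => 'S3Prefix',
            'S3Uri'      => 's3://example-bucket/input/',
        ]],
        'ContentType'     => 'text/csv',
        'CompressionType' => 'None',
        'SplitType'       => 'Line',   // split records on newline boundaries
    ];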

" } }, "StartNotebookInstanceInput": { @@ -3058,18 +3067,18 @@ "refs": { "AddTagsInput$Tags": "

An array of Tag objects. Each tag is a key-value pair. Only the key parameter is required. If you don't specify a value, Amazon SageMaker sets the value to an empty string.

", "AddTagsOutput$Tags": "

A list of tags associated with the Amazon SageMaker resource.

", - "CreateEndpointConfigInput$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", - "CreateEndpointInput$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", + "CreateEndpointConfigInput$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", + "CreateEndpointInput$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", "CreateHyperParameterTuningJobRequest$Tags": "

An array of key-value pairs. You can use tags to categorize your AWS resources in different ways, for example, by purpose, owner, or environment. For more information, see AWS Tagging Strategies.

Tags that you specify for the tuning job are also added to all training jobs that the tuning job launches.

", "CreateLabelingJobRequest$Tags": "

An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", - "CreateModelInput$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", + "CreateModelInput$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", "CreateNotebookInstanceInput$Tags": "

A list of tags to associate with the notebook instance. You can add tags later by using the AddTags API.

", - "CreateTrainingJobRequest$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", - "CreateTransformJobRequest$Tags": "

(Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", + "CreateTrainingJobRequest$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", + "CreateTransformJobRequest$Tags": "

(Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", "CreateWorkteamRequest$Tags": "

", "DescribeLabelingJobResponse$Tags": "

An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

", "ListTagsOutput$Tags": "

An array of Tag objects, each with a tag key and a value.

", - "TrainingJob$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

" + "TrainingJob$Tags": "

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

" } }, "TagValue": { @@ -3233,11 +3242,11 @@ "TrainingInputMode": { "base": null, "refs": { - "AlgorithmSpecification$TrainingInputMode": "

The input mode that the algorithm supports. For the input modes that Amazon SageMaker algorithms support, see Algorithms. If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume, and mounts the directory to a Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

In File mode, make sure you provision the ML storage volume with sufficient capacity to accommodate the data downloaded from S3. In addition to the training data, the ML storage volume also stores the output model. The algorithm container also uses the ML storage volume to store intermediate information, if any.

For distributed algorithms using File mode, training data is distributed uniformly, and your training duration is predictable if the sizes of the input data objects are approximately the same. Amazon SageMaker does not split the files any further for model training. If the object sizes are skewed, training won't be optimal because the data distribution is also skewed: one host in the training cluster is overloaded, becoming a bottleneck in training.

", + "AlgorithmSpecification$TrainingInputMode": "

The input mode that the algorithm supports. For the input modes that Amazon SageMaker algorithms support, see Algorithms. If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume, and mounts the directory to a Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

In File mode, make sure you provision the ML storage volume with sufficient capacity to accommodate the data downloaded from S3. In addition to the training data, the ML storage volume also stores the output model. The algorithm container also uses the ML storage volume to store intermediate information, if any.

For distributed algorithms using File mode, training data is distributed uniformly, and your training duration is predictable if the sizes of the input data objects are approximately the same. Amazon SageMaker does not split the files any further for model training. If the object sizes are skewed, training won't be optimal because the data distribution is also skewed: one host in the training cluster is overloaded, becoming a bottleneck in training.

", "Channel$InputMode": "

(Optional) The input mode to use for the data channel in a training job. If you don't set a value for InputMode, Amazon SageMaker uses the value set for TrainingInputMode. Use this parameter to override the TrainingInputMode setting in an AlgorithmSpecification request when you have a channel that needs a different input mode from the training job's general setting. To download the data from Amazon Simple Storage Service (Amazon S3) to the provisioned ML storage volume, and mount the directory to a Docker volume, use File input mode. To stream data directly from Amazon S3 to the container, choose Pipe input mode.

To use a model for incremental training, choose File input mode.
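To make the override concrete, a hedged fragment: the job-wide default is Pipe mode, while one channel (here, a hypothetical channel carrying a prior model for incremental training) opts into File mode. The image URI and bucket are placeholders.

    <?php
    // Fragments of a CreateTrainingJob request (placeholder image URI and bucket).
    $algorithmSpecification = [
        'TrainingImage'     => '123456789012.dkr.ecr.us-east-1.amazonaws.com/example:latest',
        'TrainingInputMode' => 'Pipe',    // job-wide default
    ];

    $inputDataConfig = [[
        'ChannelName' => 'model',
        'InputMode'   => 'File',          // per-channel override of TrainingInputMode
        'DataSource'  => ['S3DataSource' => [
            'S3DataType'             => 'S3Prefix',
            'S3Uri'                  => 's3://example-bucket/prior-model/',
            'S3DataDistributionType' => 'FullyReplicated',
        ]],
    ]];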

", - "HyperParameterAlgorithmSpecification$TrainingInputMode": "

The input mode that the algorithm supports: File or Pipe. In File input mode, Amazon SageMaker downloads the training data from Amazon S3 to the storage volume that is attached to the training instance and mounts the directory to the Docker volume for the training container. In Pipe input mode, Amazon SageMaker streams data directly from Amazon S3 to the container.

If you specify File mode, make sure that you provision the storage volume that is attached to the training instance with enough capacity to accommodate the training data downloaded from Amazon S3, the model artifacts, and intermediate information.

For more information about input modes, see Algorithms.

", + "HyperParameterAlgorithmSpecification$TrainingInputMode": "

The input mode that the algorithm supports: File or Pipe. In File input mode, Amazon SageMaker downloads the training data from Amazon S3 to the storage volume that is attached to the training instance and mounts the directory to the Docker volume for the training container. In Pipe input mode, Amazon SageMaker streams data directly from Amazon S3 to the container.

If you specify File mode, make sure that you provision the storage volume that is attached to the training instance with enough capacity to accommodate the training data downloaded from Amazon S3, the model artifacts, and intermediate information.

For more information about input modes, see Algorithms.

", "InputModes$member": null, - "TrainingJobDefinition$TrainingInputMode": "

The input mode used by the algorithm for the training job. For the input modes that Amazon SageMaker algorithms support, see Algorithms.

If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume, and mounts the directory to a Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

" + "TrainingJobDefinition$TrainingInputMode": "

The input mode used by the algorithm for the training job. For the input modes that Amazon SageMaker algorithms support, see Algorithms.

If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume, and mounts the directory to a Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

" } }, "TrainingInstanceCount": { @@ -3368,7 +3377,7 @@ "base": null, "refs": { "CreateTransformJobRequest$Environment": "

The environment variables to set in the Docker container. We support up to 16 key-value entries in the map.

", - "DescribeTransformJobResponse$Environment": "

", + "DescribeTransformJobResponse$Environment": "

The environment variables to set in the Docker container. We support up to 16 key-value entries in the map.

", "TransformJobDefinition$Environment": "

The environment variables to set in the Docker container. We support up to 16 key-value entries in the map.

" } }, @@ -3451,7 +3460,7 @@ } }, "TransformOutput": { - "base": "

Describes the results of a transform job output.

", + "base": "

Describes the results of a transform job.

", "refs": { "CreateTransformJobRequest$TransformOutput": "

Describes the results of the transform job.

", "DescribeTransformJobResponse$TransformOutput": "

Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the transform job.

", @@ -3582,14 +3591,14 @@ } }, "VpcConfig": { - "base": "

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", - "refs": { - "CreateModelInput$VpcConfig": "

A VpcConfig object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. VpcConfig is used in hosting services and in batch transform. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud.

", - "CreateTrainingJobRequest$VpcConfig": "

A VpcConfig object that specifies the VPC that you want your training job to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", - "DescribeModelOutput$VpcConfig": "

A VpcConfig object that specifies the VPC that this model has access to. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud

", - "DescribeTrainingJobResponse$VpcConfig": "

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", - "HyperParameterTrainingJobDefinition$VpcConfig": "

The VpcConfig object that specifies the VPC that you want the training jobs that this hyperparameter tuning job launches to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", - "TrainingJob$VpcConfig": "

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

" + "base": "

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", + "refs": { + "CreateModelInput$VpcConfig": "

A VpcConfig object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. VpcConfig is used in hosting services and in batch transform. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud.

", + "CreateTrainingJobRequest$VpcConfig": "

A VpcConfig object that specifies the VPC that you want your training job to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", + "DescribeModelOutput$VpcConfig": "

A VpcConfig object that specifies the VPC that this model has access to. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud.

", + "DescribeTrainingJobResponse$VpcConfig": "

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", + "HyperParameterTrainingJobDefinition$VpcConfig": "

The VpcConfig object that specifies the VPC that you want the training jobs that this hyperparameter tuning job launches to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

", + "TrainingJob$VpcConfig": "

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.
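For reference, a hedged sketch of the VpcConfig shape as it would be passed to CreateModel or CreateTrainingJob; the security group and subnet IDs are placeholders.

    <?php
    $vpcConfig = [
        'SecurityGroupIds' => ['sg-0123456789abcdef0'],                              // placeholder
        'Subnets'          => ['subnet-0123456789abcdef0', 'subnet-0fedcba987654'],  // placeholders
    ];
    // e.g. passed as 'VpcConfig' => $vpcConfig in a createTrainingJob or createModel request.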

" } }, "VpcSecurityGroupIds": { diff --git a/src/data/sagemaker/2017-07-24/docs-2.json.php b/src/data/sagemaker/2017-07-24/docs-2.json.php index a6646c8839..073bb47e30 100644 --- a/src/data/sagemaker/2017-07-24/docs-2.json.php +++ b/src/data/sagemaker/2017-07-24/docs-2.json.php @@ -1,3 +1,3 @@ '2.0', 'service' => 'Definition of the public APIs exposed by SageMaker', 'operations' => [ 'AddTags' => '

Adds or overwrites one or more tags for the specified Amazon SageMaker resource. You can add tags to notebook instances, training jobs, hyperparameter tuning jobs, models, endpoint configurations, and endpoints.

Each tag consists of a key and an optional value. Tag keys must be unique per resource. For more information about tags, see AWS Tagging Strategies.

Tags that you add to a hyperparameter tuning job by calling this API are also added to any training jobs that the hyperparameter tuning job launches after you call this API, but not to training jobs that the hyperparameter tuning job launched before you called this API. To make sure that the tags associated with a hyperparameter tuning job are also added to all training jobs that the hyperparameter tuning job launches, add the tags when you first create the tuning job by specifying them in the Tags parameter of CreateHyperParameterTuningJob.
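A minimal AWS SDK for PHP sketch of AddTags; the resource ARN and tag values are placeholders.

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['version' => '2017-07-24', 'region' => 'us-east-1']);

    $client->addTags([
        'ResourceArn' => 'arn:aws:sagemaker:us-east-1:123456789012:training-job/example-job',
        'Tags' => [
            ['Key' => 'project', 'Value' => 'example'],        // placeholder tags
            ['Key' => 'owner',   'Value' => 'data-science'],
        ],
    ]);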

', 'CreateAlgorithm' => '

Creates a machine learning algorithm that you can use in Amazon SageMaker and list in the AWS Marketplace.

', 'CreateCodeRepository' => '

Creates a Git repository as a resource in your Amazon SageMaker account. You can associate the repository with notebook instances so that you can use Git source control for the notebooks you create. The Git repository is a resource in your Amazon SageMaker account, so it can be associated with more than one notebook instance, and it persists independently from the lifecycle of any notebook instances it is associated with.

The repository can be hosted either in AWS CodeCommit or in any other Git repository.

', 'CreateCompilationJob' => '

Starts a model compilation job. After the model has been compiled, Amazon SageMaker saves the resulting model artifacts to an Amazon Simple Storage Service (Amazon S3) bucket that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts with AWS IoT Greengrass. In that case, deploy them as an ML resource.

In the request body, you provide the following:

You can also provide a Tag to track the model compilation job\'s resource use and costs. The response body contains the CompilationJobArn for the compiled job.

To stop a model compilation job, use StopCompilationJob. To get information about a particular model compilation job, use DescribeCompilationJob. To get information about multiple model compilation jobs, use ListCompilationJobs.

', 'CreateEndpoint' => '

Creates an endpoint using the endpoint configuration specified in the request. Amazon SageMaker uses the endpoint to provision resources and deploy models. You create the endpoint configuration with the CreateEndpointConfig API.

Use this API only for hosting models using Amazon SageMaker hosting services.

The endpoint name must be unique within an AWS Region in your AWS account.

When it receives the request, Amazon SageMaker creates the endpoint, launches the resources (ML compute instances), and deploys the model(s) on them.

When Amazon SageMaker receives the request, it sets the endpoint status to Creating. After it creates the endpoint, it sets the status to InService. Amazon SageMaker can then process incoming requests for inferences. To check the status of an endpoint, use the DescribeEndpoint API.

For an example, see Exercise 1: Using the K-Means Algorithm Provided by Amazon SageMaker.

If any of the models hosted at this endpoint get model data from an Amazon S3 location, Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you provided. AWS STS is activated in your IAM user account by default. If you previously deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For more information, see Activating and Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User Guide.

', 'CreateEndpointConfig' => '

Creates an endpoint configuration that Amazon SageMaker hosting services uses to deploy models. In the configuration, you identify one or more models, created using the CreateModel API, to deploy and the resources that you want Amazon SageMaker to provision. Then you call the CreateEndpoint API.

Use this API only if you want to use Amazon SageMaker hosting services to deploy models into production.

In the request, you define one or more ProductionVariants, each of which identifies a model. Each ProductionVariant parameter also describes the resources that you want Amazon SageMaker to provision. This includes the number and type of ML compute instances to deploy.

If you are hosting multiple models, you also assign a VariantWeight to specify how much traffic you want to allocate to each model. For example, suppose that you want to host two models, A and B, and you assign traffic weight 2 for model A and 1 for model B. Amazon SageMaker distributes two-thirds of the traffic to Model A, and one-third to model B.
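A hedged sketch of the 2:1 weighting described above, assuming two models named model-a and model-b already exist; all other names are placeholders.

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['version' => '2017-07-24', 'region' => 'us-east-1']);

    $client->createEndpointConfig([
        'EndpointConfigName' => 'example-config',   // placeholder
        'ProductionVariants' => [
            [
                'VariantName'          => 'variant-a',
                'ModelName'            => 'model-a',
                'InstanceType'         => 'ml.m4.xlarge',
                'InitialInstanceCount' => 1,
                'InitialVariantWeight' => 2.0,      // receives two-thirds of traffic
            ],
            [
                'VariantName'          => 'variant-b',
                'ModelName'            => 'model-b',
                'InstanceType'         => 'ml.m4.xlarge',
                'InitialInstanceCount' => 1,
                'InitialVariantWeight' => 1.0,      // receives one-third of traffic
            ],
        ],
    ]);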

', 'CreateHyperParameterTuningJob' => '

Starts a hyperparameter tuning job. A hyperparameter tuning job finds the best version of a model by running many training jobs on your dataset using the algorithm you choose and values for hyperparameters within ranges that you specify. It then chooses the hyperparameter values that result in a model that performs the best, as measured by an objective metric that you choose.

', 'CreateLabelingJob' => '

Creates a job that uses workers to label the data objects in your input dataset. You can use the labeled data to train machine learning models.

You can select your workforce from one of three providers:

You can also use automated data labeling to reduce the number of data objects that need to be labeled by a human. Automated data labeling uses active learning to determine if a data object can be labeled by machine or if it needs to be sent to a human worker. For more information, see Using Automated Data Labeling.

The data objects to be labeled are contained in an Amazon S3 bucket. You create a manifest file that describes the location of each object. For more information, see Using Input and Output Data.

The output can be used as the manifest file for another labeling job or as training data for your machine learning models.

', 'CreateModel' => '

Creates a model in Amazon SageMaker. In the request, you name the model and describe a primary container. For the primary container, you specify the Docker image containing inference code, artifacts (from prior training), and a custom environment map that the inference code uses when you deploy the model for predictions.

Use this API to create a model if you want to use Amazon SageMaker hosting services or run a batch transform job.

To host your model, you create an endpoint configuration with the CreateEndpointConfig API, and then create an endpoint with the CreateEndpoint API. Amazon SageMaker then deploys all of the containers that you defined for the model in the hosting environment.

To run a batch transform using your model, you start a job with the CreateTransformJob API. Amazon SageMaker uses your model and your dataset to get inferences which are then saved to a specified S3 location.

In the CreateModel request, you must define a container with the PrimaryContainer parameter.

In the request, you also provide an IAM role that Amazon SageMaker can assume to access model artifacts and the Docker image for deployment on ML compute hosting instances or for batch transform jobs. In addition, you also use the IAM role to manage permissions the inference code needs. For example, if the inference code accesses any other AWS resources, you grant the necessary permissions via this role.

', 'CreateModelPackage' => '

Creates a model package that you can use to create Amazon SageMaker models or list on AWS Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

To create a model package by specifying a Docker container that contains your inference code and the Amazon S3 location of your model artifacts, provide values for InferenceSpecification. To create a model from an algorithm resource that you created or subscribed to in AWS Marketplace, provide a value for SourceAlgorithmSpecification.

', 'CreateNotebookInstance' => '

Creates an Amazon SageMaker notebook instance. A notebook instance is a machine learning (ML) compute instance running the Jupyter Notebook App.

In a CreateNotebookInstance request, specify the type of ML compute instance that you want to run. Amazon SageMaker launches the instance, installs common libraries that you can use to explore datasets for model training, and attaches an ML storage volume to the notebook instance.

Amazon SageMaker also provides a set of example notebooks. Each notebook demonstrates how to use Amazon SageMaker with a specific algorithm or with a machine learning framework.

After receiving the request, Amazon SageMaker does the following:

  1. Creates a network interface in the Amazon SageMaker VPC.

  2. (Optional) If you specified SubnetId, Amazon SageMaker creates a network interface in your own VPC, which is inferred from the subnet ID that you provide in the input. When creating this network interface, Amazon SageMaker attaches the security group that you specified in the request to the network interface that it creates in your VPC.

  3. Launches an EC2 instance of the type specified in the request in the Amazon SageMaker VPC. If you specified SubnetId of your VPC, Amazon SageMaker specifies both network interfaces when launching this instance. This enables inbound traffic from your own VPC to the notebook instance, assuming that the security groups allow it.

After creating the notebook instance, Amazon SageMaker returns its Amazon Resource Name (ARN).

After Amazon SageMaker creates the notebook instance, you can connect to the Jupyter server and work in Jupyter notebooks. For example, you can write code to explore a dataset that you can use for model training, train a model, host models by creating Amazon SageMaker endpoints, and validate hosted models.

For more information, see How It Works.

', 'CreateNotebookInstanceLifecycleConfig' => '

Creates a lifecycle configuration that you can associate with a notebook instance. A lifecycle configuration is a collection of shell scripts that run when you create or start a notebook instance.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'CreatePresignedNotebookInstanceUrl' => '

Returns a URL that you can use to connect to the Jupyter server from a notebook instance. In the Amazon SageMaker console, when you choose Open next to a notebook instance, Amazon SageMaker opens a new tab showing the Jupyter server home page from the notebook instance. The console uses this API to get the URL and show the page.

You can restrict access to this API and to the URL that it returns to a list of IP addresses that you specify. To restrict access, attach an IAM policy that denies access to this API unless the call comes from an IP address in the specified list to every AWS Identity and Access Management user, group, or role used to access the notebook instance. Use the NotIpAddress condition operator and the aws:SourceIP condition context key to specify the list of IP addresses that you want to have access to the notebook instance. For more information, see Limit Access to a Notebook Instance by IP Address.
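A minimal sketch of requesting the presigned URL with the AWS SDK for PHP; the instance name is a placeholder, and the session duration parameter is optional.

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['version' => '2017-07-24', 'region' => 'us-east-1']);

    $result = $client->createPresignedNotebookInstanceUrl([
        'NotebookInstanceName'                => 'example-notebook',   // placeholder
        'SessionExpirationDurationInSeconds'  => 1800,                 // optional
    ]);

    echo $result['AuthorizedUrl'], "\n";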

', 'CreateTrainingJob' => '

Starts a model training job. After training completes, Amazon SageMaker saves the resulting model artifacts to an Amazon S3 location that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts in a deep learning service other than Amazon SageMaker, provided that you know how to use them for inferences.

In the request body, you provide the following:

For more information about Amazon SageMaker, see How It Works.

', 'CreateTransformJob' => '

Starts a transform job. A transform job uses a trained model to get inferences on a dataset and saves these results to an Amazon S3 location that you specify.

To perform batch transformations, you create a transform job and use the data that you have readily available.

In the request body, you provide the following:

For more information about how batch transformation works in Amazon SageMaker, see How It Works.

', 'CreateWorkteam' => '

Creates a new work team for labeling your data. A work team is defined by one or more Amazon Cognito user pools. You must first create the user pools before you can create a work team.

You cannot create more than 25 work teams in an account and region.

', 'DeleteAlgorithm' => '

Removes the specified algorithm from your account.

', 'DeleteCodeRepository' => '

Deletes the specified Git repository from your account.

', 'DeleteEndpoint' => '

Deletes an endpoint. Amazon SageMaker frees up all of the resources that were deployed when the endpoint was created.

Amazon SageMaker retires any custom KMS key grants associated with the endpoint, meaning you don\'t need to use the RevokeGrant API call.

', 'DeleteEndpointConfig' => '

Deletes an endpoint configuration. The DeleteEndpointConfig API deletes only the specified configuration. It does not delete endpoints created using the configuration.

', 'DeleteModel' => '

Deletes a model. The DeleteModel API deletes only the model entry that was created in Amazon SageMaker when you called the CreateModel API. It does not delete model artifacts, inference code, or the IAM role that you specified when creating the model.

', 'DeleteModelPackage' => '

Deletes a model package.

A model package is used to create Amazon SageMaker models or list on AWS Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

', 'DeleteNotebookInstance' => '

Deletes an Amazon SageMaker notebook instance. Before you can delete a notebook instance, you must call the StopNotebookInstance API.

When you delete a notebook instance, you lose all of your data. Amazon SageMaker removes the ML compute instance, and deletes the ML storage volume and the network interface associated with the notebook instance.

', 'DeleteNotebookInstanceLifecycleConfig' => '

Deletes a notebook instance lifecycle configuration.

', 'DeleteTags' => '

Deletes the specified tags from an Amazon SageMaker resource.

To list a resource\'s tags, use the ListTags API.

When you call this API to delete tags from a hyperparameter tuning job, the deleted tags are not removed from training jobs that the hyperparameter tuning job launched before you called this API.

', 'DeleteWorkteam' => '

Deletes an existing work team. This operation can\'t be undone.

', 'DescribeAlgorithm' => '

Returns a description of the specified algorithm that is in your account.

', 'DescribeCodeRepository' => '

Gets details about the specified Git repository.

', 'DescribeCompilationJob' => '

Returns information about a model compilation job.

To create a model compilation job, use CreateCompilationJob. To get information about multiple model compilation jobs, use ListCompilationJobs.

', 'DescribeEndpoint' => '

Returns the description of an endpoint.

', 'DescribeEndpointConfig' => '

Returns the description of an endpoint configuration created using the CreateEndpointConfig API.

', 'DescribeHyperParameterTuningJob' => '

Gets a description of a hyperparameter tuning job.

', 'DescribeLabelingJob' => '

Gets information about a labeling job.

', 'DescribeModel' => '

Describes a model that you created using the CreateModel API.

', 'DescribeModelPackage' => '

Returns a description of the specified model package, which is used to create Amazon SageMaker models or list them on AWS Marketplace.

To create models in Amazon SageMaker, buyers can subscribe to model packages listed on AWS Marketplace.

', 'DescribeNotebookInstance' => '

Returns information about a notebook instance.

', 'DescribeNotebookInstanceLifecycleConfig' => '

Returns a description of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'DescribeSubscribedWorkteam' => '

Gets information about a work team provided by a vendor. It returns details about the subscription with a vendor in the AWS Marketplace.

', 'DescribeTrainingJob' => '

Returns information about a training job.

', 'DescribeTransformJob' => '

Returns information about a transform job.

', 'DescribeWorkteam' => '

Gets information about a specific work team. You can see information such as the create date, the last updated date, membership information, and the work team\'s Amazon Resource Name (ARN).

', 'GetSearchSuggestions' => '

An auto-complete API for the search functionality in the Amazon SageMaker console. It returns suggestions of possible matches for the property name to use in Search queries. Provides suggestions for HyperParameters, Tags, and Metrics.

', 'ListAlgorithms' => '

Lists the machine learning algorithms that have been created.

', 'ListCodeRepositories' => '

Gets a list of the Git repositories in your account.

', 'ListCompilationJobs' => '

Lists model compilation jobs that satisfy various filters.

To create a model compilation job, use CreateCompilationJob. To get information about a particular model compilation job you have created, use DescribeCompilationJob.

', 'ListEndpointConfigs' => '

Lists endpoint configurations.

', 'ListEndpoints' => '

Lists endpoints.

', 'ListHyperParameterTuningJobs' => '

Gets a list of HyperParameterTuningJobSummary objects that describe the hyperparameter tuning jobs launched in your account.

', 'ListLabelingJobs' => '

Gets a list of labeling jobs.

', 'ListLabelingJobsForWorkteam' => '

Gets a list of labeling jobs assigned to a specified work team.

', 'ListModelPackages' => '

Lists the model packages that have been created.

', 'ListModels' => '

Lists models created with the CreateModel API.

', 'ListNotebookInstanceLifecycleConfigs' => '

Lists notebook instance lifecycle configurations created with the CreateNotebookInstanceLifecycleConfig API.

', 'ListNotebookInstances' => '

Returns a list of the Amazon SageMaker notebook instances in the requester\'s account in an AWS Region.

', 'ListSubscribedWorkteams' => '

Gets a list of the work teams that you are subscribed to in the AWS Marketplace. The list may be empty if no work team satisfies the filter specified in the NameContains parameter.

', 'ListTags' => '

Returns the tags for the specified Amazon SageMaker resource.

', 'ListTrainingJobs' => '

Lists training jobs.

', 'ListTrainingJobsForHyperParameterTuningJob' => '

Gets a list of TrainingJobSummary objects that describe the training jobs that a hyperparameter tuning job launched.

', 'ListTransformJobs' => '

Lists transform jobs.

', 'ListWorkteams' => '

Gets a list of work teams that you have defined in a region. The list may be empty if no work team satisfies the filter specified in the NameContains parameter.

', 'RenderUiTemplate' => '

Renders the UI template so that you can preview the worker\'s experience.

', 'Search' => '

Finds Amazon SageMaker resources that match a search query. Matching resource objects are returned as a list of SearchResult objects in the response. You can sort the search results by any resource property in ascending or descending order.

You can query against the following value types: numerical, text, Booleans, and timestamps.

', 'StartNotebookInstance' => '

Launches an ML compute instance with the latest version of the libraries and attaches your ML storage volume. After configuring the notebook instance, Amazon SageMaker sets the notebook instance status to InService. A notebook instance\'s status must be InService before you can connect to your Jupyter notebook.

', 'StopCompilationJob' => '

Stops a model compilation job.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal. This gracefully shuts the job down. If the job hasn\'t stopped, it sends the SIGKILL signal.

When it receives a StopCompilationJob request, Amazon SageMaker changes the CompilationJobSummary$CompilationJobStatus of the job to Stopping. After Amazon SageMaker stops the job, it sets the CompilationJobSummary$CompilationJobStatus to Stopped.

', 'StopHyperParameterTuningJob' => '

Stops a running hyperparameter tuning job and all running training jobs that the tuning job launched.

All model artifacts output from the training jobs are stored in Amazon Simple Storage Service (Amazon S3). All data that the training jobs write to Amazon CloudWatch Logs are still available in CloudWatch. After the tuning job moves to the Stopped state, it releases all reserved resources for the tuning job.

', 'StopLabelingJob' => '

Stops a running labeling job. A job that is stopped cannot be restarted. Any results obtained before the job is stopped are placed in the Amazon S3 output bucket.

', 'StopNotebookInstance' => '

Terminates the ML compute instance. Before terminating the instance, Amazon SageMaker disconnects the ML storage volume from it. Amazon SageMaker preserves the ML storage volume.

To access data on the ML storage volume for a notebook instance that has been terminated, call the StartNotebookInstance API. StartNotebookInstance launches another ML compute instance, configures it, and attaches the preserved ML storage volume so you can continue your work.

', 'StopTrainingJob' => '

Stops a training job. To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts, so that the results of the training are not lost.

Training algorithms provided by Amazon SageMaker save the intermediate results of a model training job. This intermediate data is a valid model artifact. You can use the model artifacts that are saved when Amazon SageMaker stops a training job to create a model.

When it receives a StopTrainingJob request, Amazon SageMaker changes the status of the job to Stopping. After Amazon SageMaker stops the job, it sets the status to Stopped.

', 'StopTransformJob' => '

Stops a transform job.

When Amazon SageMaker receives a StopTransformJob request, the status of the job changes to Stopping. After Amazon SageMaker stops the job, the status is set to Stopped. When you stop a transform job before it is completed, Amazon SageMaker doesn\'t store the job\'s output in Amazon S3.

', 'UpdateCodeRepository' => '

Updates the specified Git repository with the specified values.

', 'UpdateEndpoint' => '

Deploys the new EndpointConfig specified in the request, switches to using the newly created endpoint, and then deletes resources provisioned for the endpoint using the previous EndpointConfig (there is no availability loss).

When Amazon SageMaker receives the request, it sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.

You cannot update an endpoint with the current EndpointConfig. To update an endpoint, you must create a new EndpointConfig.

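A minimal sketch (assuming $client is a configured Aws\SageMaker\SageMakerClient and both names are placeholders for resources that already exist):

    // Hypothetical sketch: switch an endpoint to a new endpoint configuration.
    $client->updateEndpoint([
        "EndpointName"       => "my-endpoint",
        "EndpointConfigName" => "my-endpoint-config-v2", // must not be the config currently in use
    ]);

    $status = $client->describeEndpoint([
        "EndpointName" => "my-endpoint",
    ])["EndpointStatus"]; // "Updating" during the switch, then "InService"
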
', 'UpdateEndpointWeightsAndCapacities' => '

Updates variant weight of one or more variants associated with an existing endpoint, or capacity of one variant associated with an existing endpoint. When it receives the request, Amazon SageMaker sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.

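A minimal sketch (assuming $client is a configured Aws\SageMaker\SageMakerClient; the endpoint and variant names are placeholders):

    // Hypothetical sketch: shift weight and capacity for a single variant.
    $client->updateEndpointWeightsAndCapacities([
        "EndpointName" => "my-endpoint",
        "DesiredWeightsAndCapacities" => [
            [
                "VariantName"          => "variant-1",
                "DesiredWeight"        => 0.5,
                "DesiredInstanceCount" => 2,
            ],
        ],
    ]);
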
', 'UpdateNotebookInstance' => '

Updates a notebook instance. NotebookInstance updates include upgrading or downgrading the ML compute instance used for your notebook instance to accommodate changes in your workload requirements. You can also update the VPC security groups.

', 'UpdateNotebookInstanceLifecycleConfig' => '

Updates a notebook instance lifecycle configuration created with the CreateNotebookInstanceLifecycleConfig API.

', 'UpdateWorkteam' => '

Updates an existing work team with new member definitions or description.

', ], 'shapes' => [ 'Accept' => [ 'base' => NULL, 'refs' => [ 'TransformOutput$Accept' => '

The MIME type used to specify the output data. Amazon SageMaker uses the MIME type with each HTTP call to transfer data from the transform job.

', ], ], 'AccountId' => [ 'base' => NULL, 'refs' => [ 'LabelingJobForWorkteamSummary$WorkRequesterAccountId' => '

', ], ], 'AddTagsInput' => [ 'base' => NULL, 'refs' => [], ], 'AddTagsOutput' => [ 'base' => NULL, 'refs' => [], ], 'AdditionalCodeRepositoryNamesOrUrls' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$AdditionalCodeRepositories' => '

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

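A minimal sketch (assuming $client is a configured Aws\SageMaker\SageMakerClient; the role ARN, repository name, and URL are placeholders):

    // Hypothetical sketch: a notebook instance with a default repository and
    // one additional repository (up to three additional are allowed).
    $client->createNotebookInstance([
        "NotebookInstanceName"       => "my-notebook",
        "InstanceType"               => "ml.t2.medium",
        "RoleArn"                    => "arn:aws:iam::123456789012:role/MySageMakerRole",
        "DefaultCodeRepository"      => "my-registered-repo",
        "AdditionalCodeRepositories" => [
            "https://github.com/example/helper-notebooks.git",
        ],
    ]);
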
', 'DescribeNotebookInstanceOutput$AdditionalCodeRepositories' => '

An array of up to three Git repositories associated with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'NotebookInstanceSummary$AdditionalCodeRepositories' => '

An array of up to three Git repositories associated with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'UpdateNotebookInstanceInput$AdditionalCodeRepositories' => '

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URL of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', ], ], 'AlgorithmArn' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$AlgorithmArn' => '

The Amazon Resource Name (ARN) of the algorithm.

', 'CreateAlgorithmOutput$AlgorithmArn' => '

The Amazon Resource Name (ARN) of the new algorithm.

', 'DescribeAlgorithmOutput$AlgorithmArn' => '

The Amazon Resource Name (ARN) of the algorithm.

', ], ], 'AlgorithmImage' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$TrainingImage' => '

The registry path of the Docker image that contains the training algorithm. For information about Docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters.

', 'HyperParameterAlgorithmSpecification$TrainingImage' => '

The registry path of the Docker image that contains the training algorithm. For information about Docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters.

', ], ], 'AlgorithmSortBy' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$SortBy' => '

The parameter by which to sort the results. The default is CreationTime.

', ], ], 'AlgorithmSpecification' => [ 'base' => '

Specifies the training algorithm to use in a CreateTrainingJob request.

For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about using your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

', 'refs' => [ 'CreateTrainingJobRequest$AlgorithmSpecification' => '

The registry path of the Docker image that contains the training algorithm and algorithm-specific metadata, including the input mode. For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about providing your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

', 'DescribeTrainingJobResponse$AlgorithmSpecification' => '

Information about the algorithm used for training, and algorithm metadata.

', 'TrainingJob$AlgorithmSpecification' => '

Information about the algorithm used for training, and algorithm metadata.

', ], ], 'AlgorithmStatus' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$AlgorithmStatus' => '

The overall status of the algorithm.

', 'DescribeAlgorithmOutput$AlgorithmStatus' => '

The current status of the algorithm.

', ], ], 'AlgorithmStatusDetails' => [ 'base' => '

Specifies the validation and image scan statuses of the algorithm.

', 'refs' => [ 'DescribeAlgorithmOutput$AlgorithmStatusDetails' => '

Details about the current status of the algorithm.

', ], ], 'AlgorithmStatusItem' => [ 'base' => '

Represents the overall status of an algorithm.

', 'refs' => [ 'AlgorithmStatusItemList$member' => NULL, ], ], 'AlgorithmStatusItemList' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusDetails$ValidationStatuses' => '

The status of algorithm validation.

', 'AlgorithmStatusDetails$ImageScanStatuses' => '

The status of the scan of the algorithm\'s Docker image container.

', ], ], 'AlgorithmSummary' => [ 'base' => '

Provides summary information about an algorithm.

', 'refs' => [ 'AlgorithmSummaryList$member' => NULL, ], ], 'AlgorithmSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsOutput$AlgorithmSummaryList' => '

An array of AlgorithmSummary objects, each of which lists an algorithm.

', ], ], 'AlgorithmValidationProfile' => [ 'base' => '

Defines a training job and a batch transform job that Amazon SageMaker runs to validate your algorithm.

The data provided in the validation profile is made available to your buyers on AWS Marketplace.

', 'refs' => [ 'AlgorithmValidationProfiles$member' => NULL, ], ], 'AlgorithmValidationProfiles' => [ 'base' => NULL, 'refs' => [ 'AlgorithmValidationSpecification$ValidationProfiles' => '

An array of AlgorithmValidationProfile objects, each of which specifies a training job and batch transform job that Amazon SageMaker runs to validate your algorithm.

', ], ], 'AlgorithmValidationSpecification' => [ 'base' => '

Specifies configurations for one or more training jobs that Amazon SageMaker runs to test the algorithm.

', 'refs' => [ 'CreateAlgorithmInput$ValidationSpecification' => '

Specifies configurations for one or more training jobs that Amazon SageMaker runs to test the algorithm\'s training code and, optionally, one or more batch transform jobs that Amazon SageMaker runs to test the algorithm\'s inference code.

', 'DescribeAlgorithmOutput$ValidationSpecification' => '

Details about configurations for one or more training jobs that Amazon SageMaker runs to test the algorithm.

', ], ], 'AnnotationConsolidationConfig' => [ 'base' => '

Configures how labels are consolidated across human workers.

', 'refs' => [ 'HumanTaskConfig$AnnotationConsolidationConfig' => '

Configures how labels are consolidated across human workers.

', ], ], 'ArnOrName' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$AlgorithmName' => '

The name of the algorithm resource to use for the training job. This must be an algorithm resource that you created or that you subscribe to on AWS Marketplace. If you specify a value for this parameter, you can\'t specify a value for TrainingImage.

', 'ContainerDefinition$ModelPackageName' => '

The name of the model package to use to create the model.

', 'DescribeAlgorithmInput$AlgorithmName' => '

The name of the algorithm to describe.

', 'DescribeModelPackageInput$ModelPackageName' => '

The name of the model package to describe.

', 'HyperParameterAlgorithmSpecification$AlgorithmName' => '

The name of the resource algorithm to use for the hyperparameter tuning job. If you specify a value for this parameter, do not specify a value for TrainingImage.

', 'SourceAlgorithm$AlgorithmName' => '

The name of an algorithm that was used to create the model package. The algorithm must be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you are subscribed to.

', ], ], 'AssemblyType' => [ 'base' => NULL, 'refs' => [ 'TransformOutput$AssembleWith' => '

Defines how to assemble the results of the transform job as a single S3 object. Choose a format that is most convenient to you. To concatenate the results in binary format, specify None. To add a newline character at the end of every transformed record, specify Line.

', ], ], 'AttributeName' => [ 'base' => NULL, 'refs' => [ 'AttributeNames$member' => NULL, ], ], 'AttributeNames' => [ 'base' => NULL, 'refs' => [ 'S3DataSource$AttributeNames' => '

A list of one or more attribute names to use that are found in a specified augmented manifest file.

', ], ], 'BatchStrategy' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$BatchStrategy' => '

Determines the number of records to include in a mini-batch. If you want to include only one record in a mini-batch, specify SingleRecord. If you want mini-batches to contain a maximum of the number of records specified in the MaxPayloadInMB parameter, specify MultiRecord.

If you set SplitType to Line and BatchStrategy to MultiRecord, a batch transform automatically splits your input data into the specified payload size. There\'s no need to split the dataset into smaller files or to use larger payload sizes unless the records in your dataset are very large.

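A minimal sketch of the SplitType and BatchStrategy pairing described above (assuming $client is a configured Aws\SageMaker\SageMakerClient; the bucket, model, and job names are placeholders):

    // Hypothetical sketch: line-delimited input, batched as MultiRecord and
    // capped by MaxPayloadInMB.
    $client->createTransformJob([
        "TransformJobName" => "my-transform-job",
        "ModelName"        => "my-model",
        "BatchStrategy"    => "MultiRecord",
        "MaxPayloadInMB"   => 6,
        "TransformInput"   => [
            "DataSource" => [
                "S3DataSource" => [
                    "S3DataType" => "S3Prefix",
                    "S3Uri"      => "s3://my-bucket/input/",
                ],
            ],
            "ContentType" => "text/csv",
            "SplitType"   => "Line",
        ],
        "TransformOutput" => [
            "S3OutputPath" => "s3://my-bucket/output/",
            "AssembleWith" => "Line",
        ],
        "TransformResources" => [
            "InstanceType"  => "ml.m4.xlarge",
            "InstanceCount" => 1,
        ],
    ]);
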
', 'DescribeTransformJobResponse$BatchStrategy' => '

If you want to include only one record in a batch, specify SingleRecord. If you want batches to contain a maximum of the number of records specified in the MaxPayloadInMB parameter, specify MultiRecord.

', 'TransformJobDefinition$BatchStrategy' => '

A string that determines the number of records included in a single mini-batch.

SingleRecord means only one record is used per mini-batch. MultiRecord means a mini-batch is set to contain as many records as can fit within the MaxPayloadInMB limit.

', ], ], 'Boolean' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$IsRequired' => '

Indicates whether the channel is required by the algorithm.

', 'CreateModelInput$EnableNetworkIsolation' => '

Isolates the model container. No inbound or outbound network calls can be made to or from the model container.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'CreateTrainingJobRequest$EnableNetworkIsolation' => '

Isolates the training container. No inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'CreateTrainingJobRequest$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training can take longer because of additional communications between ML compute instances.

', 'DescribeModelOutput$EnableNetworkIsolation' => '

If True, no inbound or outbound network calls can be made to or from the model container.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'DescribeTrainingJobResponse$EnableNetworkIsolation' => '

If True, no inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If network isolation is enabled for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'DescribeTrainingJobResponse$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, specify True. Encryption provides greater security for distributed training, but training can take longer because of the additional communications between ML compute instances.

', 'HyperParameterSpecification$IsTunable' => '

Indicates whether this hyperparameter is tunable in a hyperparameter tuning job.

', 'HyperParameterSpecification$IsRequired' => '

Indicates whether this hyperparameter is required.

', 'HyperParameterTrainingJobDefinition$EnableNetworkIsolation' => '

Isolates the training container. No inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If network isolation is used for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'HyperParameterTrainingJobDefinition$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, specify True. Encryption provides greater security for distributed training, but training can take longer because of the additional communications between ML compute instances.

', 'TrainingJob$EnableNetworkIsolation' => '

If the TrainingJob was created with network isolation, the value is set to true. If network isolation is enabled, nodes can\'t communicate beyond the VPC they run in.

', 'TrainingSpecification$SupportsDistributedTraining' => '

Indicates whether the algorithm supports distributed training. If set to false, buyers can\'t request more than one instance during training.

', ], ], 'BooleanOperator' => [ 'base' => NULL, 'refs' => [ 'SearchExpression$Operator' => '

A Boolean operator used to evaluate the search expression. If you want every conditional statement in all lists to be satisfied for the entire search expression to be true, specify And. If only a single conditional statement needs to be true for the entire search expression to be true, specify Or. The default value is And.

', ], ], 'Branch' => [ 'base' => NULL, 'refs' => [ 'GitConfig$Branch' => '

The default branch for the Git repository.

', ], ], 'CategoricalParameterRange' => [ 'base' => '

A list of categorical hyperparameters to tune.

', 'refs' => [ 'CategoricalParameterRanges$member' => NULL, ], ], 'CategoricalParameterRangeSpecification' => [ 'base' => '

Defines the possible values for a categorical hyperparameter.

', 'refs' => [ 'ParameterRange$CategoricalParameterRangeSpecification' => '

A CategoricalParameterRangeSpecification object that defines the possible values for a categorical hyperparameter.

', ], ], 'CategoricalParameterRanges' => [ 'base' => NULL, 'refs' => [ 'ParameterRanges$CategoricalParameterRanges' => '

The array of CategoricalParameterRange objects that specify ranges of categorical hyperparameters that a hyperparameter tuning job searches.

', ], ], 'Cents' => [ 'base' => NULL, 'refs' => [ 'USD$Cents' => '

The fractional portion, in cents, of the amount.

', ], ], 'CertifyForMarketplace' => [ 'base' => NULL, 'refs' => [ 'CreateAlgorithmInput$CertifyForMarketplace' => '

Whether to certify the algorithm so that it can be listed in AWS Marketplace.

', 'CreateModelPackageInput$CertifyForMarketplace' => '

Whether to certify the model package for listing on AWS Marketplace.

', 'DescribeAlgorithmOutput$CertifyForMarketplace' => '

Whether the algorithm is certified to be listed in AWS Marketplace.

', 'DescribeModelPackageOutput$CertifyForMarketplace' => '

Whether the model package is certified for listing on AWS Marketplace.

', ], ], 'Channel' => [ 'base' => '

A channel is a named input source that training algorithms can consume.

', 'refs' => [ 'InputDataConfig$member' => NULL, ], ], 'ChannelName' => [ 'base' => NULL, 'refs' => [ 'Channel$ChannelName' => '

The name of the channel.

', 'ChannelSpecification$Name' => '

The name of the channel.

', ], ], 'ChannelSpecification' => [ 'base' => '

Defines a named input source, called a channel, to be used by an algorithm.

', 'refs' => [ 'ChannelSpecifications$member' => NULL, ], ], 'ChannelSpecifications' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$TrainingChannels' => '

A list of ChannelSpecification objects, which specify the input sources to be used by the algorithm.

', ], ], 'CodeRepositoryArn' => [ 'base' => NULL, 'refs' => [ 'CodeRepositorySummary$CodeRepositoryArn' => '

The Amazon Resource Name (ARN) of the Git repository.

', 'CreateCodeRepositoryOutput$CodeRepositoryArn' => '

The Amazon Resource Name (ARN) of the new repository.

', 'DescribeCodeRepositoryOutput$CodeRepositoryArn' => '

The Amazon Resource Name (ARN) of the Git repository.

', 'UpdateCodeRepositoryOutput$CodeRepositoryArn' => '

The ARN of the Git repository.

', ], ], 'CodeRepositoryContains' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$DefaultCodeRepositoryContains' => '

A string in the name or URL of a Git repository associated with this notebook instance. This filter returns only notebook instances associated with a Git repository whose name contains the specified string.

', ], ], 'CodeRepositoryNameContains' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesInput$NameContains' => '

A string in the Git repository name. This filter returns only repositories whose name contains the specified string.

', ], ], 'CodeRepositoryNameOrUrl' => [ 'base' => NULL, 'refs' => [ 'AdditionalCodeRepositoryNamesOrUrls$member' => NULL, 'CreateNotebookInstanceInput$DefaultCodeRepository' => '

A Git repository to associate with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'DescribeNotebookInstanceOutput$DefaultCodeRepository' => '

The Git repository associated with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'ListNotebookInstancesInput$AdditionalCodeRepositoryEquals' => '

A filter that returns only notebook instances associated with the specified Git repository.

', 'NotebookInstanceSummary$DefaultCodeRepository' => '

The Git repository associated with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'UpdateNotebookInstanceInput$DefaultCodeRepository' => '

The Git repository to associate with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', ], ], 'CodeRepositorySortBy' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesInput$SortBy' => '

The field to sort results by. The default is Name.

', ], ], 'CodeRepositorySortOrder' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesInput$SortOrder' => '

The sort order for results. The default is Ascending.

', ], ], 'CodeRepositorySummary' => [ 'base' => '

Specifies summary information about a Git repository.

', 'refs' => [ 'CodeRepositorySummaryList$member' => NULL, ], ], 'CodeRepositorySummaryList' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesOutput$CodeRepositorySummaryList' => '

Gets a list of summaries of the Git repositories. Each summary specifies the following values for the repository:

', ], ], 'CognitoClientId' => [ 'base' => NULL, 'refs' => [ 'CognitoMemberDefinition$ClientId' => '

An identifier for an application client. You must create the app client ID using Amazon Cognito.

', ], ], 'CognitoMemberDefinition' => [ 'base' => '

Identifies an Amazon Cognito user group. A user group can be used in one or more work teams.

', 'refs' => [ 'MemberDefinition$CognitoMemberDefinition' => '

The Amazon Cognito user group that is part of the work team.

', ], ], 'CognitoUserGroup' => [ 'base' => NULL, 'refs' => [ 'CognitoMemberDefinition$UserGroup' => '

An identifier for a user group.

', ], ], 'CognitoUserPool' => [ 'base' => NULL, 'refs' => [ 'CognitoMemberDefinition$UserPool' => '

An identifier for a user pool. The user pool must be in the same region as the service that you are calling.

', ], ], 'CompilationJobArn' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationJobArn' => '

The Amazon Resource Name (ARN) of the model compilation job.

', 'CreateCompilationJobResponse$CompilationJobArn' => '

If the action is successful, the service sends back an HTTP 200 response that includes the Amazon Resource Name (ARN) of the compiled job.

', 'DescribeCompilationJobResponse$CompilationJobArn' => '

The Amazon Resource Name (ARN) of the model compilation job.

', ], ], 'CompilationJobStatus' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationJobStatus' => '

The status of the model compilation job.

', 'DescribeCompilationJobResponse$CompilationJobStatus' => '

The status of the model compilation job.

', 'ListCompilationJobsRequest$StatusEquals' => '

A filter that retrieves model compilation jobs with a specific DescribeCompilationJobResponse$CompilationJobStatus status.

', ], ], 'CompilationJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListCompilationJobsResponse$CompilationJobSummaries' => '

An array of CompilationJobSummary objects, each describing a model compilation job.

', ], ], 'CompilationJobSummary' => [ 'base' => '

A summary of a model compilation job.

', 'refs' => [ 'CompilationJobSummaries$member' => NULL, ], ], 'CompressionType' => [ 'base' => NULL, 'refs' => [ 'Channel$CompressionType' => '

If training data is compressed, the compression type. The default value is None. CompressionType is used only in Pipe input mode. In File mode, leave this field unset or set it to None.

', 'CompressionTypes$member' => NULL, 'TransformInput$CompressionType' => '

If your transform data is compressed, specify the compression type. Amazon SageMaker automatically decompresses the data for the transform job accordingly. The default value is None.

', ], ], 'CompressionTypes' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$SupportedCompressionTypes' => '

The allowed compression types, if data compression is used.

', ], ], 'ContainerDefinition' => [ 'base' => '

Describes the container, as part of model definition.

', 'refs' => [ 'ContainerDefinitionList$member' => NULL, 'CreateModelInput$PrimaryContainer' => '

The location of the primary Docker image containing inference code, associated artifacts, and custom environment map that the inference code uses when the model is deployed for predictions.

', 'DescribeModelOutput$PrimaryContainer' => '

The location of the primary inference code, associated artifacts, and custom environment map that the inference code uses when it is deployed in production.

', ], ], 'ContainerDefinitionList' => [ 'base' => NULL, 'refs' => [ 'CreateModelInput$Containers' => '

Specifies the containers in the inference pipeline.

', 'DescribeModelOutput$Containers' => '

The containers in the inference pipeline.

', ], ], 'ContainerHostname' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$ContainerHostname' => '

The DNS host name for the container after Amazon SageMaker deploys it.

', 'ModelPackageContainerDefinition$ContainerHostname' => '

The DNS host name for the Docker container.

', ], ], 'ContentClassifier' => [ 'base' => NULL, 'refs' => [ 'ContentClassifiers$member' => NULL, ], ], 'ContentClassifiers' => [ 'base' => NULL, 'refs' => [ 'LabelingJobDataAttributes$ContentClassifiers' => '

Declares that your content is free of personally identifiable information or adult content. Amazon SageMaker may restrict the Amazon Mechanical Turk workers that can view your task based on this information.

', ], ], 'ContentType' => [ 'base' => NULL, 'refs' => [ 'Channel$ContentType' => '

The MIME type of the data.

', 'ContentTypes$member' => NULL, 'TransformInput$ContentType' => '

The multipurpose internet mail extension (MIME) type of the data. Amazon SageMaker uses the MIME type with each HTTP call to transfer data to the transform job.

', ], ], 'ContentTypes' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$SupportedContentTypes' => '

The supported MIME types for the data.

', 'InferenceSpecification$SupportedContentTypes' => '

The supported MIME types for the input data.

', ], ], 'ContinuousParameterRange' => [ 'base' => '

A list of continuous hyperparameters to tune.

', 'refs' => [ 'ContinuousParameterRanges$member' => NULL, ], ], 'ContinuousParameterRangeSpecification' => [ 'base' => '

Defines the possible values for a continuous hyperparameter.

', 'refs' => [ 'ParameterRange$ContinuousParameterRangeSpecification' => '

A ContinuousParameterRangeSpecification object that defines the possible values for a continuous hyperparameter.

', ], ], 'ContinuousParameterRanges' => [ 'base' => NULL, 'refs' => [ 'ParameterRanges$ContinuousParameterRanges' => '

The array of ContinuousParameterRange objects that specify ranges of continuous hyperparameters that a hyperparameter tuning job searches.

', ], ], 'CreateAlgorithmInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateAlgorithmOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateCodeRepositoryOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateCompilationJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateCompilationJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateHyperParameterTuningJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateLabelingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateLabelingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelPackageInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelPackageOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceLifecycleConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreatePresignedNotebookInstanceUrlInput' => [ 'base' => NULL, 'refs' => [], ], 'CreatePresignedNotebookInstanceUrlOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateTrainingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateTrainingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateTransformJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateTransformJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreationTime' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$CreationTime' => '

A timestamp that shows when the algorithm was created.

', 'CodeRepositorySummary$CreationTime' => '

The date and time that the Git repository was created.

', 'CompilationJobSummary$CreationTime' => '

The time when the model compilation job was created.

', 'DescribeAlgorithmOutput$CreationTime' => '

A timestamp specifying when the algorithm was created.

', 'DescribeCodeRepositoryOutput$CreationTime' => '

The date and time that the repository was created.

', 'DescribeCompilationJobResponse$CreationTime' => '

The time that the model compilation job was created.

', 'DescribeModelPackageOutput$CreationTime' => '

A timestamp specifying when the model package was created.

', 'DescribeNotebookInstanceLifecycleConfigOutput$CreationTime' => '

A timestamp that tells when the lifecycle configuration was created.

', 'DescribeNotebookInstanceOutput$CreationTime' => '

A timestamp. Use this parameter to return the time when the notebook instance was created.

', 'ListAlgorithmsInput$CreationTimeAfter' => '

A filter that returns only algorithms created after the specified time (timestamp).

', 'ListAlgorithmsInput$CreationTimeBefore' => '

A filter that returns only algorithms created before the specified time (timestamp).

', 'ListCodeRepositoriesInput$CreationTimeAfter' => '

A filter that returns only Git repositories that were created after the specified time.

', 'ListCodeRepositoriesInput$CreationTimeBefore' => '

A filter that returns only Git repositories that were created before the specified time.

', 'ListCompilationJobsRequest$CreationTimeAfter' => '

A filter that returns the model compilation jobs that were created after a specified time.

', 'ListCompilationJobsRequest$CreationTimeBefore' => '

A filter that returns the model compilation jobs that were created before a specified time.

', 'ListModelPackagesInput$CreationTimeAfter' => '

A filter that returns only model packages created after the specified time (timestamp).

', 'ListModelPackagesInput$CreationTimeBefore' => '

A filter that returns only model packages created before the specified time (timestamp).

', 'ListNotebookInstanceLifecycleConfigsInput$CreationTimeBefore' => '

A filter that returns only lifecycle configurations that were created before the specified time (timestamp).

', 'ListNotebookInstanceLifecycleConfigsInput$CreationTimeAfter' => '

A filter that returns only lifecycle configurations that were created after the specified time (timestamp).

', 'ListNotebookInstancesInput$CreationTimeBefore' => '

A filter that returns only notebook instances that were created before the specified time (timestamp).

', 'ListNotebookInstancesInput$CreationTimeAfter' => '

A filter that returns only notebook instances that were created after the specified time (timestamp).

', 'ModelPackageSummary$CreationTime' => '

A timestamp that shows when the model package was created.

', 'NotebookInstanceLifecycleConfigSummary$CreationTime' => '

A timestamp that tells when the lifecycle configuration was created.

', 'NotebookInstanceSummary$CreationTime' => '

A timestamp that shows when the notebook instance was created.

', ], ], 'DataInputConfig' => [ 'base' => NULL, 'refs' => [ 'InputConfig$DataInputConfig' => '

Specifies the name and shape of the expected data inputs for your trained model in JSON dictionary form. The data inputs are InputConfig$Framework specific.

', ], ], 'DataSource' => [ 'base' => '

Describes the location of the channel data.

', 'refs' => [ 'Channel$DataSource' => '

The location of the channel data.

', ], ], 'DeleteAlgorithmInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteEndpointConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteModelInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteModelPackageInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteTagsInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteTagsOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'DeployedImage' => [ 'base' => '

Gets the Amazon EC2 Container Registry path of the Docker image of the model that is hosted in this ProductionVariant.

If you used the registry/repository[:tag] form to specify the image path of the primary container when you created the model hosted in this ProductionVariant, the path resolves to a path of the form registry/repository[@digest]. A digest is a hash value that identifies a specific version of an image. For information about Amazon ECR paths, see Pulling an Image in the Amazon ECR User Guide.

', 'refs' => [ 'DeployedImages$member' => NULL, ], ], 'DeployedImages' => [ 'base' => NULL, 'refs' => [ 'ProductionVariantSummary$DeployedImages' => '

An array of DeployedImage objects that specify the Amazon EC2 Container Registry paths of the inference images deployed on instances of this ProductionVariant.

', ], ], 'DescribeAlgorithmInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeAlgorithmOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCodeRepositoryOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCompilationJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCompilationJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeHyperParameterTuningJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeLabelingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeLabelingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelPackageInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelPackageOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceLifecycleConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeSubscribedWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeSubscribedWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTrainingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTrainingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTransformJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTransformJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'DesiredWeightAndCapacity' => [ 'base' => '

Specifies weight and capacity values for a production variant.

', 'refs' => [ 'DesiredWeightAndCapacityList$member' => NULL, ], ], 'DesiredWeightAndCapacityList' => [ 'base' => NULL, 'refs' => [ 'UpdateEndpointWeightsAndCapacitiesInput$DesiredWeightsAndCapacities' => '

An array of DesiredWeightAndCapacity objects that provide new capacity and weight values for the variants.

', ], ], 'DetailedAlgorithmStatus' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusItem$Status' => '

The current status.

', ], ], 'DetailedModelPackageStatus' => [ 'base' => NULL, 'refs' => [ 'ModelPackageStatusItem$Status' => '

The current status.

', ], ], 'DirectInternetAccess' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$DirectInternetAccess' => '

Sets whether Amazon SageMaker provides internet access to the notebook instance. If you set this to Disabled, this notebook instance will be able to access resources only in your VPC, and will not be able to connect to Amazon SageMaker training and endpoint services unless you configure a NAT Gateway in your VPC.

For more information, see Notebook Instances Are Internet-Enabled by Default. You can set the value of this parameter to Disabled only if you set a value for the SubnetId parameter.

', 'DescribeNotebookInstanceOutput$DirectInternetAccess' => '

Describes whether Amazon SageMaker provides internet access to the notebook instance. If this value is set to Disabled, the notebook instance does not have internet access, and cannot connect to Amazon SageMaker training and endpoint services.

For more information, see Notebook Instances Are Internet-Enabled by Default.

', ], ], 'DisassociateAdditionalCodeRepositories' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateAdditionalCodeRepositories' => '

A list of names or URLs of the default Git repositories to remove from this notebook instance.

', ], ], 'DisassociateDefaultCodeRepository' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateDefaultCodeRepository' => '

The name or URL of the default Git repository to remove from this notebook instance.

', ], ], 'DisassociateNotebookInstanceAcceleratorTypes' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateAcceleratorTypes' => '

A list of the Elastic Inference (EI) instance types to remove from this notebook instance.

', ], ], 'DisassociateNotebookInstanceLifecycleConfig' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateLifecycleConfig' => '

Set to true to remove the notebook instance lifecycle configuration currently associated with the notebook instance.

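Taken together, the Disassociate* fields are boolean flags on UpdateNotebookInstance. A minimal sketch (assuming $client is a configured Aws\SageMaker\SageMakerClient; the instance name is a placeholder):

    // Hypothetical sketch: detach the default repository and the lifecycle
    // configuration from a (stopped) notebook instance.
    $client->updateNotebookInstance([
        "NotebookInstanceName"              => "my-notebook",
        "DisassociateDefaultCodeRepository" => true,
        "DisassociateLifecycleConfig"       => true,
    ]);
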
', ], ], 'Dollars' => [ 'base' => NULL, 'refs' => [ 'USD$Dollars' => '

The whole number of dollars in the amount.

', ], ], 'EndpointArn' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'DescribeEndpointOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'EndpointSummary$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'UpdateEndpointOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'UpdateEndpointWeightsAndCapacitiesOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the updated endpoint.

', ], ], 'EndpointConfigArn' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigOutput$EndpointConfigArn' => '

The Amazon Resource Name (ARN) of the endpoint configuration.

', 'DescribeEndpointConfigOutput$EndpointConfigArn' => '

The Amazon Resource Name (ARN) of the endpoint configuration.

', 'EndpointConfigSummary$EndpointConfigArn' => '

The Amazon Resource Name (ARN) of the endpoint configuration.

', ], ], 'EndpointConfigName' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigInput$EndpointConfigName' => '

The name of the endpoint configuration. You specify this name in a CreateEndpoint request.

', 'CreateEndpointInput$EndpointConfigName' => '

The name of an endpoint configuration. For more information, see CreateEndpointConfig.

', 'DeleteEndpointConfigInput$EndpointConfigName' => '

The name of the endpoint configuration that you want to delete.

', 'DescribeEndpointConfigInput$EndpointConfigName' => '

The name of the endpoint configuration.

', 'DescribeEndpointConfigOutput$EndpointConfigName' => '

Name of the Amazon SageMaker endpoint configuration.

', 'DescribeEndpointOutput$EndpointConfigName' => '

The name of the endpoint configuration associated with this endpoint.

', 'EndpointConfigSummary$EndpointConfigName' => '

The name of the endpoint configuration.

', 'UpdateEndpointInput$EndpointConfigName' => '

The name of the new endpoint configuration.

', ], ], 'EndpointConfigNameContains' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$NameContains' => '

A string in the endpoint configuration name. This filter returns only endpoint configurations whose name contains the specified string.

', ], ], 'EndpointConfigSortKey' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'EndpointConfigSummary' => [ 'base' => '

Provides summary information for an endpoint configuration.

', 'refs' => [ 'EndpointConfigSummaryList$member' => NULL, ], ], 'EndpointConfigSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsOutput$EndpointConfigs' => '

An array of endpoint configurations.

', ], ], 'EndpointName' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointInput$EndpointName' => '

The name of the endpoint. The name must be unique within an AWS Region in your AWS account.

', 'DeleteEndpointInput$EndpointName' => '

The name of the endpoint that you want to delete.

', 'DescribeEndpointInput$EndpointName' => '

The name of the endpoint.

', 'DescribeEndpointOutput$EndpointName' => '

Name of the endpoint.

', 'EndpointSummary$EndpointName' => '

The name of the endpoint.

', 'UpdateEndpointInput$EndpointName' => '

The name of the endpoint whose configuration you want to update.

', 'UpdateEndpointWeightsAndCapacitiesInput$EndpointName' => '

The name of an existing Amazon SageMaker endpoint.

', ], ], 'EndpointNameContains' => [ 'base' => NULL, 'refs' => [ 'ListEndpointsInput$NameContains' => '

A string in endpoint names. This filter returns only endpoints whose name contains the specified string.

', ], ], 'EndpointSortKey' => [ 'base' => NULL, 'refs' => [ 'ListEndpointsInput$SortBy' => '

Sorts the list of results. The default is CreationTime.

', ], ], 'EndpointStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeEndpointOutput$EndpointStatus' => '

The status of the endpoint.

', 'EndpointSummary$EndpointStatus' => '

The status of the endpoint.

To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

', 'ListEndpointsInput$StatusEquals' => '

A filter that returns only endpoints with the specified status.

', ], ], 'EndpointSummary' => [ 'base' => '

Provides summary information for an endpoint.

', 'refs' => [ 'EndpointSummaryList$member' => NULL, ], ], 'EndpointSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListEndpointsOutput$Endpoints' => '

An array of endpoint objects.

', ], ], 'EntityDescription' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$AlgorithmDescription' => '

A brief description of the algorithm.

', 'ChannelSpecification$Description' => '

A brief description of the channel.

', 'CreateAlgorithmInput$AlgorithmDescription' => '

A description of the algorithm.

', 'CreateModelPackageInput$ModelPackageDescription' => '

A description of the model package.

', 'DescribeAlgorithmOutput$AlgorithmDescription' => '

A brief summary about the algorithm.

', 'DescribeModelPackageOutput$ModelPackageDescription' => '

A brief summary of the model package.

', 'HyperParameterSpecification$Description' => '

A brief description of the hyperparameter.

', 'ModelPackageSummary$ModelPackageDescription' => '

A brief description of the model package.

', ], ], 'EntityName' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusItem$Name' => '

The name of the algorithm for which the overall status is being reported.

', 'AlgorithmSummary$AlgorithmName' => '

The name of the algorithm that is described by the summary.

', 'AlgorithmValidationProfile$ProfileName' => '

The name of the profile for the algorithm. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'CodeRepositorySummary$CodeRepositoryName' => '

The name of the Git repository.

', 'CompilationJobSummary$CompilationJobName' => '

The name of the model compilation job that you want a summary for.

', 'CreateAlgorithmInput$AlgorithmName' => '

The name of the algorithm.

', 'CreateCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'CreateCompilationJobRequest$CompilationJobName' => '

A name for the model compilation job. The name must be unique within the AWS Region and within your AWS account.

', 'CreateModelPackageInput$ModelPackageName' => '

The name of the model package. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'DeleteAlgorithmInput$AlgorithmName' => '

The name of the algorithm to delete.

', 'DeleteCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository to delete.

', 'DeleteModelPackageInput$ModelPackageName' => '

The name of the model package. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'DescribeAlgorithmOutput$AlgorithmName' => '

The name of the algorithm being described.

', 'DescribeCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository to describe.

', 'DescribeCodeRepositoryOutput$CodeRepositoryName' => '

The name of the Git repository.

', 'DescribeCompilationJobRequest$CompilationJobName' => '

The name of the model compilation job that you want information about.

', 'DescribeCompilationJobResponse$CompilationJobName' => '

The name of the model compilation job.

', 'DescribeModelPackageOutput$ModelPackageName' => '

The name of the model package being described.

', 'ModelPackageStatusItem$Name' => '

The name of the model package for which the overall status is being reported.

', 'ModelPackageSummary$ModelPackageName' => '

The name of the model package.

', 'ModelPackageValidationProfile$ProfileName' => '

The name of the profile for the model package.

', 'StopCompilationJobRequest$CompilationJobName' => '

The name of the model compilation job to stop.

', 'UpdateCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository to update.

', ], ], 'EnvironmentKey' => [ 'base' => NULL, 'refs' => [ 'EnvironmentMap$key' => NULL, ], ], 'EnvironmentMap' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$Environment' => '

The environment variables to set in the Docker container. Each key and value in the Environment string to string map can have length of up to 1024. We support up to 16 entries in the map.

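A minimal sketch of passing such a map in CreateModel (assuming $client is a configured Aws\SageMaker\SageMakerClient; the image, artifact path, role, and variable names are placeholders):

    // Hypothetical sketch: two environment variables for the primary container.
    $client->createModel([
        "ModelName"        => "my-model",
        "ExecutionRoleArn" => "arn:aws:iam::123456789012:role/MySageMakerRole",
        "PrimaryContainer" => [
            "Image"        => "123456789012.dkr.ecr.us-east-1.amazonaws.com/my-image:latest",
            "ModelDataUrl" => "s3://my-bucket/model.tar.gz",
            "Environment"  => [
                "LOG_LEVEL"    => "info",
                "MODEL_FLAVOR" => "v2",
            ],
        ],
    ]);
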
', ], ], 'EnvironmentValue' => [ 'base' => NULL, 'refs' => [ 'EnvironmentMap$value' => NULL, ], ], 'FailureReason' => [ 'base' => NULL, 'refs' => [ 'DescribeCompilationJobResponse$FailureReason' => '

If a model compilation job failed, the reason it failed.

', 'DescribeEndpointOutput$FailureReason' => '

If the status of the endpoint is Failed, the reason why it failed.

', 'DescribeHyperParameterTuningJobResponse$FailureReason' => '

If the tuning job failed, the reason it failed.

', 'DescribeLabelingJobResponse$FailureReason' => '

If the job failed, the reason that it failed.

', 'DescribeNotebookInstanceOutput$FailureReason' => '

If status is Failed, the reason it failed.

', 'DescribeTrainingJobResponse$FailureReason' => '

If the training job failed, the reason it failed.

', 'DescribeTransformJobResponse$FailureReason' => '

If the transform job failed, the reason that it failed.

', 'HyperParameterTrainingJobSummary$FailureReason' => '

The reason that the training job failed.

', 'LabelingJobSummary$FailureReason' => '

If the LabelingJobStatus field is Failed, this field contains a description of the error.

', 'ResourceInUse$Message' => NULL, 'ResourceLimitExceeded$Message' => NULL, 'ResourceNotFound$Message' => NULL, 'TrainingJob$FailureReason' => '

If the training job failed, the reason it failed.

', 'TransformJobSummary$FailureReason' => '

If the transform job failed, the reason it failed.

', ], ], 'Filter' => [ 'base' => '

A conditional statement for a search expression that includes a Boolean operator, a resource property, and a value.

If you don\'t specify an Operator and a Value, the filter searches for only the specified property. For example, defining a Filter for the FailureReason for the TrainingJob Resource searches for training job objects that have a value in the FailureReason field.

If you specify a Value, but not an Operator, Amazon SageMaker uses the equals operator as the default.

In search, there are several property types:

Metrics

To define a metric filter, enter a value using the form "Metrics.<name>", where <name> is a metric name. For example, the following filter searches for training jobs with an "accuracy" metric greater than "0.9":

{

"Name": "Metrics.accuracy",

"Operator": "GREATER_THAN",

"Value": "0.9"

}

HyperParameters

To define a hyperparameter filter, enter a value with the form "HyperParameters.<name>". Decimal hyperparameter values are treated as a decimal in a comparison if the specified Value is also a decimal value. If the specified Value is an integer, the decimal hyperparameter values are treated as integers. For example, the following filter is satisfied by training jobs with a "learning_rate" hyperparameter that is less than "0.5":

{

"Name": "HyperParameters.learning_rate",

"Operator": "LESS_THAN",

"Value": "0.5"

}

Tags

To define a tag filter, enter a value with the form "Tags.<key>".

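In the AWS SDK for PHP these filters are plain arrays inside the SearchExpression of a search call. A sketch of the metric example above (this sketch assumes the Operator enum uses the CamelCase spelling GreaterThan defined in the service model, rather than GREATER_THAN):

    // The "accuracy greater than 0.9" metric filter from the example above,
    // expressed as a PHP array for $client->search().
    $filter = [
        "Name"     => "Metrics.accuracy",
        "Operator" => "GreaterThan", // assumed enum spelling from the service model
        "Value"    => "0.9",
    ];

    $result = $client->search([
        "Resource" => "TrainingJob",
        "SearchExpression" => ["Filters" => [$filter]],
    ]);
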
', 'refs' => [ 'FilterList$member' => NULL, ], ], 'FilterList' => [ 'base' => NULL, 'refs' => [ 'NestedFilters$Filters' => '

A list of filters. Each filter acts on a property. Filters must contain at least one Filters value. For example, a NestedFilters call might include a filter on the PropertyName parameter of the InputDataConfig property: InputDataConfig.DataSource.S3DataSource.S3Uri.

', 'SearchExpression$Filters' => '

A list of filter objects.

', ], ], 'FilterValue' => [ 'base' => NULL, 'refs' => [ 'Filter$Value' => '

A value used with Resource and Operator to determine if objects satisfy the filter\'s condition. For numerical properties, Value must be an integer or floating-point decimal. For timestamp properties, Value must be an ISO 8601 date-time string of the following format: YYYY-mm-dd\'T\'HH:MM:SS.

', ], ], 'FinalHyperParameterTuningJobObjectiveMetric' => [ 'base' => '

Shows the final value for the objective metric for a training job that was launched by a hyperparameter tuning job. You define the objective metric in the HyperParameterTuningJobObjective parameter of HyperParameterTuningJobConfig.

', 'refs' => [ 'HyperParameterTrainingJobSummary$FinalHyperParameterTuningJobObjectiveMetric' => '

The FinalHyperParameterTuningJobObjectiveMetric object that specifies the value of the objective metric of the tuning job that launched this training job.

', ], ], 'FinalMetricDataList' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$FinalMetricDataList' => '

A collection of MetricData objects that specify the names, values, and dates and times that the training algorithm emitted to Amazon CloudWatch.

', 'TrainingJob$FinalMetricDataList' => '

A list of final metric values that are set when the training job completes. Used only if the training job was configured to use metrics.

', ], ], 'Float' => [ 'base' => NULL, 'refs' => [ 'MetricData$Value' => '

The value of the metric.

', ], ], 'Framework' => [ 'base' => NULL, 'refs' => [ 'InputConfig$Framework' => '

Identifies the framework in which the model was trained. For example: TENSORFLOW.

', ], ], 'GetSearchSuggestionsRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetSearchSuggestionsResponse' => [ 'base' => NULL, 'refs' => [], ], 'GitConfig' => [ 'base' => '

Specifies configuration details for a Git repository in your AWS account.

', 'refs' => [ 'CodeRepositorySummary$GitConfig' => '

Configuration details for the Git repository, including the URL where it is located and the ARN of the AWS Secrets Manager secret that contains the credentials used to access the repository.

', 'CreateCodeRepositoryInput$GitConfig' => '

Specifies details about the repository, including the URL where the repository is located, the default branch, and credentials to use to access the repository.

', 'DescribeCodeRepositoryOutput$GitConfig' => '

Configuration details about the repository, including the URL where the repository is located, the default branch, and the Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the repository.

', ], ], 'GitConfigForUpdate' => [ 'base' => '

Specifies configuration details for a Git repository when the repository is updated.

', 'refs' => [ 'UpdateCodeRepositoryInput$GitConfig' => '

The configuration of the git repository, including the URL and the Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the repository. The secret must have a staging label of AWSCURRENT and must be in the following format:

{"username": UserName, "password": Password}

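A minimal sketch (assuming $client is a configured Aws\SageMaker\SageMakerClient; the repository name and secret ARN are placeholders):

    // Hypothetical sketch: point a registered repository at a rotated secret.
    $client->updateCodeRepository([
        "CodeRepositoryName" => "my-registered-repo",
        "GitConfig" => [
            "SecretArn" => "arn:aws:secretsmanager:us-east-1:123456789012:secret:my-git-creds",
        ],
    ]);
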
', ], ], 'GitConfigUrl' => [ 'base' => NULL, 'refs' => [ 'GitConfig$RepositoryUrl' => '

The URL where the Git repository is located.

', ], ], 'HumanTaskConfig' => [ 'base' => '

Information required for human workers to complete a labeling task.

', 'refs' => [ 'CreateLabelingJobRequest$HumanTaskConfig' => '

Configures the information required for human workers to complete a labeling task.

', 'DescribeLabelingJobResponse$HumanTaskConfig' => '

Configuration information required for human workers to complete a labeling task.

', ], ], 'HyperParameterAlgorithmSpecification' => [ 'base' => '

Specifies which training algorithm to use for training jobs that a hyperparameter tuning job launches and the metrics to monitor.

', 'refs' => [ 'HyperParameterTrainingJobDefinition$AlgorithmSpecification' => '

The HyperParameterAlgorithmSpecification object that specifies the resource algorithm to use for the training jobs that the tuning job launches.

', ], ], 'HyperParameterSpecification' => [ 'base' => '

Defines a hyperparameter to be used by an algorithm.

', 'refs' => [ 'HyperParameterSpecifications$member' => NULL, ], ], 'HyperParameterSpecifications' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$SupportedHyperParameters' => '

A list of the HyperParameterSpecification objects that define the supported hyperparameters. This is required if the algorithm supports automatic model tuning.

', ], ], 'HyperParameterTrainingJobDefinition' => [ 'base' => '

Defines the training jobs launched by a hyperparameter tuning job.

', 'refs' => [ 'CreateHyperParameterTuningJobRequest$TrainingJobDefinition' => '

The HyperParameterTrainingJobDefinition object that describes the training jobs that this tuning job launches, including static hyperparameters, input data configuration, output data configuration, resource configuration, and stopping condition.

', 'DescribeHyperParameterTuningJobResponse$TrainingJobDefinition' => '

The HyperParameterTrainingJobDefinition object that specifies the definition of the training jobs that this tuning job launches.

', ], ], 'HyperParameterTrainingJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListTrainingJobsForHyperParameterTuningJobResponse$TrainingJobSummaries' => '

A list of TrainingJobSummary objects that describe the training jobs that the ListTrainingJobsForHyperParameterTuningJob request returned.

', ], ], 'HyperParameterTrainingJobSummary' => [ 'base' => '

Specifies summary information about a training job.

', 'refs' => [ 'DescribeHyperParameterTuningJobResponse$BestTrainingJob' => '

A TrainingJobSummary object that describes the training job that completed with the best current HyperParameterTuningJobObjective.

', 'DescribeHyperParameterTuningJobResponse$OverallBestTrainingJob' => '

If the hyperparameter tuning job is a warm start tuning job with a WarmStartType of IDENTICAL_DATA_AND_ALGORITHM, this is the TrainingJobSummary for the training job with the best objective metric value of all training jobs launched by this tuning job and all parent jobs specified for the warm start tuning job.

', 'HyperParameterTrainingJobSummaries$member' => NULL, ], ], 'HyperParameterTuningJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateHyperParameterTuningJobResponse$HyperParameterTuningJobArn' => '

The Amazon Resource Name (ARN) of the tuning job. Amazon SageMaker assigns an ARN to a hyperparameter tuning job when you create it.

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobArn' => '

The Amazon Resource Name (ARN) of the tuning job.

', 'DescribeTrainingJobResponse$TuningJobArn' => '

The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the training job was launched by a hyperparameter tuning job.

', 'HyperParameterTuningJobSummary$HyperParameterTuningJobArn' => '

The Amazon Resource Name (ARN) of the tuning job.

', 'TrainingJob$TuningJobArn' => '

The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the training job was launched by a hyperparameter tuning job.

', ], ], 'HyperParameterTuningJobConfig' => [ 'base' => '

Configures a hyperparameter tuning job.

', 'refs' => [ 'CreateHyperParameterTuningJobRequest$HyperParameterTuningJobConfig' => '

The HyperParameterTuningJobConfig object that describes the tuning job, including the search strategy, the objective metric used to evaluate training jobs, ranges of parameters to search, and resource limits for the tuning job. For more information, see Automatic Model Tuning.
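As a hedged sketch of how these pieces fit together in the AWS SDK for PHP (the job name, objective metric, parameter ranges, training image, role, and bucket below are all assumed, illustrative values, not defaults):

// Sketch: a minimal CreateHyperParameterTuningJob request with the PHP SDK.
$client = new Aws\SageMaker\SageMakerClient([
    'version' => '2017-07-24',
    'region'  => 'us-east-1',
]);

$client->createHyperParameterTuningJob([
    'HyperParameterTuningJobName' => 'my-tuning-job', // assumed name
    'HyperParameterTuningJobConfig' => [
        'Strategy' => 'Bayesian', // the only supported strategy, per the description above
        'HyperParameterTuningJobObjective' => [
            'Type'       => 'Maximize',
            'MetricName' => 'validation:accuracy', // assumed metric emitted by the algorithm
        ],
        'ResourceLimits' => [
            'MaxNumberOfTrainingJobs' => 20,
            'MaxParallelTrainingJobs' => 2,
        ],
        'ParameterRanges' => [
            'ContinuousParameterRanges' => [
                ['Name' => 'learning_rate', 'MinValue' => '0.001', 'MaxValue' => '0.1'],
            ],
        ],
    ],
    'TrainingJobDefinition' => [
        'AlgorithmSpecification' => [
            'TrainingImage'     => '123456789012.dkr.ecr.us-east-1.amazonaws.com/my-algo:latest', // assumed
            'TrainingInputMode' => 'File',
        ],
        'RoleArn'           => 'arn:aws:iam::123456789012:role/SageMakerRole', // assumed
        'OutputDataConfig'  => ['S3OutputPath' => 's3://my-bucket/tuning-output/'],
        'ResourceConfig'    => ['InstanceType' => 'ml.m4.xlarge', 'InstanceCount' => 1, 'VolumeSizeInGB' => 10],
        'StoppingCondition' => ['MaxRuntimeInSeconds' => 3600],
    ],
]);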

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobConfig' => '

The HyperParameterTuningJobConfig object that specifies the configuration of the tuning job.

', ], ], 'HyperParameterTuningJobName' => [ 'base' => NULL, 'refs' => [ 'CreateHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job. This name is the prefix for the names of all training jobs that this tuning job launches. The name must be unique within the same AWS account and AWS Region. The name must have { } to { } characters. Valid characters are a-z, A-Z, 0-9, and : + = @ _ % - (hyphen). The name is not case sensitive.

', 'DescribeHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job to describe.

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobName' => '

The name of the tuning job.

', 'HyperParameterTrainingJobSummary$TuningJobName' => '

The name of the hyperparameter tuning job that launched the training job.

', 'HyperParameterTuningJobSummary$HyperParameterTuningJobName' => '

The name of the tuning job.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job whose training jobs you want to list.

', 'ParentHyperParameterTuningJob$HyperParameterTuningJobName' => '

The name of the hyperparameter tuning job to be used as a starting point for a new hyperparameter tuning job.

', 'StopHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job to stop.

', ], ], 'HyperParameterTuningJobObjective' => [ 'base' => '

Defines the objective metric for a hyperparameter tuning job. Hyperparameter tuning uses the value of this metric to evaluate the training jobs it launches, and returns the training job that results in either the highest or lowest value for this metric, depending on the value you specify for the Type parameter.

', 'refs' => [ 'HyperParameterTuningJobConfig$HyperParameterTuningJobObjective' => '

The HyperParameterTuningJobObjective object that specifies the objective metric for this tuning job.

', 'HyperParameterTuningJobObjectives$member' => NULL, ], ], 'HyperParameterTuningJobObjectiveType' => [ 'base' => NULL, 'refs' => [ 'FinalHyperParameterTuningJobObjectiveMetric$Type' => '

Whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.

', 'HyperParameterTuningJobObjective$Type' => '

Whether to minimize or maximize the objective metric.

', ], ], 'HyperParameterTuningJobObjectives' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$SupportedTuningJobObjectiveMetrics' => '

A list of the metrics that the algorithm emits that can be used as the objective metric in a hyperparameter tuning job.

', ], ], 'HyperParameterTuningJobSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListHyperParameterTuningJobsRequest$SortBy' => '

The field to sort results by. The default is Name.

', ], ], 'HyperParameterTuningJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobStatus' => '

The status of the tuning job: InProgress, Completed, Failed, Stopping, or Stopped.

', 'HyperParameterTuningJobSummary$HyperParameterTuningJobStatus' => '

The status of the tuning job.

', 'ListHyperParameterTuningJobsRequest$StatusEquals' => '

A filter that returns only tuning jobs with the specified status.

', ], ], 'HyperParameterTuningJobStrategyType' => [ 'base' => '

The strategy hyperparameter tuning uses to find the best combination of hyperparameters for your model. Currently, the only supported value is Bayesian.

', 'refs' => [ 'HyperParameterTuningJobConfig$Strategy' => '

Specifies the search strategy for hyperparameters. Currently, the only valid value is Bayesian.

', 'HyperParameterTuningJobSummary$Strategy' => '

Specifies the search strategy hyperparameter tuning uses to choose which hyperparameters to use for each iteration. Currently, the only valid value is Bayesian.

', ], ], 'HyperParameterTuningJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListHyperParameterTuningJobsResponse$HyperParameterTuningJobSummaries' => '

A list of HyperParameterTuningJobSummary objects that describe the tuning jobs that the ListHyperParameterTuningJobs request returned.

', ], ], 'HyperParameterTuningJobSummary' => [ 'base' => '

Provides summary information about a hyperparameter tuning job.

', 'refs' => [ 'HyperParameterTuningJobSummaries$member' => NULL, ], ], 'HyperParameterTuningJobWarmStartConfig' => [ 'base' => '

Specifies the configuration for a hyperparameter tuning job that uses one or more previous hyperparameter tuning jobs as a starting point. The results of previous tuning jobs are used to inform which combinations of hyperparameters to search over in the new tuning job.

All training jobs launched by the new hyperparameter tuning job are evaluated by using the objective metric, and the training job that performs the best is compared to the best training jobs from the parent tuning jobs. From these, the training job that performs the best as measured by the objective metric is returned as the overall best training job.

All training jobs launched by parent hyperparameter tuning jobs and the new hyperparameter tuning jobs count against the limit of training jobs for the tuning job.

', 'refs' => [ 'CreateHyperParameterTuningJobRequest$WarmStartConfig' => '

Specifies the configuration for starting the hyperparameter tuning job using one or more previous tuning jobs as a starting point. The results of previous tuning jobs are used to inform which combinations of hyperparameters to search over in the new tuning job.

All training jobs launched by the new hyperparameter tuning job are evaluated by using the objective metric. If you specify IDENTICAL_DATA_AND_ALGORITHM as the WarmStartType value for the warm start configuration, the training job that performs the best in the new tuning job is compared to the best training jobs from the parent tuning jobs. From these, the training job that performs the best as measured by the objective metric is returned as the overall best training job.

All training jobs launched by parent hyperparameter tuning jobs and the new hyperparameter tuning jobs count against the limit of training jobs for the tuning job.

', 'DescribeHyperParameterTuningJobResponse$WarmStartConfig' => '

The configuration for starting the hyperparameter tuning job using one or more previous tuning jobs as a starting point. The results of previous tuning jobs are used to inform which combinations of hyperparameters to search over in the new tuning job.

', ], ], 'HyperParameterTuningJobWarmStartType' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTuningJobWarmStartConfig$WarmStartType' => '

Specifies one of the following:

IDENTICAL_DATA_AND_ALGORITHM

The new hyperparameter tuning job uses the same input data and training image as the parent tuning jobs. You can change the hyperparameter ranges to search and the maximum number of training jobs that the hyperparameter tuning job launches. You cannot use a new version of the training algorithm, unless the changes in the new version do not affect the algorithm itself. For example, changes that improve logging or adding support for a different data format are allowed. You can also change hyperparameters from tunable to static, and from static to tunable, but the total number of static plus tunable hyperparameters must remain the same as it is in all parent jobs. The objective metric for the new tuning job must be the same as for all parent jobs.

TRANSFER_LEARNING

The new hyperparameter tuning job can include input data, hyperparameter ranges, maximum number of concurrent training jobs, and maximum number of training jobs that are different than those of its parent hyperparameter tuning jobs. The training image can also be a different version from the version used in the parent hyperparameter tuning job. You can also change hyperparameters from tunable to static, and from static to tunable, but the total number of static plus tunable hyperparameters must remain the same as it is in all parent jobs. The objective metric for the new tuning job must be the same as for all parent jobs.
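Structurally, the warm start configuration itself is small. A minimal sketch of the value that would be added to a createHyperParameterTuningJob request array, with an assumed parent job name:

// Sketch: warm-start a new tuning job from one parent job (name assumed).
$params['WarmStartConfig'] = [
    'WarmStartType' => 'IDENTICAL_DATA_AND_ALGORITHM', // or 'TRANSFER_LEARNING'
    'ParentHyperParameterTuningJobs' => [
        ['HyperParameterTuningJobName' => 'parent-tuning-job'],
    ],
];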

', ], ], 'HyperParameters' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobRequest$HyperParameters' => '

Algorithm-specific parameters that influence the quality of the model. You set hyperparameters before you start the learning process. For a list of hyperparameters for each training algorithm provided by Amazon SageMaker, see Algorithms.

You can specify a maximum of 100 hyperparameters. Each hyperparameter is a key-value pair. Each key and value is limited to 256 characters, as specified by the Length Constraint.
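In the PHP SDK these are passed as a plain string-to-string map; a sketch with assumed hyperparameter names and values:

// Sketch: static hyperparameters for CreateTrainingJob (names and values assumed).
// Keys and values are strings, each limited to 256 characters.
$params['HyperParameters'] = [
    'learning_rate' => '0.05',
    'epochs'        => '10',
    'batch_size'    => '128',
];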

', 'DescribeTrainingJobResponse$HyperParameters' => '

Algorithm-specific parameters.

', 'HyperParameterTrainingJobDefinition$StaticHyperParameters' => '

Specifies the values of hyperparameters that do not change for the tuning job.

', 'HyperParameterTrainingJobSummary$TunedHyperParameters' => '

A list of the hyperparameters for which you specified ranges to search.

', 'TrainingJob$HyperParameters' => '

Algorithm-specific parameters.

', 'TrainingJobDefinition$HyperParameters' => '

The hyperparameters used for the training job.

', ], ], 'Image' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$Image' => '

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored. If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

', 'DeployedImage$SpecifiedImage' => '

The image path you specified when you created the model.

', 'DeployedImage$ResolvedImage' => '

The specific digest path of the image hosted in this ProductionVariant.

', 'ModelPackageContainerDefinition$Image' => '

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

', 'TrainingSpecification$TrainingImage' => '

The Amazon ECR registry path of the Docker image that contains the training algorithm.

', ], ], 'ImageDigest' => [ 'base' => NULL, 'refs' => [ 'ModelPackageContainerDefinition$ImageDigest' => '

An MD5 hash of the training algorithm that identifies the Docker image used for training.

', 'TrainingSpecification$TrainingImageDigest' => '

An MD5 hash of the training algorithm that identifies the Docker image used for training.

', ], ], 'InferenceSpecification' => [ 'base' => '

Defines how to perform inference generation after a training job is run.

', 'refs' => [ 'CreateAlgorithmInput$InferenceSpecification' => '

Specifies details about inference jobs that the algorithm runs, including the following:

', 'CreateModelPackageInput$InferenceSpecification' => '

Specifies details about inference jobs that can be run with models based on this model package, including the following:

', 'DescribeAlgorithmOutput$InferenceSpecification' => '

Details about inference jobs that the algorithm runs.

', 'DescribeModelPackageOutput$InferenceSpecification' => '

Details about inference jobs that can be run with models based on this model package.

', ], ], 'InputConfig' => [ 'base' => '

Contains information about the location of input model artifacts, the name and shape of the expected data inputs, and the framework in which the model was trained.

', 'refs' => [ 'CreateCompilationJobRequest$InputConfig' => '

Provides information about the location of input model artifacts, the name and shape of the expected data inputs, and the framework in which the model was trained.

', 'DescribeCompilationJobResponse$InputConfig' => '

Information about the location in Amazon S3 of the input model artifacts, the name and shape of the expected data inputs, and the framework in which the model was trained.

', ], ], 'InputDataConfig' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobRequest$InputDataConfig' => '

An array of Channel objects. Each channel is a named input source. InputDataConfig describes the input data and its location.

Algorithms can accept input data from one or more channels. For example, an algorithm might have two channels of input data, training_data and validation_data. The configuration for each channel provides the S3 location where the input data is stored. It also provides information about the stored data: the MIME type, compression method, and whether the data is wrapped in RecordIO format.

Depending on the input mode that the algorithm supports, Amazon SageMaker either copies input data files from an S3 bucket to a local directory in the Docker container, or makes it available as input streams.
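Following the two-channel example above, a sketch of the corresponding request value (the S3 paths and content types are assumed):

// Sketch: two named input channels for CreateTrainingJob (S3 paths assumed).
$params['InputDataConfig'] = [
    [
        'ChannelName' => 'training_data',
        'DataSource'  => [
            'S3DataSource' => [
                'S3DataType' => 'S3Prefix',
                'S3Uri'      => 's3://my-bucket/train/',
            ],
        ],
        'ContentType'     => 'text/csv',
        'CompressionType' => 'None',
    ],
    [
        'ChannelName' => 'validation_data',
        'DataSource'  => [
            'S3DataSource' => [
                'S3DataType' => 'S3Prefix',
                'S3Uri'      => 's3://my-bucket/validation/',
            ],
        ],
        'ContentType'     => 'text/csv',
        'CompressionType' => 'None',
    ],
];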

', 'DescribeTrainingJobResponse$InputDataConfig' => '

An array of Channel objects that describes each data input channel.

', 'HyperParameterTrainingJobDefinition$InputDataConfig' => '

An array of Channel objects that specify the input for the training jobs that the tuning job launches.

', 'TrainingJob$InputDataConfig' => '

An array of Channel objects that describes each data input channel.

', 'TrainingJobDefinition$InputDataConfig' => '

An array of Channel objects, each of which specifies an input source.

', ], ], 'InputModes' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$SupportedInputModes' => '

The allowed input modes, either FILE or PIPE.

In FILE mode, Amazon SageMaker copies the data from the input source onto the local Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm. This is the most commonly used input mode.

In PIPE mode, Amazon SageMaker streams input data from the source directly to your algorithm without using the EBS volume.

', ], ], 'InstanceType' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$InstanceType' => '

The type of ML compute instance to launch for the notebook instance.

', 'DescribeNotebookInstanceOutput$InstanceType' => '

The type of ML compute instance running on the notebook instance.

', 'NotebookInstanceSummary$InstanceType' => '

The type of ML compute instance that the notebook instance is running on.

', 'UpdateNotebookInstanceInput$InstanceType' => '

The Amazon ML compute instance type.

', ], ], 'IntegerParameterRange' => [ 'base' => '

For a hyperparameter of the integer type, specifies the range that a hyperparameter tuning job searches.

', 'refs' => [ 'IntegerParameterRanges$member' => NULL, ], ], 'IntegerParameterRangeSpecification' => [ 'base' => '

Defines the possible values for an integer hyperparameter.

', 'refs' => [ 'ParameterRange$IntegerParameterRangeSpecification' => '

An IntegerParameterRangeSpecification object that defines the possible values for an integer hyperparameter.

', ], ], 'IntegerParameterRanges' => [ 'base' => NULL, 'refs' => [ 'ParameterRanges$IntegerParameterRanges' => '

The array of IntegerParameterRange objects that specify ranges of integer hyperparameters that a hyperparameter tuning job searches.

', ], ], 'JobReferenceCode' => [ 'base' => NULL, 'refs' => [ 'DescribeLabelingJobResponse$JobReferenceCode' => '

A unique identifier for work done as part of a labeling job.

', 'LabelingJobForWorkteamSummary$JobReferenceCode' => '

A unique identifier for a labeling job. You can use this to refer to a specific labeling job.

', ], ], 'JobReferenceCodeContains' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsForWorkteamRequest$JobReferenceCodeContains' => '

A filter that limits jobs to only the ones whose job reference code contains the specified string.

', ], ], 'KmsKeyId' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigInput$KmsKeyId' => '

The Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance that hosts the endpoint.

', 'CreateNotebookInstanceInput$KmsKeyId' => '

If you provide an AWS KMS key ID, Amazon SageMaker uses it to encrypt data at rest on the ML storage volume that is attached to your notebook instance. The KMS key you provide must be enabled. For information, see Enabling and Disabling Keys in the AWS Key Management Service Developer Guide.

', 'DescribeEndpointConfigOutput$KmsKeyId' => '

AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage volume attached to the instance.

', 'DescribeNotebookInstanceOutput$KmsKeyId' => '

The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage volume attached to the instance.

', 'LabelingJobOutputConfig$KmsKeyId' => '

The AWS Key Management Service ID of the key used to encrypt the output data, if any.

', 'LabelingJobResourceConfig$VolumeKmsKeyId' => '

The AWS Key Management Service key ID for the key used to encrypt the output data, if any.

', 'OutputDataConfig$KmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption. The KmsKeyId can be any of the following formats:

If you don\'t provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role\'s account. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.

The KMS key policy must grant permission to the IAM role that you specify in your CreateTrainingJob request. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.
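For illustration, a sketch of an output configuration that supplies a customer managed key (the bucket and key ARN are assumed values; a key ID or alias can generally be used in place of the full ARN):

// Sketch: encrypt model artifacts at rest with a customer managed KMS key.
$params['OutputDataConfig'] = [
    'S3OutputPath' => 's3://my-bucket/model-artifacts/',
    'KmsKeyId'     => 'arn:aws:kms:us-east-1:123456789012:key/1234abcd-12ab-34cd-56ef-1234567890ab',
];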

', 'ResourceConfig$VolumeKmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the training job. The VolumeKmsKeyId can be any of the following formats:

', 'TransformOutput$KmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption. The KmsKeyId can be any of the following formats:

If you don\'t provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role\'s account. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.

The KMS key policy must grant permission to the IAM role that you specify in your CreateTransformJob request. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.

', 'TransformResources$VolumeKmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the batch transform job. The VolumeKmsKeyId can be any of the following formats:

', ], ], 'LabelAttributeName' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobRequest$LabelAttributeName' => '

The attribute name to use for the label in the output manifest file. This is the key for the key/value pair formed with the label that a worker assigns to the object. The name can\'t end with "-metadata". If you are running a semantic segmentation labeling job, the attribute name must end with "-ref". If you are running any other kind of labeling job, the attribute name must not end with "-ref".

', 'DescribeLabelingJobResponse$LabelAttributeName' => '

The attribute used as the label in the output manifest file.

', ], ], 'LabelCounter' => [ 'base' => NULL, 'refs' => [ 'LabelCounters$TotalLabeled' => '

The total number of objects labeled.

', 'LabelCounters$HumanLabeled' => '

The total number of objects labeled by a human worker.

', 'LabelCounters$MachineLabeled' => '

The total number of objects labeled by automated data labeling.

', 'LabelCounters$FailedNonRetryableError' => '

The total number of objects that could not be labeled due to an error.

', 'LabelCounters$Unlabeled' => '

The total number of objects not yet labeled.

', 'LabelCountersForWorkteam$HumanLabeled' => '

The total number of data objects labeled by a human worker.

', 'LabelCountersForWorkteam$PendingHuman' => '

The total number of data objects that need to be labeled by a human worker.

', 'LabelCountersForWorkteam$Total' => '

The total number of tasks in the labeling job.

', ], ], 'LabelCounters' => [ 'base' => '

Provides a breakdown of the number of objects labeled.

', 'refs' => [ 'DescribeLabelingJobResponse$LabelCounters' => '

Provides a breakdown of the number of data objects labeled by humans, the number of objects labeled by machine, the number of objects that couldn\'t be labeled, and the total number of objects labeled.

', 'LabelingJobSummary$LabelCounters' => '

Counts showing the progress of the labeling job.

', ], ], 'LabelCountersForWorkteam' => [ 'base' => '

Provides counts for human-labeled tasks in the labeling job.

', 'refs' => [ 'LabelingJobForWorkteamSummary$LabelCounters' => '

Provides information about the progress of a labeling job.

', ], ], 'LabelingJobAlgorithmSpecificationArn' => [ 'base' => NULL, 'refs' => [ 'LabelingJobAlgorithmsConfig$LabelingJobAlgorithmSpecificationArn' => '

Specifies the Amazon Resource Name (ARN) of the algorithm used for auto-labeling. You must select one of the following ARNs:

', ], ], 'LabelingJobAlgorithmsConfig' => [ 'base' => '

Provides configuration information for auto-labeling of your data objects. A LabelingJobAlgorithmsConfig object must be supplied in order to use auto-labeling.

', 'refs' => [ 'CreateLabelingJobRequest$LabelingJobAlgorithmsConfig' => '

Configures the information required to perform automated data labeling.

', 'DescribeLabelingJobResponse$LabelingJobAlgorithmsConfig' => '

Configuration information for automated data labeling.

', ], ], 'LabelingJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the labeling job. You use this ARN to identify the labeling job.

', 'DescribeLabelingJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the labeling job.

', 'DescribeTrainingJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the transform or training job.

', 'DescribeTransformJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the transform or training job.

', 'LabelingJobSummary$LabelingJobArn' => '

The Amazon Resource Name (ARN) assigned to the labeling job when it was created.

', 'TrainingJob$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the labeling job.

', ], ], 'LabelingJobDataAttributes' => [ 'base' => '

Attributes of the data specified by the customer. Use these to describe the data to be labeled.

', 'refs' => [ 'LabelingJobInputConfig$DataAttributes' => '

Attributes of the data specified by the customer.

', ], ], 'LabelingJobDataSource' => [ 'base' => '

Provides information about the location of input data.

', 'refs' => [ 'LabelingJobInputConfig$DataSource' => '

The location of the input data.

', ], ], 'LabelingJobForWorkteamSummary' => [ 'base' => '

Provides summary information for a work team.

', 'refs' => [ 'LabelingJobForWorkteamSummaryList$member' => NULL, ], ], 'LabelingJobForWorkteamSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsForWorkteamResponse$LabelingJobSummaryList' => '

An array of LabelingJobSummary objects, each describing a labeling job.

', ], ], 'LabelingJobInputConfig' => [ 'base' => '

Input configuration information for a labeling job.

', 'refs' => [ 'CreateLabelingJobRequest$InputConfig' => '

Input data for the labeling job, such as the Amazon S3 location of the data objects and the location of the manifest file that describes the data objects.

', 'DescribeLabelingJobResponse$InputConfig' => '

Input configuration information for the labeling job, such as the Amazon S3 location of the data objects and the location of the manifest file that describes the data objects.

', 'LabelingJobSummary$InputConfig' => '

Input configuration for the labeling job.

', ], ], 'LabelingJobName' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobRequest$LabelingJobName' => '

The name of the labeling job. This name is used to identify the job in a list of labeling jobs.

', 'DescribeLabelingJobRequest$LabelingJobName' => '

The name of the labeling job to return information for.

', 'DescribeLabelingJobResponse$LabelingJobName' => '

The name assigned to the labeling job when it was created.

', 'LabelingJobForWorkteamSummary$LabelingJobName' => '

The name of the labeling job that the work team is assigned to.

', 'LabelingJobSummary$LabelingJobName' => '

The name of the labeling job.

', 'StopLabelingJobRequest$LabelingJobName' => '

The name of the labeling job to stop.

', ], ], 'LabelingJobOutput' => [ 'base' => '

Specifies the location of the output produced by the labeling job.

', 'refs' => [ 'DescribeLabelingJobResponse$LabelingJobOutput' => '

The location of the output produced by the labeling job.

', 'LabelingJobSummary$LabelingJobOutput' => '

The location of the output produced by the labeling job.

', ], ], 'LabelingJobOutputConfig' => [ 'base' => '

Output configuration information for a labeling job.

', 'refs' => [ 'CreateLabelingJobRequest$OutputConfig' => '

The location of the output data and the AWS Key Management Service key ID for the key used to encrypt the output data, if any.

', 'DescribeLabelingJobResponse$OutputConfig' => '

The location of the job\'s output data and the AWS Key Management Service key ID for the key used to encrypt the output data, if any.

', ], ], 'LabelingJobResourceConfig' => [ 'base' => '

Provides configuration information for labeling jobs.

', 'refs' => [ 'LabelingJobAlgorithmsConfig$LabelingJobResourceConfig' => '

Provides configuration information for a labeling job.

', ], ], 'LabelingJobS3DataSource' => [ 'base' => '

The Amazon S3 location of the input data objects.

', 'refs' => [ 'LabelingJobDataSource$S3DataSource' => '

The Amazon S3 location of the input data objects.

', ], ], 'LabelingJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeLabelingJobResponse$LabelingJobStatus' => '

The processing status of the labeling job.

', 'LabelingJobSummary$LabelingJobStatus' => '

The current status of the labeling job.

', 'ListLabelingJobsRequest$StatusEquals' => '

A filter that retrieves only labeling jobs with a specific status.

', ], ], 'LabelingJobStoppingConditions' => [ 'base' => '

A set of conditions for stopping a labeling job. If any of the conditions are met, the job is automatically stopped. You can use these conditions to control the cost of data labeling.

', 'refs' => [ 'CreateLabelingJobRequest$StoppingConditions' => '

A set of conditions for stopping the labeling job. If any of the conditions are met, the job is automatically stopped. You can use these conditions to control the cost of data labeling.

', 'DescribeLabelingJobResponse$StoppingConditions' => '

A set of conditions for stopping a labeling job. If any of the conditions are met, the job is automatically stopped.

', ], ], 'LabelingJobSummary' => [ 'base' => '

Provides summary information about a labeling job.

', 'refs' => [ 'LabelingJobSummaryList$member' => NULL, ], ], 'LabelingJobSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsResponse$LabelingJobSummaryList' => '

An array of LabelingJobSummary objects, each describing a labeling job.

', ], ], 'LambdaFunctionArn' => [ 'base' => NULL, 'refs' => [ 'AnnotationConsolidationConfig$AnnotationConsolidationLambdaArn' => '

The Amazon Resource Name (ARN) of a Lambda function that implements the logic for annotation consolidation.

For the built-in bounding box, image classification, semantic segmentation, and text classification task types, Amazon SageMaker Ground Truth provides the following Lambda functions:

For more information, see Annotation Consolidation.

', 'HumanTaskConfig$PreHumanTaskLambdaArn' => '

The Amazon Resource Name (ARN) of a Lambda function that is run before a data object is sent to a human worker. Use this function to provide input to a custom labeling job.

For the built-in bounding box, image classification, semantic segmentation, and text classification task types, Amazon SageMaker Ground Truth provides the following Lambda functions:

US East (Northern Virginia) (us-east-1):

US East (Ohio) (us-east-2):

US West (Oregon) (us-west-2):

EU (Ireland) (eu-west-1):

Asia Pacific (Tokyo) (ap-northeast-1):

', 'LabelingJobSummary$PreHumanTaskLambdaArn' => '

The Amazon Resource Name (ARN) of a Lambda function. The function is run before each data object is sent to a worker.

', 'LabelingJobSummary$AnnotationConsolidationLambdaArn' => '

The Amazon Resource Name (ARN) of the Lambda function used to consolidate the annotations from individual workers into a label for a data object. For more information, see Annotation Consolidation.

', ], ], 'LastModifiedTime' => [ 'base' => NULL, 'refs' => [ 'CodeRepositorySummary$LastModifiedTime' => '

The date and time that the Git repository was last modified.

', 'CompilationJobSummary$LastModifiedTime' => '

The time when the model compilation job was last modified.

', 'DescribeCodeRepositoryOutput$LastModifiedTime' => '

The date and time that the repository was last changed.

', 'DescribeCompilationJobResponse$LastModifiedTime' => '

The time that the status of the model compilation job was last modified.

', 'DescribeNotebookInstanceLifecycleConfigOutput$LastModifiedTime' => '

A timestamp that tells when the lifecycle configuration was last modified.

', 'DescribeNotebookInstanceOutput$LastModifiedTime' => '

A timestamp. Use this parameter to retrieve the time when the notebook instance was last modified.

', 'ListCompilationJobsRequest$LastModifiedTimeAfter' => '

A filter that returns the model compilation jobs that were modified after a specified time.

', 'ListCompilationJobsRequest$LastModifiedTimeBefore' => '

A filter that returns the model compilation jobs that were modified before a specified time.

', 'ListNotebookInstanceLifecycleConfigsInput$LastModifiedTimeBefore' => '

A filter that returns only lifecycle configurations that were modified before the specified time (timestamp).

', 'ListNotebookInstanceLifecycleConfigsInput$LastModifiedTimeAfter' => '

A filter that returns only lifecycle configurations that were modified after the specified time (timestamp).

', 'ListNotebookInstancesInput$LastModifiedTimeBefore' => '

A filter that returns only notebook instances that were modified before the specified time (timestamp).

', 'ListNotebookInstancesInput$LastModifiedTimeAfter' => '

A filter that returns only notebook instances that were modified after the specified time (timestamp).

', 'NotebookInstanceLifecycleConfigSummary$LastModifiedTime' => '

A timestamp that tells when the lifecycle configuration was last modified.

', 'NotebookInstanceSummary$LastModifiedTime' => '

A timestamp that shows when the notebook instance was last modified.

', ], ], 'ListAlgorithmsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListAlgorithmsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListCodeRepositoriesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListCodeRepositoriesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListCompilationJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListCompilationJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListCompilationJobsSortBy' => [ 'base' => NULL, 'refs' => [ 'ListCompilationJobsRequest$SortBy' => '

The field by which to sort results. The default is CreationTime.

', ], ], 'ListEndpointConfigsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListEndpointConfigsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListEndpointsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListEndpointsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListHyperParameterTuningJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListHyperParameterTuningJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsForWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsForWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsForWorkteamSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsForWorkteamRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'ListLabelingJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListModelPackagesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListModelPackagesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListModelsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListModelsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstanceLifecycleConfigsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstanceLifecycleConfigsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstancesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstancesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListSubscribedWorkteamsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListSubscribedWorkteamsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListTagsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListTagsMaxResults' => [ 'base' => NULL, 'refs' => [ 'ListTagsInput$MaxResults' => '

Maximum number of tags to return.

', ], ], 'ListTagsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsForHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsForHyperParameterTuningJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListTransformJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListTransformJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListWorkteamsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListWorkteamsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListWorkteamsSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListWorkteamsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'MaxConcurrentTaskCount' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$MaxConcurrentTaskCount' => '

Defines the maximum number of data objects that can be labeled by human workers at the same time. Each object may have more than one worker at one time.

', ], ], 'MaxConcurrentTransforms' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$MaxConcurrentTransforms' => '

The maximum number of parallel requests that can be sent to an algorithm container on an instance. This is good for algorithms that implement multiple workers on larger instances. The default value is 1. To allow Amazon SageMaker to determine the appropriate number for MaxConcurrentTransforms, do not set the value in the API.

', 'DescribeTransformJobResponse$MaxConcurrentTransforms' => '

The maximum number of parallel requests on each instance node that can be launched in a transform job. The default value is 1.

', 'TransformJobDefinition$MaxConcurrentTransforms' => '

The maximum number of parallel requests that can be sent to each instance in a transform job. The default value is 1.

', ], ], 'MaxHumanLabeledObjectCount' => [ 'base' => NULL, 'refs' => [ 'LabelingJobStoppingConditions$MaxHumanLabeledObjectCount' => '

The maximum number of objects that can be labeled by human workers.

', ], ], 'MaxNumberOfTrainingJobs' => [ 'base' => NULL, 'refs' => [ 'ResourceLimits$MaxNumberOfTrainingJobs' => '

The maximum number of training jobs that a hyperparameter tuning job can launch.

', ], ], 'MaxParallelTrainingJobs' => [ 'base' => NULL, 'refs' => [ 'ResourceLimits$MaxParallelTrainingJobs' => '

The maximum number of concurrent training jobs that a hyperparameter tuning job can launch.

', ], ], 'MaxPayloadInMB' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$MaxPayloadInMB' => '

The maximum payload size allowed, in MB. A payload is the data portion of a record (without metadata). The value in MaxPayloadInMB must be greater than or equal to the size of a single record. You can approximate the size of a record by dividing the size of your dataset by the number of records. Then multiply this value by the number of records you want in a mini-batch. We recommend entering a slightly larger value than this to ensure the records fit within the maximum payload size. The default value is 6 MB.

For cases where the payload might be arbitrarily large and is transmitted using HTTP chunked encoding, set the value to 0. This feature works only in supported algorithms. Currently, Amazon SageMaker built-in algorithms do not support this feature.
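As a worked sketch of the sizing arithmetic described above (all numbers are assumed):

// Worked sketch of the MaxPayloadInMB sizing rule (all numbers assumed).
$datasetSizeMB   = 600;      // total dataset size
$recordCount     = 1000000;  // number of records in the dataset
$recordsPerBatch = 5000;     // desired records per mini-batch

$avgRecordMB    = $datasetSizeMB / $recordCount;    // ~0.0006 MB per record
$batchMB        = $avgRecordMB * $recordsPerBatch;  // ~3 MB per mini-batch
$maxPayloadInMB = (int) ceil($batchMB) + 1;         // pad slightly -> 4

$params['MaxPayloadInMB'] = $maxPayloadInMB; // or 0 for HTTP chunked encoding, where supported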

', 'DescribeTransformJobResponse$MaxPayloadInMB' => '

The maximum payload size, in MB, used in the transform job.

', 'TransformJobDefinition$MaxPayloadInMB' => '

The maximum payload size allowed, in MB. A payload is the data portion of a record (without metadata).

', ], ], 'MaxPercentageOfInputDatasetLabeled' => [ 'base' => NULL, 'refs' => [ 'LabelingJobStoppingConditions$MaxPercentageOfInputDatasetLabeled' => '

The maximum percentage of input data objects that should be labeled.

', ], ], 'MaxResults' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$MaxResults' => '

The maximum number of algorithms to return in the response.

', 'ListCodeRepositoriesInput$MaxResults' => '

The maximum number of Git repositories to return in the response.

', 'ListCompilationJobsRequest$MaxResults' => '

The maximum number of model compilation jobs to return in the response.

', 'ListEndpointConfigsInput$MaxResults' => '

The maximum number of endpoint configurations to return in the response.

', 'ListEndpointsInput$MaxResults' => '

The maximum number of endpoints to return in the response.

', 'ListHyperParameterTuningJobsRequest$MaxResults' => '

The maximum number of tuning jobs to return. The default value is 10.

', 'ListLabelingJobsForWorkteamRequest$MaxResults' => '

The maximum number of labeling jobs to return in each page of the response.

', 'ListLabelingJobsRequest$MaxResults' => '

The maximum number of labeling jobs to return in each page of the response.

', 'ListModelPackagesInput$MaxResults' => '

The maximum number of model packages to return in the response.

', 'ListModelsInput$MaxResults' => '

The maximum number of models to return in the response.

', 'ListNotebookInstanceLifecycleConfigsInput$MaxResults' => '

The maximum number of lifecycle configurations to return in the response.

', 'ListNotebookInstancesInput$MaxResults' => '

The maximum number of notebook instances to return.

', 'ListSubscribedWorkteamsRequest$MaxResults' => '

The maximum number of work teams to return in each page of the response.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$MaxResults' => '

The maximum number of training jobs to return. The default value is 10.

', 'ListTrainingJobsRequest$MaxResults' => '

The maximum number of training jobs to return in the response.

', 'ListTransformJobsRequest$MaxResults' => '

The maximum number of transform jobs to return in the response. The default value is 10.

', 'ListWorkteamsRequest$MaxResults' => '

The maximum number of work teams to return in each page of the response.

', 'SearchRequest$MaxResults' => '

The maximum number of results to return in a SearchResponse.

', ], ], 'MaxRuntimeInSeconds' => [ 'base' => NULL, 'refs' => [ 'StoppingCondition$MaxRuntimeInSeconds' => '

The maximum length of time, in seconds, that the training job can run. If model training does not complete during this time, Amazon SageMaker ends the job. If the value is not specified, the default value is 1 day. The maximum value is 28 days.

', ], ], 'MemberDefinition' => [ 'base' => '

Defines the Amazon Cognito user group that is part of a work team.

', 'refs' => [ 'MemberDefinitions$member' => NULL, ], ], 'MemberDefinitions' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamRequest$MemberDefinitions' => '

A list of MemberDefinition objects that contains objects that identify the Amazon Cognito user pool that makes up the work team. For more information, see Amazon Cognito User Pools.

All of the CognitoMemberDefinition objects that make up the member definition must have the same ClientId and UserPool values.

', 'UpdateWorkteamRequest$MemberDefinitions' => '

A list of MemberDefinition objects that contain the updated work team members.

', 'Workteam$MemberDefinitions' => '

The Amazon Cognito user groups that make up the work team.

', ], ], 'MetricData' => [ 'base' => '

The name, value, and date and time of a metric that was emitted to Amazon CloudWatch.

', 'refs' => [ 'FinalMetricDataList$member' => NULL, ], ], 'MetricDefinition' => [ 'base' => '

Specifies a metric that the training algorithm writes to stderr or stdout. Amazon SageMaker hyperparameter tuning captures all defined metrics. You specify one metric that a hyperparameter tuning job uses as its objective metric to choose the best training job.

', 'refs' => [ 'MetricDefinitionList$member' => NULL, ], ], 'MetricDefinitionList' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$MetricDefinitions' => '

A list of metric definition objects. Each object specifies the metric name and regular expressions used to parse algorithm logs. Amazon SageMaker publishes each metric to Amazon CloudWatch.

', 'HyperParameterAlgorithmSpecification$MetricDefinitions' => '

An array of MetricDefinition objects that specify the metrics that the algorithm emits.

', 'TrainingSpecification$MetricDefinitions' => '

A list of MetricDefinition objects, which are used for parsing metrics generated by the algorithm.

', ], ], 'MetricName' => [ 'base' => NULL, 'refs' => [ 'FinalHyperParameterTuningJobObjectiveMetric$MetricName' => '

The name of the objective metric.

', 'HyperParameterTuningJobObjective$MetricName' => '

The name of the metric to use for the objective metric.

', 'MetricData$MetricName' => '

The name of the metric.

', 'MetricDefinition$Name' => '

The name of the metric.

', ], ], 'MetricRegex' => [ 'base' => NULL, 'refs' => [ 'MetricDefinition$Regex' => '

A regular expression that searches the output of a training job and gets the value of the metric. For more information about using regular expressions to define metrics, see Defining Objective Metrics.
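For illustration, a metric definition pairing a name with the regular expression that extracts its value from the algorithm\'s logs (both the metric name and the log format are assumed):

// Sketch: one metric definition (name and log format assumed).
$params['AlgorithmSpecification']['MetricDefinitions'] = [
    [
        'Name'  => 'validation:accuracy',
        'Regex' => 'validation-accuracy=([0-9\\.]+)',
    ],
];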

', ], ], 'MetricValue' => [ 'base' => NULL, 'refs' => [ 'FinalHyperParameterTuningJobObjectiveMetric$Value' => '

The value of the objective metric.

', ], ], 'ModelArn' => [ 'base' => NULL, 'refs' => [ 'CreateModelOutput$ModelArn' => '

The ARN of the model created in Amazon SageMaker.

', 'DescribeModelOutput$ModelArn' => '

The Amazon Resource Name (ARN) of the model.

', 'LabelingJobAlgorithmsConfig$InitialActiveLearningModelArn' => '

At the end of an auto-label job, Amazon SageMaker Ground Truth sends the Amazon Resource Name (ARN) of the final model used for auto-labeling. You can use this model as the starting point for subsequent similar jobs by providing the ARN of the model here.

', 'LabelingJobOutput$FinalActiveLearningModelArn' => '

The Amazon Resource Name (ARN) for the most recent Amazon SageMaker model trained as part of automated data labeling.

', 'ModelSummary$ModelArn' => '

The Amazon Resource Name (ARN) of the model.

', ], ], 'ModelArtifacts' => [ 'base' => '

Provides information about the location that is configured for storing model artifacts.

', 'refs' => [ 'DescribeCompilationJobResponse$ModelArtifacts' => '

Information about the location in Amazon S3 that has been configured for storing the model artifacts used in the compilation job.

', 'DescribeTrainingJobResponse$ModelArtifacts' => '

Information about the Amazon S3 location that is configured for storing model artifacts.

', 'TrainingJob$ModelArtifacts' => '

Information about the Amazon S3 location that is configured for storing model artifacts.

', ], ], 'ModelName' => [ 'base' => NULL, 'refs' => [ 'CreateModelInput$ModelName' => '

The name of the new model.

', 'CreateTransformJobRequest$ModelName' => '

The name of the model that you want to use for the transform job. ModelName must be the name of an existing Amazon SageMaker model within an AWS Region in an AWS account.

', 'DeleteModelInput$ModelName' => '

The name of the model to delete.

', 'DescribeModelInput$ModelName' => '

The name of the model.

', 'DescribeModelOutput$ModelName' => '

Name of the Amazon SageMaker model.

', 'DescribeTransformJobResponse$ModelName' => '

The name of the model used in the transform job.

', 'ModelSummary$ModelName' => '

The name of the model that you want a summary for.

', 'ProductionVariant$ModelName' => '

The name of the model that you want to host. This is the name that you specified when creating the model.

', ], ], 'ModelNameContains' => [ 'base' => NULL, 'refs' => [ 'ListModelsInput$NameContains' => '

A string in the model name. This filter returns only models whose name contains the specified string.

', ], ], 'ModelPackageArn' => [ 'base' => NULL, 'refs' => [ 'CreateModelPackageOutput$ModelPackageArn' => '

The Amazon Resource Name (ARN) of the new model package.

', 'DescribeModelPackageOutput$ModelPackageArn' => '

The Amazon Resource Name (ARN) of the model package.

', 'ModelPackageSummary$ModelPackageArn' => '

The Amazon Resource Name (ARN) of the model package.

', ], ], 'ModelPackageContainerDefinition' => [ 'base' => '

Describes the Docker container for the model package.

', 'refs' => [ 'ModelPackageContainerDefinitionList$member' => NULL, ], ], 'ModelPackageContainerDefinitionList' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$Containers' => '

The Amazon ECR registry path of the Docker image that contains the inference code.

', ], ], 'ModelPackageSortBy' => [ 'base' => NULL, 'refs' => [ 'ListModelPackagesInput$SortBy' => '

The parameter by which to sort the results. The default is CreationTime.

', ], ], 'ModelPackageStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeModelPackageOutput$ModelPackageStatus' => '

The current status of the model package.

', 'ModelPackageSummary$ModelPackageStatus' => '

The overall status of the model package.

', ], ], 'ModelPackageStatusDetails' => [ 'base' => '

Specifies the validation and image scan statuses of the model package.

', 'refs' => [ 'DescribeModelPackageOutput$ModelPackageStatusDetails' => '

Details about the current status of the model package.

', ], ], 'ModelPackageStatusItem' => [ 'base' => '

Represents the overall status of a model package.

', 'refs' => [ 'ModelPackageStatusItemList$member' => NULL, ], ], 'ModelPackageStatusItemList' => [ 'base' => NULL, 'refs' => [ 'ModelPackageStatusDetails$ValidationStatuses' => '

The validation status of the model package.

', 'ModelPackageStatusDetails$ImageScanStatuses' => '

The status of the scan of the Docker image container for the model package.

', ], ], 'ModelPackageSummary' => [ 'base' => '

Provides summary information about a model package.

', 'refs' => [ 'ModelPackageSummaryList$member' => NULL, ], ], 'ModelPackageSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListModelPackagesOutput$ModelPackageSummaryList' => '

An array of ModelPackageSummary objects, each of which lists a model package.

', ], ], 'ModelPackageValidationProfile' => [ 'base' => '

Contains data, such as the inputs and targeted instance types that are used in the process of validating the model package.

The data provided in the validation profile is made available to your buyers on AWS Marketplace.

', 'refs' => [ 'ModelPackageValidationProfiles$member' => NULL, ], ], 'ModelPackageValidationProfiles' => [ 'base' => NULL, 'refs' => [ 'ModelPackageValidationSpecification$ValidationProfiles' => '

An array of ModelPackageValidationProfile objects, each of which specifies a batch transform job that Amazon SageMaker runs to validate your model package.

', ], ], 'ModelPackageValidationSpecification' => [ 'base' => '

Specifies batch transform jobs that Amazon SageMaker runs to validate your model package.

', 'refs' => [ 'CreateModelPackageInput$ValidationSpecification' => '

Specifies configurations for one or more transform jobs that Amazon SageMaker runs to test the model package.

', 'DescribeModelPackageOutput$ValidationSpecification' => '

Configurations for one or more transform jobs that Amazon SageMaker runs to test the model package.

', ], ], 'ModelSortKey' => [ 'base' => NULL, 'refs' => [ 'ListModelsInput$SortBy' => '

Sorts the list of results. The default is CreationTime.

', ], ], 'ModelSummary' => [ 'base' => '

Provides summary information about a model.

', 'refs' => [ 'ModelSummaryList$member' => NULL, ], ], 'ModelSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListModelsOutput$Models' => '

An array of ModelSummary objects, each of which lists a model.

', ], ], 'NameContains' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$NameContains' => '

A string in the algorithm name. This filter returns only algorithms whose name contains the specified string.

', 'ListCompilationJobsRequest$NameContains' => '

A filter that returns the model compilation jobs whose name contains a specified string.

', 'ListHyperParameterTuningJobsRequest$NameContains' => '

A string in the tuning job name. This filter returns only tuning jobs whose name contains the specified string.

', 'ListLabelingJobsRequest$NameContains' => '

A string in the labeling job name. This filter returns only labeling jobs whose name contains the specified string.

', 'ListModelPackagesInput$NameContains' => '

A string in the model package name. This filter returns only model packages whose name contains the specified string.

', 'ListTrainingJobsRequest$NameContains' => '

A string in the training job name. This filter returns only training jobs whose name contains the specified string.

', 'ListTransformJobsRequest$NameContains' => '

A string in the transform job name. This filter returns only transform jobs whose name contains the specified string.

', ], ], 'NestedFilters' => [ 'base' => '

Defines a list of NestedFilters objects. To satisfy the conditions specified in the NestedFilters call, a resource must satisfy the conditions of all of the filters.

For example, you could define a NestedFilters using the training job\'s InputDataConfig property to filter on Channel objects.

A NestedFilters object contains multiple filters. For example, to find all training jobs whose name contains train and that have cat/data in their S3Uri (specified in InputDataConfig), you need to create a NestedFilters object that specifies the InputDataConfig property with the following Filter objects:
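A sketch of that exact search, expressed with the PHP SDK\'s Search operation (the property paths follow the pattern described above; treat them as illustrative):

// Sketch: find training jobs whose name contains "train" and whose
// InputDataConfig S3Uri contains "cat/data" (per the example above).
$client = new Aws\SageMaker\SageMakerClient([
    'version' => '2017-07-24',
    'region'  => 'us-east-1',
]);

$result = $client->search([
    'Resource' => 'TrainingJob',
    'SearchExpression' => [
        'Filters' => [
            ['Name' => 'TrainingJobName', 'Operator' => 'Contains', 'Value' => 'train'],
        ],
        'NestedFilters' => [
            [
                'NestedPropertyName' => 'InputDataConfig',
                'Filters' => [
                    [
                        'Name'     => 'InputDataConfig.DataSource.S3DataSource.S3Uri',
                        'Operator' => 'Contains',
                        'Value'    => 'cat/data',
                    ],
                ],
            ],
        ],
    ],
]);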

', 'refs' => [ 'NestedFiltersList$member' => NULL, ], ], 'NestedFiltersList' => [ 'base' => NULL, 'refs' => [ 'SearchExpression$NestedFilters' => '

A list of nested filter objects.

', ], ], 'NetworkInterfaceId' => [ 'base' => NULL, 'refs' => [ 'DescribeNotebookInstanceOutput$NetworkInterfaceId' => '

The network interface IDs that Amazon SageMaker created when it created the notebook instance.

', ], ], 'NextToken' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$NextToken' => '

If the response to a previous ListAlgorithms request was truncated, the response includes a NextToken. To retrieve the next set of algorithms, use the token in the next request.

', 'ListAlgorithmsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of algorithms, use it in the subsequent request.

', 'ListCodeRepositoriesInput$NextToken' => '

If the result of a ListCodeRepositoriesOutput request was truncated, the response includes a NextToken. To get the next set of Git repositories, use the token in the next request.

', 'ListCodeRepositoriesOutput$NextToken' => '

If the result of a ListCodeRepositoriesOutput request was truncated, the response includes a NextToken. To get the next set of Git repositories, use the token in the next request.

', 'ListCompilationJobsRequest$NextToken' => '

If the result of the previous ListCompilationJobs request was truncated, the response includes a NextToken. To retrieve the next set of model compilation jobs, use the token in the next request.

', 'ListCompilationJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this NextToken. To retrieve the next set of model compilation jobs, use this token in the next request.

', 'ListHyperParameterTuningJobsRequest$NextToken' => '

If the result of the previous ListHyperParameterTuningJobs request was truncated, the response includes a NextToken. To retrieve the next set of tuning jobs, use the token in the next request.

', 'ListHyperParameterTuningJobsResponse$NextToken' => '

If the result of this ListHyperParameterTuningJobs request was truncated, the response includes a NextToken. To retrieve the next set of tuning jobs, use the token in the next request.

', 'ListLabelingJobsForWorkteamRequest$NextToken' => '

If the result of the previous ListLabelingJobsForWorkteam request was truncated, the response includes a NextToken. To retrieve the next set of labeling jobs, use the token in the next request.

', 'ListLabelingJobsForWorkteamResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of labeling jobs, use it in the subsequent request.

', 'ListLabelingJobsRequest$NextToken' => '

If the result of the previous ListLabelingJobs request was truncated, the response includes a NextToken. To retrieve the next set of labeling jobs, use the token in the next request.

', 'ListLabelingJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of labeling jobs, use it in the subsequent request.

', 'ListModelPackagesInput$NextToken' => '

If the response to a previous ListModelPackages request was truncated, the response includes a NextToken. To retrieve the next set of model packages, use the token in the next request.

', 'ListModelPackagesOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of model packages, use it in the subsequent request.

', 'ListNotebookInstanceLifecycleConfigsInput$NextToken' => '

If the result of a ListNotebookInstanceLifecycleConfigs request was truncated, the response includes a NextToken. To get the next set of lifecycle configurations, use the token in the next request.

', 'ListNotebookInstanceLifecycleConfigsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To get the next set of lifecycle configurations, use it in the next request.

', 'ListNotebookInstancesInput$NextToken' => '

If the previous call to ListNotebookInstances was truncated, the response includes a NextToken. You can use this token in your subsequent ListNotebookInstances request to fetch the next set of notebook instances.

You might specify a filter or a sort order in your request. When the response is truncated, you must use the same values for the filter and sort order in the next request.

', 'ListNotebookInstancesOutput$NextToken' => '

If the response to the previous ListNotebookInstances request was truncated, Amazon SageMaker returns this token. To retrieve the next set of notebook instances, use the token in the next request.

', 'ListSubscribedWorkteamsRequest$NextToken' => '

If the result of the previous ListSubscribedWorkteams request was truncated, the response includes a NextToken. To retrieve the next set of subscribed work teams, use the token in the next request.

', 'ListSubscribedWorkteamsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of work teams, use it in the subsequent request.

', 'ListTagsInput$NextToken' => '

If the response to the previous ListTags request is truncated, Amazon SageMaker returns this token. To retrieve the next set of tags, use it in the subsequent request.

', 'ListTagsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker includes a token in the response. You can use this token in your subsequent request to fetch the next set of tags.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$NextToken' => '

If the result of the previous ListTrainingJobsForHyperParameterTuningJob request was truncated, the response includes a NextToken. To retrieve the next set of training jobs, use the token in the next request.

', 'ListTrainingJobsForHyperParameterTuningJobResponse$NextToken' => '

If the result of this ListTrainingJobsForHyperParameterTuningJob request was truncated, the response includes a NextToken. To retrieve the next set of training jobs, use the token in the next request.

', 'ListTrainingJobsRequest$NextToken' => '

If the result of the previous ListTrainingJobs request was truncated, the response includes a NextToken. To retrieve the next set of training jobs, use the token in the next request.

', 'ListTrainingJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of training jobs, use it in the subsequent request.

', 'ListTransformJobsRequest$NextToken' => '

If the result of the previous ListTransformJobs request was truncated, the response includes a NextToken. To retrieve the next set of transform jobs, use the token in the next request.

', 'ListTransformJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of transform jobs, use it in the next request.

', 'ListWorkteamsRequest$NextToken' => '

If the result of the previous ListWorkteams request was truncated, the response includes a NextToken. To retrieve the next set of work teams, use the token in the next request.

', 'ListWorkteamsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of work teams, use it in the subsequent request.

', 'SearchRequest$NextToken' => '

If more than MaxResults resource objects match the specified SearchExpression, the SearchResponse includes a NextToken. The NextToken can be passed to the next SearchRequest to continue retrieving results for the specified SearchExpression and Sort parameters.

', 'SearchResponse$NextToken' => '

If the result of the previous Search request was truncated, the response includes a NextToken. To retrieve the next set of results, use the token in the next request.

', ], ], 'NotebookInstanceAcceleratorType' => [ 'base' => NULL, 'refs' => [ 'NotebookInstanceAcceleratorTypes$member' => NULL, ], ], 'NotebookInstanceAcceleratorTypes' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$AcceleratorTypes' => '

A list of Elastic Inference (EI) instance types to associate with this notebook instance. Currently, only one instance type can be associated with a notebook instance. For more information, see Using Elastic Inference in Amazon SageMaker.
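
For illustration, a sketch associating a single EI type at creation time; the instance name and role ARN are placeholders:

$client->createNotebookInstance([
    "NotebookInstanceName" => "my-notebook",
    "InstanceType" => "ml.t2.medium",
    "RoleArn" => "arn:aws:iam::123456789012:role/MySageMakerRole",
    "AcceleratorTypes" => ["ml.eia1.medium"], // currently limited to one EI type
]);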

', 'DescribeNotebookInstanceOutput$AcceleratorTypes' => '

A list of the Elastic Inference (EI) instance types associated with this notebook instance. Currently only one EI instance type can be associated with a notebook instance. For more information, see Using Elastic Inference in Amazon SageMaker.

', 'UpdateNotebookInstanceInput$AcceleratorTypes' => '

A list of the Elastic Inference (EI) instance types to associate with this notebook instance. Currently only one EI instance type can be associated with a notebook instance. For more information, see Using Elastic Inference in Amazon SageMaker.

', ], ], 'NotebookInstanceArn' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceOutput$NotebookInstanceArn' => '

The Amazon Resource Name (ARN) of the notebook instance.

', 'DescribeNotebookInstanceOutput$NotebookInstanceArn' => '

The Amazon Resource Name (ARN) of the notebook instance.

', 'NotebookInstanceSummary$NotebookInstanceArn' => '

The Amazon Resource Name (ARN) of the notebook instance.

', ], ], 'NotebookInstanceLifecycleConfigArn' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigArn' => '

The Amazon Resource Name (ARN) of the lifecycle configuration.

', 'DescribeNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigArn' => '

The Amazon Resource Name (ARN) of the lifecycle configuration.

', 'NotebookInstanceLifecycleConfigSummary$NotebookInstanceLifecycleConfigArn' => '

The Amazon Resource Name (ARN) of the lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleConfigContent' => [ 'base' => NULL, 'refs' => [ 'NotebookInstanceLifecycleHook$Content' => '

A base64-encoded string that contains a shell script for a notebook instance lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleConfigList' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceLifecycleConfigInput$OnCreate' => '

A shell script that runs only once, when you create a notebook instance. The shell script must be a base64-encoded string.

', 'CreateNotebookInstanceLifecycleConfigInput$OnStart' => '

A shell script that runs every time you start a notebook instance, including when you create the notebook instance. The shell script must be a base64-encoded string.
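
As a sketch (the configuration name and script body are hypothetical), the scripts are passed as base64-encoded hook objects:

$onStart = base64_encode("#!/bin/bash\nset -e\necho \"running OnStart\"\n");

$client->createNotebookInstanceLifecycleConfig([
    "NotebookInstanceLifecycleConfigName" => "my-lifecycle-config",
    // Each hook is a list of objects whose Content is the base64-encoded script.
    "OnStart" => [["Content" => $onStart]],
]);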

', 'DescribeNotebookInstanceLifecycleConfigOutput$OnCreate' => '

The shell script that runs only once, when you create a notebook instance.

', 'DescribeNotebookInstanceLifecycleConfigOutput$OnStart' => '

The shell script that runs every time you start a notebook instance, including when you create the notebook instance.

', 'UpdateNotebookInstanceLifecycleConfigInput$OnCreate' => '

The shell script that runs only once, when you create a notebook instance.

', 'UpdateNotebookInstanceLifecycleConfigInput$OnStart' => '

The shell script that runs every time you start a notebook instance, including when you create the notebook instance.

', ], ], 'NotebookInstanceLifecycleConfigName' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$LifecycleConfigName' => '

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'CreateNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', 'DeleteNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration to delete.

', 'DescribeNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration to describe.

', 'DescribeNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', 'DescribeNotebookInstanceOutput$NotebookInstanceLifecycleConfigName' => '

Returns the name of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'ListNotebookInstancesInput$NotebookInstanceLifecycleConfigNameContains' => '

A string in the name of a notebook instance lifecycle configuration associated with this notebook instance. This filter returns only notebook instances associated with a lifecycle configuration with a name that contains the specified string.

', 'NotebookInstanceLifecycleConfigSummary$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', 'NotebookInstanceSummary$NotebookInstanceLifecycleConfigName' => '

The name of a notebook instance lifecycle configuration associated with this notebook instance.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'UpdateNotebookInstanceInput$LifecycleConfigName' => '

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'UpdateNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleConfigNameContains' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsInput$NameContains' => '

A string in the lifecycle configuration name. This filter returns only lifecycle configurations whose name contains the specified string.

', ], ], 'NotebookInstanceLifecycleConfigSortKey' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsInput$SortBy' => '

Sorts the list of results. The default is CreationTime.

', ], ], 'NotebookInstanceLifecycleConfigSortOrder' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsInput$SortOrder' => '

The sort order for results.

', ], ], 'NotebookInstanceLifecycleConfigSummary' => [ 'base' => '

Provides a summary of a notebook instance lifecycle configuration.

', 'refs' => [ 'NotebookInstanceLifecycleConfigSummaryList$member' => NULL, ], ], 'NotebookInstanceLifecycleConfigSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsOutput$NotebookInstanceLifecycleConfigs' => '

An array of NotebookInstanceLifecycleConfigSummary objects, each listing a lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleHook' => [ 'base' => '

Contains the notebook instance lifecycle configuration script.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:/bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'refs' => [ 'NotebookInstanceLifecycleConfigList$member' => NULL, ], ], 'NotebookInstanceName' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$NotebookInstanceName' => '

The name of the new notebook instance.

', 'CreatePresignedNotebookInstanceUrlInput$NotebookInstanceName' => '

The name of the notebook instance.

', 'DeleteNotebookInstanceInput$NotebookInstanceName' => '

The name of the Amazon SageMaker notebook instance to delete.

', 'DescribeNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance that you want information about.

', 'DescribeNotebookInstanceOutput$NotebookInstanceName' => '

The name of the Amazon SageMaker notebook instance.

', 'NotebookInstanceSummary$NotebookInstanceName' => '

The name of the notebook instance that you want a summary for.

', 'StartNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance to start.

', 'StopNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance to stop.

', 'UpdateNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance to update.

', ], ], 'NotebookInstanceNameContains' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$NameContains' => '

A string in the notebook instances\' name. This filter returns only notebook instances whose name contains the specified string.

', ], ], 'NotebookInstanceSortKey' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$SortBy' => '

The field to sort results by. The default is Name.

', ], ], 'NotebookInstanceSortOrder' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$SortOrder' => '

The sort order for results.

', ], ], 'NotebookInstanceStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeNotebookInstanceOutput$NotebookInstanceStatus' => '

The status of the notebook instance.

', 'ListNotebookInstancesInput$StatusEquals' => '

A filter that returns only notebook instances with the specified status.

', 'NotebookInstanceSummary$NotebookInstanceStatus' => '

The status of the notebook instance.

', ], ], 'NotebookInstanceSummary' => [ 'base' => '

Provides summary information for an Amazon SageMaker notebook instance.

', 'refs' => [ 'NotebookInstanceSummaryList$member' => NULL, ], ], 'NotebookInstanceSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesOutput$NotebookInstances' => '

An array of NotebookInstanceSummary objects, one for each notebook instance.

', ], ], 'NotebookInstanceUrl' => [ 'base' => NULL, 'refs' => [ 'CreatePresignedNotebookInstanceUrlOutput$AuthorizedUrl' => '

A JSON object that contains the URL string.

', 'DescribeNotebookInstanceOutput$Url' => '

The URL that you use to connect to the Jupyter notebook that is running in your notebook instance.

', 'NotebookInstanceSummary$Url' => '

The URL that you use to connect to the Jupyter instance running in your notebook instance.

', ], ], 'NotebookInstanceVolumeSizeInGB' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$VolumeSizeInGB' => '

The size, in GB, of the ML storage volume to attach to the notebook instance. The default value is 5 GB.

', 'DescribeNotebookInstanceOutput$VolumeSizeInGB' => '

The size, in GB, of the ML storage volume attached to the notebook instance.

', 'UpdateNotebookInstanceInput$VolumeSizeInGB' => '

The size, in GB, of the ML storage volume to attach to the notebook instance. The default value is 5 GB.

', ], ], 'NumberOfHumanWorkersPerDataObject' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$NumberOfHumanWorkersPerDataObject' => '

The number of human workers that will label an object.

', ], ], 'ObjectiveStatus' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTrainingJobSummary$ObjectiveStatus' => '

The status of the objective metric for the training job:

', ], ], 'ObjectiveStatusCounter' => [ 'base' => NULL, 'refs' => [ 'ObjectiveStatusCounters$Succeeded' => '

The number of training jobs whose final objective metric was evaluated by the hyperparameter tuning job and used in the hyperparameter tuning process.

', 'ObjectiveStatusCounters$Pending' => '

The number of training jobs that are in progress and pending evaluation of their final objective metric.

', 'ObjectiveStatusCounters$Failed' => '

The number of training jobs whose final objective metric was not evaluated and used in the hyperparameter tuning process. This typically occurs when the training job failed or did not emit an objective metric.

', ], ], 'ObjectiveStatusCounters' => [ 'base' => '

Specifies the number of training jobs that this hyperparameter tuning job launched, categorized by the status of their objective metric. The objective metric status shows whether the final objective metric for the training job has been evaluated by the tuning job and used in the hyperparameter tuning process.

', 'refs' => [ 'DescribeHyperParameterTuningJobResponse$ObjectiveStatusCounters' => '

The ObjectiveStatusCounters object that specifies the number of training jobs, categorized by the status of their final objective metric, that this tuning job launched.

', 'HyperParameterTuningJobSummary$ObjectiveStatusCounters' => '

The ObjectiveStatusCounters object that specifies the numbers of training jobs, categorized by objective metric status, that this tuning job launched.

', ], ], 'Operator' => [ 'base' => NULL, 'refs' => [ 'Filter$Operator' => '

A Boolean binary operator that is used to evaluate the filter. The operator field contains one of the following values:

Equals
  • The specified resource in Name equals the specified Value.

NotEquals
  • The specified resource in Name does not equal the specified Value.

GreaterThan
  • The specified resource in Name is greater than the specified Value. Not supported for text-based properties.

GreaterThanOrEqualTo
  • The specified resource in Name is greater than or equal to the specified Value. Not supported for text-based properties.

LessThan
  • The specified resource in Name is less than the specified Value. Not supported for text-based properties.

LessThanOrEqualTo
  • The specified resource in Name is less than or equal to the specified Value. Not supported for text-based properties.

Contains
  • Only supported for text-based properties. The word list of the property contains the specified Value.

If you have specified a filter Value, the default is Equals.
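
For instance, a single Filter passed to Search; the property name and value here are illustrative:

// Contains is only valid for text-based properties such as TrainingJobName.
$filter = ["Name" => "TrainingJobName", "Operator" => "Contains", "Value" => "cifar"];
// Omitting "Operator" would default the comparison to Equals.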

', ], ], 'OrderKey' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListEndpointsInput$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListModelsInput$SortOrder' => '

The sort order for results. The default is Ascending.

', ], ], 'OutputConfig' => [ 'base' => '

Contains information about the output location for the compiled model and the device (target) that the model runs on.

', 'refs' => [ 'CreateCompilationJobRequest$OutputConfig' => '

Provides information about the output location for the compiled model and the target device the model runs on.

', 'DescribeCompilationJobResponse$OutputConfig' => '

Information about the output location for the compiled model and the target device that the model runs on.

', ], ], 'OutputDataConfig' => [ 'base' => '

Provides information about how to store model training results (model artifacts).

', 'refs' => [ 'CreateTrainingJobRequest$OutputDataConfig' => '

Specifies the path to the S3 bucket where you want to store model artifacts. Amazon SageMaker creates subfolders for the artifacts.

', 'DescribeTrainingJobResponse$OutputDataConfig' => '

The S3 path where model artifacts that you configured when creating the job are stored. Amazon SageMaker creates subfolders for model artifacts.

', 'HyperParameterTrainingJobDefinition$OutputDataConfig' => '

Specifies the path to the Amazon S3 bucket where you store model artifacts from the training jobs that the tuning job launches.

', 'TrainingJob$OutputDataConfig' => '

The S3 path where model artifacts that you configured when creating the job are stored. Amazon SageMaker creates subfolders for model artifacts.

', 'TrainingJobDefinition$OutputDataConfig' => '

The path to the S3 bucket where you want to store model artifacts. Amazon SageMaker creates subfolders for the artifacts.

', ], ], 'PaginationToken' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$NextToken' => '

If the result of the previous ListEndpointConfigs request was truncated, the response includes a NextToken. To retrieve the next set of endpoint configurations, use the token in the next request.

', 'ListEndpointConfigsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of endpoint configurations, use it in the subsequent request.

', 'ListEndpointsInput$NextToken' => '

If the result of a ListEndpoints request was truncated, the response includes a NextToken. To retrieve the next set of endpoints, use the token in the next request.

', 'ListEndpointsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of endpoints, use it in the subsequent request.

', 'ListModelsInput$NextToken' => '

If the response to a previous ListModels request was truncated, the response includes a NextToken. To retrieve the next set of models, use the token in the next request.

', 'ListModelsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of models, use it in the subsequent request.

', ], ], 'ParameterKey' => [ 'base' => NULL, 'refs' => [ 'CategoricalParameterRange$Name' => '

The name of the categorical hyperparameter to tune.

', 'ContinuousParameterRange$Name' => '

The name of the continuous hyperparameter to tune.

', 'HyperParameters$key' => NULL, 'IntegerParameterRange$Name' => '

The name of the hyperparameter to search.

', ], ], 'ParameterName' => [ 'base' => NULL, 'refs' => [ 'HyperParameterSpecification$Name' => '

The name of this hyperparameter. The name must be unique.

', ], ], 'ParameterRange' => [ 'base' => '

Defines the possible values for categorical, continuous, and integer hyperparameters to be used by an algorithm.

', 'refs' => [ 'HyperParameterSpecification$Range' => '

The allowed range for this hyperparameter.

', ], ], 'ParameterRanges' => [ 'base' => '

Specifies ranges of integer, continuous, and categorical hyperparameters that a hyperparameter tuning job searches. The hyperparameter tuning job launches training jobs with hyperparameter values within these ranges to find the combination of values that result in the training job with the best performance as measured by the objective metric of the hyperparameter tuning job.

You can specify a maximum of 20 hyperparameters that a hyperparameter tuning job can search over. Every possible value of a categorical parameter range counts against this limit.
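
A hedged sketch of a ParameterRanges fragment for HyperParameterTuningJobConfig; the hyperparameter names are illustrative, and all range bounds are strings:

"ParameterRanges" => [
    "ContinuousParameterRanges" => [
        ["Name" => "learning_rate", "MinValue" => "0.0001", "MaxValue" => "0.1"],
    ],
    "IntegerParameterRanges" => [
        ["Name" => "num_round", "MinValue" => "10", "MaxValue" => "500"],
    ],
    "CategoricalParameterRanges" => [
        // Each listed category counts against the 20-hyperparameter limit.
        ["Name" => "booster", "Values" => ["gbtree", "dart"]],
    ],
],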

', 'refs' => [ 'HyperParameterTuningJobConfig$ParameterRanges' => '

The ParameterRanges object that specifies the ranges of hyperparameters that this tuning job searches.

', ], ], 'ParameterType' => [ 'base' => NULL, 'refs' => [ 'HyperParameterSpecification$Type' => '

The type of this hyperparameter. The valid types are Integer, Continuous, Categorical, and FreeText.

', ], ], 'ParameterValue' => [ 'base' => NULL, 'refs' => [ 'ContinuousParameterRange$MinValue' => '

The minimum value for the hyperparameter. The tuning job uses floating-point values between this value and MaxValue for tuning.

', 'ContinuousParameterRange$MaxValue' => '

The maximum value for the hyperparameter. The tuning job uses floating-point values between MinValue and this value for tuning.

', 'ContinuousParameterRangeSpecification$MinValue' => '

The minimum floating-point value allowed.

', 'ContinuousParameterRangeSpecification$MaxValue' => '

The maximum floating-point value allowed.

', 'HyperParameterSpecification$DefaultValue' => '

The default value for this hyperparameter. If a default value is specified, a hyperparameter cannot be required.

', 'HyperParameters$value' => NULL, 'IntegerParameterRange$MinValue' => '

The minimum value of the hyperparameter to search.

', 'IntegerParameterRange$MaxValue' => '

The maximum value of the hyperparameter to search.

', 'IntegerParameterRangeSpecification$MinValue' => '

The minimum integer value allowed.

', 'IntegerParameterRangeSpecification$MaxValue' => '

The maximum integer value allowed.

', 'ParameterValues$member' => NULL, ], ], 'ParameterValues' => [ 'base' => NULL, 'refs' => [ 'CategoricalParameterRange$Values' => '

A list of the categories for the hyperparameter.

', 'CategoricalParameterRangeSpecification$Values' => '

The allowed categories for the hyperparameter.

', ], ], 'ParentHyperParameterTuningJob' => [ 'base' => '

A previously completed or stopped hyperparameter tuning job to be used as a starting point for a new hyperparameter tuning job.

', 'refs' => [ 'ParentHyperParameterTuningJobs$member' => NULL, ], ], 'ParentHyperParameterTuningJobs' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTuningJobWarmStartConfig$ParentHyperParameterTuningJobs' => '

An array of hyperparameter tuning jobs that are used as the starting point for the new hyperparameter tuning job. For more information about warm starting a hyperparameter tuning job, see Using a Previous Hyperparameter Tuning Job as a Starting Point.

Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent jobs for warm start tuning jobs.
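
A sketch of a warm start configuration; the parent job name is hypothetical:

"WarmStartConfig" => [
    "ParentHyperParameterTuningJobs" => [
        ["HyperParameterTuningJobName" => "previous-tuning-job"],
    ],
    "WarmStartType" => "IdenticalDataAndAlgorithm",
],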

', ], ], 'ProductId' => [ 'base' => NULL, 'refs' => [ 'DescribeAlgorithmOutput$ProductId' => '

The product identifier of the algorithm.

', 'ModelPackageContainerDefinition$ProductId' => '

The AWS Marketplace product ID of the model package.

', ], ], 'ProductListings' => [ 'base' => NULL, 'refs' => [ 'Workteam$ProductListingIds' => '

The Amazon Marketplace identifier for a vendor\'s work team.

', ], ], 'ProductionVariant' => [ 'base' => '

Identifies a model that you want to host and the resources to deploy for hosting it. If you are deploying multiple models, tell Amazon SageMaker how to distribute traffic among the models by specifying variant weights.

', 'refs' => [ 'ProductionVariantList$member' => NULL, ], ], 'ProductionVariantAcceleratorType' => [ 'base' => NULL, 'refs' => [ 'ProductionVariant$AcceleratorType' => '

The size of the Elastic Inference (EI) instance to use for the production variant. EI instances provide on-demand GPU computing for inference. For more information, see Using Elastic Inference in Amazon SageMaker.

', ], ], 'ProductionVariantInstanceType' => [ 'base' => NULL, 'refs' => [ 'ProductionVariant$InstanceType' => '

The ML compute instance type.

', 'RealtimeInferenceInstanceTypes$member' => NULL, ], ], 'ProductionVariantList' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigInput$ProductionVariants' => '

An array of ProductionVariant objects, one for each model that you want to host at this endpoint.

', 'DescribeEndpointConfigOutput$ProductionVariants' => '

An array of ProductionVariant objects, one for each model that you want to host at this endpoint.

', ], ], 'ProductionVariantSummary' => [ 'base' => '

Describes weight and capacities for a production variant associated with an endpoint. If you sent a request to the UpdateEndpointWeightsAndCapacities API and the endpoint status is Updating, you get different desired and current values.

', 'refs' => [ 'ProductionVariantSummaryList$member' => NULL, ], ], 'ProductionVariantSummaryList' => [ 'base' => NULL, 'refs' => [ 'DescribeEndpointOutput$ProductionVariants' => '

An array of ProductionVariantSummary objects, one for each model hosted behind this endpoint.

', ], ], 'PropertyNameHint' => [ 'base' => NULL, 'refs' => [ 'PropertyNameQuery$PropertyNameHint' => '

Text that is part of a property\'s name. The API returns property names of hyperparameters, metrics, and tag keys that begin with the text specified in the PropertyNameHint.

', ], ], 'PropertyNameQuery' => [ 'base' => '

A type of SuggestionQuery. A suggestion query for retrieving property names that match the specified hint.

', 'refs' => [ 'SuggestionQuery$PropertyNameQuery' => '

A type of SuggestionQuery. Defines a property name hint. Only property names that match the specified hint are included in the response.

', ], ], 'PropertyNameSuggestion' => [ 'base' => '

A property name returned from a GetSearchSuggestions call that specifies a value in the PropertyNameQuery field.

', 'refs' => [ 'PropertyNameSuggestionList$member' => NULL, ], ], 'PropertyNameSuggestionList' => [ 'base' => NULL, 'refs' => [ 'GetSearchSuggestionsResponse$PropertyNameSuggestions' => '

A list of property names for a Resource that match a SuggestionQuery.
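
For illustration, a sketch retrieving suggestions for a hint; the hint text is arbitrary:

$result = $client->getSearchSuggestions([
    "Resource" => "TrainingJob",
    "SuggestionQuery" => ["PropertyNameQuery" => ["PropertyNameHint" => "learn"]],
]);
foreach ($result["PropertyNameSuggestions"] as $suggestion) {
    echo $suggestion["PropertyName"], "\n";
}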

', ], ], 'PublicWorkforceTaskPrice' => [ 'base' => '

Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed.

Use one of the following prices for bounding box tasks. Prices are in US dollars.

Use one of the following prices for image classification, text classification, and custom tasks. Prices are in US dollars.

Use one of the following prices for semantic segmentation tasks. Prices are in US dollars.

', 'refs' => [ 'HumanTaskConfig$PublicWorkforceTaskPrice' => '

The price that you pay for each task performed by a public worker.

', ], ], 'RealtimeInferenceInstanceTypes' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$SupportedRealtimeInferenceInstanceTypes' => '

A list of the instance types that are used to generate inferences in real-time.

', ], ], 'RecordWrapper' => [ 'base' => NULL, 'refs' => [ 'Channel$RecordWrapperType' => '

Specify RecordIO as the value when input data is in raw format but the training algorithm requires the RecordIO format. In this case, Amazon SageMaker wraps each individual S3 object in a RecordIO record. If the input data is already in RecordIO format, you don\'t need to set this attribute. For more information, see Create a Dataset Using RecordIO.

In File mode, leave this field unset or set it to None.

', ], ], 'RenderUiTemplateRequest' => [ 'base' => NULL, 'refs' => [], ], 'RenderUiTemplateResponse' => [ 'base' => NULL, 'refs' => [], ], 'RenderableTask' => [ 'base' => '

Contains input values for a task.

', 'refs' => [ 'RenderUiTemplateRequest$Task' => '

A RenderableTask object containing a representative task to render.

', ], ], 'RenderingError' => [ 'base' => '

A description of an error that occurred while rendering the template.

', 'refs' => [ 'RenderingErrorList$member' => NULL, ], ], 'RenderingErrorList' => [ 'base' => NULL, 'refs' => [ 'RenderUiTemplateResponse$Errors' => '

A list of one or more RenderingError objects if any were encountered while rendering the template. If there were no errors, the list is empty.
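
A hedged sketch of rendering a template and checking for errors; the role ARN, the $templateHtml variable holding the template, and the task input are placeholders:

$result = $client->renderUiTemplate([
    "RoleArn" => "arn:aws:iam::123456789012:role/MySageMakerRole",
    "UiTemplate" => ["Content" => $templateHtml],
    "Task" => ["Input" => "{\"text\": \"sample task input\"}"],
]);
foreach ($result["Errors"] as $error) {
    echo $error["Code"], ": ", $error["Message"], "\n";
}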

', ], ], 'ResourceArn' => [ 'base' => NULL, 'refs' => [ 'AddTagsInput$ResourceArn' => '

The Amazon Resource Name (ARN) of the resource that you want to tag.

', 'DeleteTagsInput$ResourceArn' => '

The Amazon Resource Name (ARN) of the resource whose tags you want to delete.

', 'ListTagsInput$ResourceArn' => '

The Amazon Resource Name (ARN) of the resource whose tags you want to retrieve.

', ], ], 'ResourceConfig' => [ 'base' => '

Describes the resources, including ML compute instances and ML storage volumes, to use for model training.

', 'refs' => [ 'CreateTrainingJobRequest$ResourceConfig' => '

The resources, including the ML compute instances and ML storage volumes, to use for model training.

ML storage volumes store model artifacts and incremental states. Training algorithms might also use ML storage volumes for scratch space. If you want Amazon SageMaker to use the ML storage volume to store the training data, choose File as the TrainingInputMode in the algorithm specification. For distributed training algorithms, specify an instance count greater than 1.
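
As a sketch, a distributed-training ResourceConfig fragment; the instance choices are illustrative:

"ResourceConfig" => [
    "InstanceType" => "ml.m4.xlarge",
    "InstanceCount" => 2,      // greater than 1 for distributed training
    "VolumeSizeInGB" => 50,    // ML storage for artifacts, incremental state, and scratch space
],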

', 'DescribeTrainingJobResponse$ResourceConfig' => '

Resources, including ML compute instances and ML storage volumes, that are configured for model training.

', 'HyperParameterTrainingJobDefinition$ResourceConfig' => '

The resources, including the compute instances and storage volumes, to use for the training jobs that the tuning job launches.

Storage volumes store model artifacts and incremental states. Training algorithms might also use storage volumes for scratch space. If you want Amazon SageMaker to use the storage volume to store the training data, choose File as the TrainingInputMode in the algorithm specification. For distributed training algorithms, specify an instance count greater than 1.

', 'TrainingJob$ResourceConfig' => '

Resources, including ML compute instances and ML storage volumes, that are configured for model training.

', 'TrainingJobDefinition$ResourceConfig' => '

The resources, including the ML compute instances and ML storage volumes, to use for model training.

', ], ], 'ResourceInUse' => [ 'base' => '

Resource being accessed is in use.

', 'refs' => [], ], 'ResourceLimitExceeded' => [ 'base' => '

You have exceeded an Amazon SageMaker resource limit. For example, you might have too many training jobs created.

', 'refs' => [], ], 'ResourceLimits' => [ 'base' => '

Specifies the maximum number of training jobs and parallel training jobs that a hyperparameter tuning job can launch.

', 'refs' => [ 'HyperParameterTuningJobConfig$ResourceLimits' => '

The ResourceLimits object that specifies the maximum number of training jobs and parallel training jobs for this tuning job.

', 'HyperParameterTuningJobSummary$ResourceLimits' => '

The ResourceLimits object that specifies the maximum number of training jobs and parallel training jobs allowed for this tuning job.

', ], ], 'ResourceNotFound' => [ 'base' => '

Resource being accessed is not found.

', 'refs' => [], ], 'ResourcePropertyName' => [ 'base' => NULL, 'refs' => [ 'Filter$Name' => '

A property name. For example, TrainingJobName. For the list of valid property names returned in a search result for each supported resource, see TrainingJob properties. You must specify a valid property name for the resource.

', 'NestedFilters$NestedPropertyName' => '

The name of the property to use in the nested filters. The value must match a listed property name, such as InputDataConfig.

', 'PropertyNameSuggestion$PropertyName' => '

A suggested property name based on what you entered in the search textbox in the Amazon SageMaker console.

', 'SearchRequest$SortBy' => '

The name of the resource property used to sort the SearchResults. The default is LastModifiedTime.

', ], ], 'ResourceType' => [ 'base' => NULL, 'refs' => [ 'GetSearchSuggestionsRequest$Resource' => '

The name of the Amazon SageMaker resource to search for. The only valid Resource value is TrainingJob.

', 'SearchRequest$Resource' => '

The name of the Amazon SageMaker resource to search for. Currently, the only valid Resource value is TrainingJob.

', ], ], 'ResponseMIMEType' => [ 'base' => NULL, 'refs' => [ 'ResponseMIMETypes$member' => NULL, ], ], 'ResponseMIMETypes' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$SupportedResponseMIMETypes' => '

The supported MIME types for the output data.

', ], ], 'RoleArn' => [ 'base' => NULL, 'refs' => [ 'AlgorithmValidationSpecification$ValidationRole' => '

The IAM role that Amazon SageMaker uses to run the training jobs.

', 'CreateCompilationJobRequest$RoleArn' => '

The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

During model compilation, Amazon SageMaker needs your permission to:

You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For more information, see Amazon SageMaker Roles.

', 'CreateLabelingJobRequest$RoleArn' => '

The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf during data labeling. You must grant this role the necessary permissions so that Amazon SageMaker can successfully complete data labeling.

', 'CreateModelInput$ExecutionRoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', 'CreateNotebookInstanceInput$RoleArn' => '

When you send any requests to AWS resources from the notebook instance, Amazon SageMaker assumes this role to perform tasks on your behalf. You must grant this role necessary permissions so Amazon SageMaker can perform these tasks. The policy must allow the Amazon SageMaker service principal (sagemaker.amazonaws.com) permissions to assume this role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', 'CreateTrainingJobRequest$RoleArn' => '

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

During model training, Amazon SageMaker needs your permission to read input data from an S3 bucket, download a Docker image that contains training code, write model artifacts to an S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', 'DescribeCompilationJobResponse$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker assumes to perform the model compilation job on your behalf.

', 'DescribeLabelingJobResponse$RoleArn' => '

The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf during data labeling.

', 'DescribeModelOutput$ExecutionRoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that you specified for the model.

', 'DescribeNotebookInstanceOutput$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role associated with the instance.

', 'DescribeTrainingJobResponse$RoleArn' => '

The AWS Identity and Access Management (IAM) role configured for the training job.

', 'HyperParameterTrainingJobDefinition$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role associated with the training jobs that the tuning job launches.

', 'ModelPackageValidationSpecification$ValidationRole' => '

The IAM role to be used for the validation of the model package.

', 'RenderUiTemplateRequest$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that has access to the S3 objects that are used by the template.

', 'TrainingJob$RoleArn' => '

The AWS Identity and Access Management (IAM) role configured for the training job.

', 'UpdateNotebookInstanceInput$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access the notebook instance. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', ], ], 'S3DataDistribution' => [ 'base' => NULL, 'refs' => [ 'S3DataSource$S3DataDistributionType' => '

If you want Amazon SageMaker to replicate the entire dataset on each ML compute instance that is launched for model training, specify FullyReplicated.

If you want Amazon SageMaker to replicate a subset of data on each ML compute instance that is launched for model training, specify ShardedByS3Key. If there are n ML compute instances launched for a training job, each instance gets approximately 1/n of the number of S3 objects. In this case, model training on each machine uses only the subset of training data.

Don\'t choose more ML compute instances for training than available S3 objects. If you do, some nodes won\'t get any data and you will pay for nodes that aren\'t getting any training data. This applies in both File and Pipe modes. Keep this in mind when developing algorithms.

In distributed training, where you use multiple ML compute EC2 instances, you might choose ShardedByS3Key. If the algorithm requires copying training data to the ML storage volume (when TrainingInputMode is set to File), this copies 1/n of the number of objects.

', ], ], 'S3DataSource' => [ 'base' => '

Describes the S3 data source.

', 'refs' => [ 'DataSource$S3DataSource' => '

The S3 location of the data source that is associated with a channel.

', ], ], 'S3DataType' => [ 'base' => NULL, 'refs' => [ 'S3DataSource$S3DataType' => '

If you choose S3Prefix, S3Uri identifies a key name prefix. Amazon SageMaker uses all objects that match the specified key name prefix for model training.

If you choose ManifestFile, S3Uri identifies an object that is a manifest file containing a list of object keys that you want Amazon SageMaker to use for model training.

If you choose AugmentedManifestFile, S3Uri identifies an object that is an augmented manifest file in JSON lines format. This file contains the data you want to use for model training. AugmentedManifestFile can only be used if the Channel\'s input mode is Pipe.

', 'TransformS3DataSource$S3DataType' => '

If you choose S3Prefix, S3Uri identifies a key name prefix. Amazon SageMaker uses all objects with the specified key name prefix for batch transform.

If you choose ManifestFile, S3Uri identifies an object that is a manifest file containing a list of object keys that you want Amazon SageMaker to use for batch transform.

', ], ], 'S3Uri' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobRequest$LabelCategoryConfigS3Uri' => '

The S3 URL of the file that defines the categories used to label the data objects.

The file is a JSON structure in the following format:

{
  "document-version": "2018-11-28",
  "labels": [
    {"label": "label 1"},
    {"label": "label 2"},
    ...
    {"label": "label n"}
  ]
}

', 'DescribeLabelingJobResponse$LabelCategoryConfigS3Uri' => '

The S3 location of the JSON file that defines the categories used to label data objects.

The file is a JSON structure in the following format:

{
  "document-version": "2018-11-28",
  "labels": [
    {"label": "label 1"},
    {"label": "label 2"},
    ...
    {"label": "label n"}
  ]
}

', 'InputConfig$S3Uri' => '

The S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

', 'LabelingJobOutput$OutputDatasetS3Uri' => '

The Amazon S3 bucket location of the manifest file for labeled data.

', 'LabelingJobOutputConfig$S3OutputPath' => '

The Amazon S3 location to write output data.

', 'LabelingJobS3DataSource$ManifestS3Uri' => '

The Amazon S3 location of the manifest file that describes the input data objects.

', 'ModelArtifacts$S3ModelArtifacts' => '

The path of the S3 object that contains the model artifacts. For example, s3://bucket-name/keynameprefix/model.tar.gz.

', 'OutputConfig$S3OutputLocation' => '

Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For example, s3://bucket-name/key-name-prefix.

', 'OutputDataConfig$S3OutputPath' => '

Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For example, s3://bucket-name/key-name-prefix.

', 'S3DataSource$S3Uri' => '

Depending on the value specified for the S3DataType, identifies either a key name prefix or a manifest. For example:

', 'TransformOutput$S3OutputPath' => '

The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job. For example, s3://bucket-name/key-name-prefix.

For every S3 object used as input for the transform job, the transformed data is stored in a corresponding subfolder in the location under the output prefix. For example, for the input data s3://bucket-name/input-name-prefix/dataset01/data.csv the transformed data is stored at s3://bucket-name/key-name-prefix/dataset01/. This is based on the original name, as a series of .part files (.part0001, .part0002, etc.).

', 'TransformS3DataSource$S3Uri' => '

Depending on the value specified for the S3DataType, identifies either a key name prefix or a manifest. For example:

', 'UiConfig$UiTemplateS3Uri' => '

The Amazon S3 bucket location of the UI template. For more information about the contents of a UI template, see Creating Your Custom Labeling Task Template.

', ], ], 'SearchExpression' => [ 'base' => '

A multi-expression that searches for the specified resource or resources in a search. All resource objects that satisfy the expression\'s condition are included in the search results. You must specify at least one subexpression, filter, or nested filter. A SearchExpression can contain up to twenty elements.

A SearchExpression contains the following components:

', 'refs' => [ 'SearchExpressionList$member' => NULL, 'SearchRequest$SearchExpression' => '

A Boolean conditional statement. Resource objects must satisfy this condition to be included in search results. You must provide at least one subexpression, filter, or nested filter. The maximum number of recursive SubExpressions, NestedFilters, and Filters that can be included in a SearchExpression object is 50.

', ], ], 'SearchExpressionList' => [ 'base' => NULL, 'refs' => [ 'SearchExpression$SubExpressions' => '

A list of search expression objects.

', ], ], 'SearchRecord' => [ 'base' => '

An individual search result record that contains a single resource object.

', 'refs' => [ 'SearchResultsList$member' => NULL, ], ], 'SearchRequest' => [ 'base' => NULL, 'refs' => [], ], 'SearchResponse' => [ 'base' => NULL, 'refs' => [], ], 'SearchResultsList' => [ 'base' => NULL, 'refs' => [ 'SearchResponse$Results' => '

A list of SearchResult objects.

', ], ], 'SearchSortOrder' => [ 'base' => NULL, 'refs' => [ 'SearchRequest$SortOrder' => '

How SearchResults are ordered. Valid values are Ascending or Descending. The default is Descending.

', ], ], 'SecondaryStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$SecondaryStatus' => '

Provides detailed information about the state of the training job. For detailed information on the secondary status of the training job, see StatusMessage under SecondaryStatusTransition.

Amazon SageMaker provides primary statuses and secondary statuses that apply to each of them:

InProgress
  • Starting - Starting the training job.

  • Downloading - An optional stage for algorithms that support File training input mode. It indicates that data is being downloaded to the ML storage volumes.

  • Training - Training is in progress.

  • Uploading - Training is complete and the model artifacts are being uploaded to the S3 location.

Completed
  • Completed - The training job has completed.

Failed
  • Failed - The training job has failed. The reason for the failure is returned in the FailureReason field of DescribeTrainingJobResponse.

Stopped
  • MaxRuntimeExceeded - The job stopped because it exceeded the maximum allowed runtime.

  • Stopped - The training job has stopped.

Stopping
  • Stopping - Stopping the training job.

Valid values for SecondaryStatus are subject to change.

We no longer support the following secondary statuses:

', 'SecondaryStatusTransition$Status' => '

Contains a secondary status information from a training job.

Status might be one of the following secondary statuses:

InProgress
  • Starting - Starting the training job.

  • Downloading - An optional stage for algorithms that support File training input mode. It indicates that data is being downloaded to the ML storage volumes.

  • Training - Training is in progress.

  • Uploading - Training is complete and the model artifacts are being uploaded to the S3 location.

Completed
  • Completed - The training job has completed.

Failed
  • Failed - The training job has failed. The reason for the failure is returned in the FailureReason field of DescribeTrainingJobResponse.

Stopped
  • MaxRuntimeExceeded - The job stopped because it exceeded the maximum allowed runtime.

  • Stopped - The training job has stopped.

Stopping
  • Stopping - Stopping the training job.

We no longer support the following secondary statuses:

', 'TrainingJob$SecondaryStatus' => '

Provides detailed information about the state of the training job. For detailed information about the secondary status of the training job, see StatusMessage under SecondaryStatusTransition.

Amazon SageMaker provides primary statuses and secondary statuses that apply to each of them:

InProgress
  • Starting - Starting the training job.

  • Downloading - An optional stage for algorithms that support File training input mode. It indicates that data is being downloaded to the ML storage volumes.

  • Training - Training is in progress.

  • Uploading - Training is complete and the model artifacts are being uploaded to the S3 location.

Completed
  • Completed - The training job has completed.

Failed
  • Failed - The training job has failed. The reason for the failure is returned in the FailureReason field of DescribeTrainingJobResponse.

Stopped
  • MaxRuntimeExceeded - The job stopped because it exceeded the maximum allowed runtime.

  • Stopped - The training job has stopped.

Stopping
  • Stopping - Stopping the training job.

Valid values for SecondaryStatus are subject to change.

We no longer support the following secondary statuses:

', ], ], 'SecondaryStatusTransition' => [ 'base' => '

An array element of DescribeTrainingJobResponse$SecondaryStatusTransitions. It provides additional details about a status that the training job has transitioned through. A training job can be in one of several states, for example, starting, downloading, training, or uploading. Within each state, there are a number of intermediate states. For example, within the starting state, Amazon SageMaker could be starting the training job or launching the ML instances. These transitional states are referred to as the job\'s secondary status.

', 'refs' => [ 'SecondaryStatusTransitions$member' => NULL, ], ], 'SecondaryStatusTransitions' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$SecondaryStatusTransitions' => '

A history of all of the secondary statuses that the training job has transitioned through.

', 'TrainingJob$SecondaryStatusTransitions' => '

A history of all of the secondary statuses that the training job has transitioned through.

', ], ], 'SecretArn' => [ 'base' => NULL, 'refs' => [ 'GitConfig$SecretArn' => '

The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the git repository. The secret must have a staging label of AWSCURRENT and must be in the following format:

{"username": UserName, "password": Password}

', 'GitConfigForUpdate$SecretArn' => '

The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the git repository. The secret must have a staging label of AWSCURRENT and must be in the following format:

{"username": UserName, "password": Password}

', ], ], 'SecurityGroupId' => [ 'base' => NULL, 'refs' => [ 'SecurityGroupIds$member' => NULL, 'VpcSecurityGroupIds$member' => NULL, ], ], 'SecurityGroupIds' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$SecurityGroupIds' => '

The VPC security group IDs, in the form sg-xxxxxxxx. The security groups must be for the same VPC as specified in the subnet.

', 'DescribeNotebookInstanceOutput$SecurityGroups' => '

The IDs of the VPC security groups.

', ], ], 'Seed' => [ 'base' => NULL, 'refs' => [ 'ShuffleConfig$Seed' => '

Determines the shuffling order in the ShuffleConfig value.

', ], ], 'SessionExpirationDurationInSeconds' => [ 'base' => NULL, 'refs' => [ 'CreatePresignedNotebookInstanceUrlInput$SessionExpirationDurationInSeconds' => '

The duration of the session, in seconds. The default is 12 hours.
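
A hedged sketch overriding the default; the notebook instance name is hypothetical:

$result = $client->createPresignedNotebookInstanceUrl([
    "NotebookInstanceName" => "my-notebook",
    "SessionExpirationDurationInSeconds" => 3600, // 1 hour instead of the 12-hour default
]);
echo $result["AuthorizedUrl"];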

', ], ], 'ShuffleConfig' => [ 'base' => '

A configuration for a shuffle option for input data in a channel. If you use S3Prefix for S3DataType, the results of the S3 key prefix matches are shuffled. If you use ManifestFile, the order of the S3 object references in the ManifestFile is shuffled. If you use AugmentedManifestFile, the order of the JSON lines in the AugmentedManifestFile is shuffled. The shuffling order is determined using the Seed value.

For Pipe input mode, shuffling is done at the start of every epoch. With large datasets, this ensures that the order of the training data is different for each epoch, and it helps reduce bias and possible overfitting. In a multi-node training job when ShuffleConfig is combined with S3DataDistributionType of ShardedByS3Key, the data is shuffled across nodes so that the content sent to a particular node on the first epoch might be sent to a different node on the second epoch.
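
A sketch of a channel with a fixed shuffle seed; the bucket and channel layout are assumptions:

"InputDataConfig" => [[
    "ChannelName" => "train",
    "DataSource" => ["S3DataSource" => [
        "S3DataType" => "S3Prefix",
        "S3Uri" => "s3://my-bucket/train/",
        "S3DataDistributionType" => "ShardedByS3Key",
    ]],
    // A fixed Seed makes the per-epoch shuffling reproducible across runs.
    "ShuffleConfig" => ["Seed" => 123],
]],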

', 'refs' => [ 'Channel$ShuffleConfig' => '

A configuration for a shuffle option for input data in a channel. If you use S3Prefix for S3DataType, this shuffles the results of the S3 key prefix matches. If you use ManifestFile, the order of the S3 object references in the ManifestFile is shuffled. If you use AugmentedManifestFile, the order of the JSON lines in the AugmentedManifestFile is shuffled. The shuffling order is determined using the Seed value.

For Pipe input mode, shuffling is done at the start of every epoch. With large datasets, this ensures that the order of the training data is different for each epoch, and it helps reduce bias and possible overfitting. In a multi-node training job when ShuffleConfig is combined with S3DataDistributionType of ShardedByS3Key, the data is shuffled across nodes so that the content sent to a particular node on the first epoch might be sent to a different node on the second epoch.

', ], ], 'SortBy' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', 'ListTrainingJobsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', 'ListTransformJobsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'SortOrder' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$SortOrder' => '

The sort order for the results. The default is Ascending.

', 'ListCompilationJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListHyperParameterTuningJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListLabelingJobsForWorkteamRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListLabelingJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListModelPackagesInput$SortOrder' => '

The sort order for the results. The default is Ascending.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListTrainingJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListTransformJobsRequest$SortOrder' => '

The sort order for results. The default is Descending.

', 'ListWorkteamsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', ], ], 'SourceAlgorithm' => [ 'base' => '

Specifies an algorithm that was used to create the model package. The algorithm must be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you are subscribed to.

', 'refs' => [ 'SourceAlgorithmList$member' => NULL, ], ], 'SourceAlgorithmList' => [ 'base' => NULL, 'refs' => [ 'SourceAlgorithmSpecification$SourceAlgorithms' => '

A list of the algorithms that were used to create a model package.

', ], ], 'SourceAlgorithmSpecification' => [ 'base' => '

A list of algorithms that were used to create a model package.

', 'refs' => [ 'CreateModelPackageInput$SourceAlgorithmSpecification' => '

Details about the algorithm that was used to create the model package.

', 'DescribeModelPackageOutput$SourceAlgorithmSpecification' => '

Details about the algorithm that was used to create the model package.

', ], ], 'SplitType' => [ 'base' => NULL, 'refs' => [ 'TransformInput$SplitType' => '

The method to use to split the transform job\'s data into smaller batches. If you don\'t want to split the data, specify None. If you want to split records on a newline character boundary, specify Line. To split records according to the RecordIO format, specify RecordIO. The default value is None.

Amazon SageMaker sends the maximum number of records per batch in each request, up to the MaxPayloadInMB limit.

For information about the RecordIO format, see Data Format.
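
As an illustrative sketch (the bucket name is a placeholder), a TransformInput that splits records on newline boundaries might look like the following with the AWS SDK for PHP:

    $transformInput = [
        "DataSource" => [
            "S3DataSource" => [
                "S3DataType" => "S3Prefix",
                "S3Uri" => "s3://my-bucket/batch-input/",
            ],
        ],
        "ContentType" => "text/csv",
        "SplitType" => "Line", // split on newline character boundaries
    ];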

', ], ], 'StartNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'StatusMessage' => [ 'base' => NULL, 'refs' => [ 'SecondaryStatusTransition$StatusMessage' => '

A detailed description of the progress within a secondary status.

Amazon SageMaker provides secondary statuses and status messages that apply to each of them:

Starting
  • Starting the training job.

  • Launching requested ML instances.

  • Insufficient capacity error from EC2 while launching instances, retrying!

  • Launched instance was unhealthy, replacing it!

  • Preparing the instances for training.

Training
  • Downloading the training image.

  • Training image download completed. Training in progress.

Status messages are subject to change. Therefore, we recommend not including them in code that programmatically initiates actions. For example, don\'t use status messages in if statements.

To have an overview of your training job\'s progress, view TrainingJobStatus, SecondaryStatus, and StatusMessage in DescribeTrainingJobResponse together. For example, at the start of a training job, you might see the following:
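
For instance, TrainingJobStatus InProgress, SecondaryStatus Starting, StatusMessage Starting the training job. As a hedged sketch with the AWS SDK for PHP ($sageMaker is an assumed SageMakerClient instance and the job name is a placeholder), these fields can be read together like this:

    $result = $sageMaker->describeTrainingJob(["TrainingJobName" => "my-training-job"]);
    // Branch on TrainingJobStatus/SecondaryStatus, not on StatusMessage text.
    printf(
        "%s / %s\n",
        $result["TrainingJobStatus"],
        $result["SecondaryStatus"]
    );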

', ], ], 'StopCompilationJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopLabelingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'StopTrainingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopTransformJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StoppingCondition' => [ 'base' => '

Specifies how long model training can run. When model training reaches the limit, Amazon SageMaker ends the training job. Use this API to cap model training cost.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts, so that the results of training are not lost.

Training algorithms provided by Amazon SageMaker automatically save the intermediate results of a model training job (this is a best-effort case, as the model might not be ready to save at some stages, for example, when training has just started). This intermediate data is a valid model artifact. You can use it to create a model (CreateModel).
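
For illustration only, capping a training job at one day might look like this; MaxRuntimeInSeconds is assumed here as the member that sets the limit:

    // Amazon SageMaker ends the job once the limit is reached.
    $stoppingCondition = ["MaxRuntimeInSeconds" => 86400]; // 24 hours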

', 'refs' => [ 'CreateCompilationJobRequest$StoppingCondition' => '

The duration allowed for model compilation.

', 'CreateTrainingJobRequest$StoppingCondition' => '

Sets a duration for training. Use this parameter to cap model training costs. To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts.

When Amazon SageMaker terminates a job because the stopping condition has been met, training algorithms provided by Amazon SageMaker save the intermediate results of the job. This intermediate data is a valid model artifact. You can use it to create a model using the CreateModel API.

', 'DescribeCompilationJobResponse$StoppingCondition' => '

The duration allowed for model compilation.

', 'DescribeTrainingJobResponse$StoppingCondition' => '

The condition under which to stop the training job.

', 'HyperParameterTrainingJobDefinition$StoppingCondition' => '

Sets a maximum duration for the training jobs that the tuning job launches. Use this parameter to limit model training costs.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal. This delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts.

When Amazon SageMaker terminates a job because the stopping condition has been met, training algorithms provided by Amazon SageMaker save the intermediate results of the job.

', 'TrainingJob$StoppingCondition' => '

The condition under which to stop the training job.

', 'TrainingJobDefinition$StoppingCondition' => '

Sets a duration for training. Use this parameter to cap model training costs.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts.

', ], ], 'String' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusItem$FailureReason' => '

If the overall status is Failed, the reason for the failure.

', 'ModelPackageStatusItem$FailureReason' => '

If the overall status is Failed, the reason for the failure.

', 'ProductListings$member' => NULL, 'RenderUiTemplateResponse$RenderedContent' => '

A Liquid template that renders the HTML for the worker UI.

', 'RenderingError$Code' => '

A unique identifier for a specific class of errors.

', 'RenderingError$Message' => '

A human-readable message describing the error.

', 'SubscribedWorkteam$SellerName' => '

The name of the vendor in the Amazon Marketplace.

', 'SubscribedWorkteam$ListingId' => '

', 'Workteam$SubDomain' => '

The URI of the labeling job\'s user interface. Workers open this URI to start labeling your data objects.

', ], ], 'String200' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamRequest$Description' => '

A description of the work team.

', 'SubscribedWorkteam$MarketplaceTitle' => '

The title of the service provided by the vendor in the Amazon Marketplace.

', 'SubscribedWorkteam$MarketplaceDescription' => '

The description of the vendor from the Amazon Marketplace.

', 'UpdateWorkteamRequest$Description' => '

An updated description for the work team.

', 'Workteam$Description' => '

A description of the work team.

', ], ], 'SubnetId' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$SubnetId' => '

The ID of the subnet in a VPC to which you would like to have connectivity from your ML compute instance.

', 'DescribeNotebookInstanceOutput$SubnetId' => '

The ID of the VPC subnet.

', 'Subnets$member' => NULL, ], ], 'Subnets' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$Subnets' => '

The IDs of the subnets in the VPC to which you want to connect your training job or model.

', ], ], 'SubscribedWorkteam' => [ 'base' => '

Describes a work team of a vendor that does the labeling job.

', 'refs' => [ 'DescribeSubscribedWorkteamResponse$SubscribedWorkteam' => '

A Workteam instance that contains information about the work team.

', 'SubscribedWorkteams$member' => NULL, ], ], 'SubscribedWorkteams' => [ 'base' => NULL, 'refs' => [ 'ListSubscribedWorkteamsResponse$SubscribedWorkteams' => '

An array of Workteam objects, each describing a work team.

', ], ], 'Success' => [ 'base' => NULL, 'refs' => [ 'DeleteWorkteamResponse$Success' => '

Returns true if the work team was successfully deleted; otherwise, returns false.

', ], ], 'SuggestionQuery' => [ 'base' => '

Limits the property names that are included in the response.

', 'refs' => [ 'GetSearchSuggestionsRequest$SuggestionQuery' => '

Limits the property names that are included in the response.

', ], ], 'Tag' => [ 'base' => '

Describes a tag.

', 'refs' => [ 'TagList$member' => NULL, ], ], 'TagKey' => [ 'base' => NULL, 'refs' => [ 'Tag$Key' => '

The tag key.

', 'TagKeyList$member' => NULL, ], ], 'TagKeyList' => [ 'base' => NULL, 'refs' => [ 'DeleteTagsInput$TagKeys' => '

An array of one or more tag keys to delete.

', ], ], 'TagList' => [ 'base' => NULL, 'refs' => [ 'AddTagsInput$Tags' => '

An array of Tag objects. Each tag is a key-value pair. Only the key parameter is required. If you don\'t specify a value, Amazon SageMaker sets the value to an empty string.

', 'AddTagsOutput$Tags' => '

A list of tags associated with the Amazon SageMaker resource.

', 'CreateEndpointConfigInput$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateEndpointInput$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateHyperParameterTuningJobRequest$Tags' => '

An array of key-value pairs. You can use tags to categorize your AWS resources in different ways, for example, by purpose, owner, or environment. For more information, see AWS Tagging Strategies.

Tags that you specify for the tuning job are also added to all training jobs that the tuning job launches.

', 'CreateLabelingJobRequest$Tags' => '

An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateModelInput$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateNotebookInstanceInput$Tags' => '

A list of tags to associate with the notebook instance. You can add tags later by using the AddTags API.

', 'CreateTrainingJobRequest$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateTransformJobRequest$Tags' => '

(Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateWorkteamRequest$Tags' => '

', 'DescribeLabelingJobResponse$Tags' => '

An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'ListTagsOutput$Tags' => '

An array of Tag objects, each with a tag key and a value.

', 'TrainingJob$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', ], ], 'TagValue' => [ 'base' => NULL, 'refs' => [ 'Tag$Value' => '

The tag value.

', ], ], 'TargetDevice' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationTargetDevice' => '

The type of device that the model will run on after compilation has completed.

', 'OutputConfig$TargetDevice' => '

Identifies the device that you want to run your model on after it has been compiled. For example: ml_c5.
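
As a minimal sketch (the bucket name is a placeholder, and S3OutputLocation is assumed as the companion member that names the output path), an OutputConfig targeting the ml_c5 device mentioned above might look like this:

    $outputConfig = [
        "S3OutputLocation" => "s3://my-bucket/compiled-models/",
        "TargetDevice" => "ml_c5", // device the compiled model will run on
    ];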

', ], ], 'TaskAvailabilityLifetimeInSeconds' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskAvailabilityLifetimeInSeconds' => '

The length of time that a task remains available for labeling by human workers.

', ], ], 'TaskCount' => [ 'base' => NULL, 'refs' => [ 'DesiredWeightAndCapacity$DesiredInstanceCount' => '

The variant\'s capacity.

', 'ProductionVariant$InitialInstanceCount' => '

Number of instances to launch initially.

', 'ProductionVariantSummary$CurrentInstanceCount' => '

The number of instances associated with the variant.

', 'ProductionVariantSummary$DesiredInstanceCount' => '

The number of instances requested in the UpdateEndpointWeightsAndCapacities request.

', ], ], 'TaskDescription' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskDescription' => '

A description of the task for your human workers.

', ], ], 'TaskInput' => [ 'base' => NULL, 'refs' => [ 'RenderableTask$Input' => '

A JSON object that contains values for the variables defined in the template. It is made available to the template under the substitution variable task.input. For example, if you define a variable task.input.text in your template, you can supply the variable in the JSON object as "text": "sample text".
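
An illustrative sketch with the AWS SDK for PHP ($sageMaker, the role ARN, and the template content are placeholders) showing how the JSON in Input surfaces as task.input.text:

    $result = $sageMaker->renderUiTemplate([
        "RoleArn" => "arn:aws:iam::123456789012:role/SageMakerRole",
        "Task" => ["Input" => "{\"text\": \"sample text\"}"], // task.input.text
        "UiTemplate" => ["Content" => $liquidTemplateHtml],
    ]);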

', ], ], 'TaskKeyword' => [ 'base' => NULL, 'refs' => [ 'TaskKeywords$member' => NULL, ], ], 'TaskKeywords' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskKeywords' => '

Keywords used to describe the task so that workers on Amazon Mechanical Turk can discover the task.

', ], ], 'TaskTimeLimitInSeconds' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskTimeLimitInSeconds' => '

The amount of time that a worker has to complete a task.

', ], ], 'TaskTitle' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskTitle' => '

A title for the task for your human workers.

', ], ], 'TemplateContent' => [ 'base' => NULL, 'refs' => [ 'UiTemplate$Content' => '

The content of the Liquid template for the worker user interface.

', ], ], 'TenthFractionsOfACent' => [ 'base' => NULL, 'refs' => [ 'USD$TenthFractionsOfACent' => '

Fractions of a cent, in tenths.

', ], ], 'Timestamp' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationStartTime' => '

The time when the model compilation job started.

', 'CompilationJobSummary$CompilationEndTime' => '

The time when the model compilation job completed.

', 'DeployedImage$ResolutionTime' => '

The date and time when the image path for the model resolved to the ResolvedImage.

', 'DescribeCompilationJobResponse$CompilationStartTime' => '

The time when the model compilation job started the CompilationJob instances.

You are billed for the time between this timestamp and the timestamp in the DescribeCompilationJobResponse$CompilationEndTime field. In Amazon CloudWatch Logs, the start time might be later than this time. That\'s because it takes time to download the compilation job container, and the download time depends on the size of the container.

', 'DescribeCompilationJobResponse$CompilationEndTime' => '

The time when the model compilation job on a compilation job instance ended. For a successful or stopped job, this is when the job\'s model artifacts have finished uploading. For a failed job, this is when Amazon SageMaker detected that the job failed.

', 'DescribeEndpointConfigOutput$CreationTime' => '

A timestamp that shows when the endpoint configuration was created.

', 'DescribeEndpointOutput$CreationTime' => '

A timestamp that shows when the endpoint was created.

', 'DescribeEndpointOutput$LastModifiedTime' => '

A timestamp that shows when the endpoint was last modified.

', 'DescribeHyperParameterTuningJobResponse$CreationTime' => '

The date and time that the tuning job started.

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningEndTime' => '

The date and time that the tuning job ended.

', 'DescribeHyperParameterTuningJobResponse$LastModifiedTime' => '

The date and time that the status of the tuning job was modified.

', 'DescribeLabelingJobResponse$CreationTime' => '

The date and time that the labeling job was created.

', 'DescribeLabelingJobResponse$LastModifiedTime' => '

The date and time that the labeling job was last updated.

', 'DescribeModelOutput$CreationTime' => '

A timestamp that shows when the model was created.

', 'DescribeTrainingJobResponse$CreationTime' => '

A timestamp that indicates when the training job was created.

', 'DescribeTrainingJobResponse$TrainingStartTime' => '

Indicates the time when the training job starts on training instances. You are billed for the time interval between this time and the value of TrainingEndTime. The start time in CloudWatch Logs might be later than this time. The difference is due to the time it takes to download the training data and to the size of the training container.

', 'DescribeTrainingJobResponse$TrainingEndTime' => '

Indicates the time when the training job ends on training instances. You are billed for the time interval between the value of TrainingStartTime and this time. For successful jobs and stopped jobs, this is the time after model artifacts are uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

', 'DescribeTrainingJobResponse$LastModifiedTime' => '

A timestamp that indicates when the status of the training job was last modified.

', 'DescribeTransformJobResponse$CreationTime' => '

A timestamp that shows when the transform job was created.

', 'DescribeTransformJobResponse$TransformStartTime' => '

Indicates when the transform job starts on ML instances. You are billed for the time interval between this time and the value of TransformEndTime.

', 'DescribeTransformJobResponse$TransformEndTime' => '

Indicates when the transform job has been completed, or has stopped or failed. You are billed for the time interval between this time and the value of TransformStartTime.

', 'EndpointConfigSummary$CreationTime' => '

A timestamp that shows when the endpoint configuration was created.

', 'EndpointSummary$CreationTime' => '

A timestamp that shows when the endpoint was created.

', 'EndpointSummary$LastModifiedTime' => '

A timestamp that shows when the endpoint was last modified.

', 'HyperParameterTrainingJobSummary$CreationTime' => '

The date and time that the training job was created.

', 'HyperParameterTrainingJobSummary$TrainingStartTime' => '

The date and time that the training job started.

', 'HyperParameterTrainingJobSummary$TrainingEndTime' => '

Specifies the time when the training job ends on training instances. You are billed for the time interval between the value of TrainingStartTime and this time. For successful jobs and stopped jobs, this is the time after model artifacts are uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

', 'HyperParameterTuningJobSummary$CreationTime' => '

The date and time that the tuning job was created.

', 'HyperParameterTuningJobSummary$HyperParameterTuningEndTime' => '

The date and time that the tuning job ended.

', 'HyperParameterTuningJobSummary$LastModifiedTime' => '

The date and time that the tuning job was modified.

', 'LabelingJobForWorkteamSummary$CreationTime' => '

The date and time that the labeling job was created.

', 'LabelingJobSummary$CreationTime' => '

The date and time that the job was created (timestamp).

', 'LabelingJobSummary$LastModifiedTime' => '

The date and time that the job was last modified (timestamp).

', 'ListCodeRepositoriesInput$LastModifiedTimeAfter' => '

A filter that returns only Git repositories that were last modified after the specified time.

', 'ListCodeRepositoriesInput$LastModifiedTimeBefore' => '

A filter that returns only Git repositories that were last modified before the specified time.

', 'ListEndpointConfigsInput$CreationTimeBefore' => '

A filter that returns only endpoint configurations created before the specified time (timestamp).

', 'ListEndpointConfigsInput$CreationTimeAfter' => '

A filter that returns only endpoint configurations created after the specified time (timestamp).

', 'ListEndpointsInput$CreationTimeBefore' => '

A filter that returns only endpoints that were created before the specified time (timestamp).

', 'ListEndpointsInput$CreationTimeAfter' => '

A filter that returns only endpoints that were created after the specified time (timestamp).

', 'ListEndpointsInput$LastModifiedTimeBefore' => '

A filter that returns only endpoints that were modified before the specified timestamp.

', 'ListEndpointsInput$LastModifiedTimeAfter' => '

A filter that returns only endpoints that were modified after the specified timestamp.

', 'ListHyperParameterTuningJobsRequest$CreationTimeAfter' => '

A filter that returns only tuning jobs that were created after the specified time.

', 'ListHyperParameterTuningJobsRequest$CreationTimeBefore' => '

A filter that returns only tuning jobs that were created before the specified time.

', 'ListHyperParameterTuningJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only tuning jobs that were modified after the specified time.

', 'ListHyperParameterTuningJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only tuning jobs that were modified before the specified time.

', 'ListLabelingJobsForWorkteamRequest$CreationTimeAfter' => '

A filter that returns only labeling jobs created after the specified time (timestamp).

', 'ListLabelingJobsForWorkteamRequest$CreationTimeBefore' => '

A filter that returns only labeling jobs created before the specified time (timestamp).

', 'ListLabelingJobsRequest$CreationTimeAfter' => '

A filter that returns only labeling jobs created after the specified time (timestamp).

', 'ListLabelingJobsRequest$CreationTimeBefore' => '

A filter that returns only labeling jobs created before the specified time (timestamp).

', 'ListLabelingJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only labeling jobs modified after the specified time (timestamp).

', 'ListLabelingJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only labeling jobs modified before the specified time (timestamp).

', 'ListModelsInput$CreationTimeBefore' => '

A filter that returns only models created before the specified time (timestamp).

', 'ListModelsInput$CreationTimeAfter' => '

A filter that returns only models created after the specified time (timestamp).

', 'ListTrainingJobsRequest$CreationTimeAfter' => '

A filter that returns only training jobs created after the specified time (timestamp).

', 'ListTrainingJobsRequest$CreationTimeBefore' => '

A filter that returns only training jobs created before the specified time (timestamp).

', 'ListTrainingJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only training jobs modified after the specified time (timestamp).

', 'ListTrainingJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only training jobs modified before the specified time (timestamp).

', 'ListTransformJobsRequest$CreationTimeAfter' => '

A filter that returns only transform jobs created after the specified time.

', 'ListTransformJobsRequest$CreationTimeBefore' => '

A filter that returns only transform jobs created before the specified time.

', 'ListTransformJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only transform jobs modified after the specified time.

', 'ListTransformJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only transform jobs modified before the specified time.

', 'MetricData$Timestamp' => '

The date and time that the algorithm emitted the metric.

', 'ModelSummary$CreationTime' => '

A timestamp that indicates when the model was created.

', 'SecondaryStatusTransition$StartTime' => '

A timestamp that shows when the training job transitioned to the current secondary status state.

', 'SecondaryStatusTransition$EndTime' => '

A timestamp that shows when the training job transitioned out of this secondary status state into another secondary status state or when the training job has ended.

', 'TrainingJob$CreationTime' => '

A timestamp that indicates when the training job was created.

', 'TrainingJob$TrainingStartTime' => '

Indicates the time when the training job starts on training instances. You are billed for the time interval between this time and the value of TrainingEndTime. The start time in CloudWatch Logs might be later than this time. The difference is due to the time it takes to download the training data and to the size of the training container.

', 'TrainingJob$TrainingEndTime' => '

Indicates the time when the training job ends on training instances. You are billed for the time interval between the value of TrainingStartTime and this time. For successful jobs and stopped jobs, this is the time after model artifacts are uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

', 'TrainingJob$LastModifiedTime' => '

A timestamp that indicates when the status of the training job was last modified.

', 'TrainingJobSummary$CreationTime' => '

A timestamp that shows when the training job was created.

', 'TrainingJobSummary$TrainingEndTime' => '

A timestamp that shows when the training job ended. This field is set only if the training job has one of the terminal statuses (Completed, Failed, or Stopped).

', 'TrainingJobSummary$LastModifiedTime' => '

Timestamp when the training job was last modified.

', 'TransformJobSummary$CreationTime' => '

A timestamp that shows when the transform job was created.

', 'TransformJobSummary$TransformEndTime' => '

Indicates when the transform job ends on compute instances. For successful jobs and stopped jobs, this is the exact time recorded after the results are uploaded. For failed jobs, this is when Amazon SageMaker detected that the job failed.

', 'TransformJobSummary$LastModifiedTime' => '

Indicates when the transform job was last modified.

', 'Workteam$CreateDate' => '

The date and time that the work team was created (timestamp).

', 'Workteam$LastUpdatedDate' => '

The date and time that the work team was last updated (timestamp).

', ], ], 'TrainingInputMode' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$TrainingInputMode' => '

The input mode that the algorithm supports. For the input modes that Amazon SageMaker algorithms support, see Algorithms. If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume, and mounts the directory to the Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

In File mode, make sure that you provision the ML storage volume with sufficient capacity to accommodate the data downloaded from S3. In addition to the training data, the ML storage volume also stores the output model. The algorithm container also uses the ML storage volume to store intermediate information, if any.

For distributed algorithms using File mode, training data is distributed uniformly, and your training duration is predictable if the input data objects are approximately the same size. Amazon SageMaker does not split the files any further for model training. If the object sizes are skewed, training won\'t be optimal because the data distribution is also skewed: one host in the training cluster is overloaded and becomes a bottleneck in training.
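
As an illustrative sketch (the image URI is a placeholder, and TrainingImage is assumed as the member that names the container), an AlgorithmSpecification that streams data with Pipe mode might look like this:

    $algorithmSpecification = [
        "TrainingImage" => "123456789012.dkr.ecr.us-east-1.amazonaws.com/my-algo:latest",
        "TrainingInputMode" => "Pipe", // stream directly from S3 to the container
    ];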

', 'Channel$InputMode' => '

(Optional) The input mode to use for the data channel in a training job. If you don\'t set a value for InputMode, Amazon SageMaker uses the value set for TrainingInputMode. Use this parameter to override the TrainingInputMode setting in an AlgorithmSpecification request when you have a channel that needs a different input mode from the training job\'s general setting. To download the data from Amazon Simple Storage Service (Amazon S3) to the provisioned ML storage volume, and mount the directory to a Docker volume, use File input mode. To stream data directly from Amazon S3 to the container, choose Pipe input mode.

To use a model for incremental training, choose File input mode.

', 'HyperParameterAlgorithmSpecification$TrainingInputMode' => '

The input mode that the algorithm supports: File or Pipe. In File input mode, Amazon SageMaker downloads the training data from Amazon S3 to the storage volume that is attached to the training instance and mounts the directory to the Docker volume for the training container. In Pipe input mode, Amazon SageMaker streams data directly from Amazon S3 to the container.

If you specify File mode, make sure that you provision the storage volume that is attached to the training instance with enough capacity to accommodate the training data downloaded from Amazon S3, the model artifacts, and intermediate information.

For more information about input modes, see Algorithms.

', 'InputModes$member' => NULL, 'TrainingJobDefinition$TrainingInputMode' => '

The input mode used by the algorithm for the training job. For the input modes that Amazon SageMaker algorithms support, see Algorithms.

If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume, and mounts the directory to the Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

', ], ], 'TrainingInstanceCount' => [ 'base' => NULL, 'refs' => [ 'ResourceConfig$InstanceCount' => '

The number of ML compute instances to use. For distributed training, provide a value greater than 1.

', ], ], 'TrainingInstanceType' => [ 'base' => NULL, 'refs' => [ 'ResourceConfig$InstanceType' => '

The ML compute instance type.

', 'TrainingInstanceTypes$member' => NULL, ], ], 'TrainingInstanceTypes' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$SupportedTrainingInstanceTypes' => '

A list of the instance types that this algorithm can use for training.

', ], ], 'TrainingJob' => [ 'base' => '

Contains information about a training job.

', 'refs' => [ 'SearchRecord$TrainingJob' => '

A TrainingJob object that is returned as part of a Search request.

', ], ], 'TrainingJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobResponse$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'DescribeTrainingJobResponse$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'HyperParameterTrainingJobSummary$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'TrainingJob$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'TrainingJobSummary$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', ], ], 'TrainingJobDefinition' => [ 'base' => '

Defines the input needed to run a training job using the algorithm.

', 'refs' => [ 'AlgorithmValidationProfile$TrainingJobDefinition' => '

The TrainingJobDefinition object that describes the training job that Amazon SageMaker runs to validate your algorithm.

', ], ], 'TrainingJobEarlyStoppingType' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTuningJobConfig$TrainingJobEarlyStoppingType' => '

Specifies whether to use early stopping for training jobs launched by the hyperparameter tuning job. This can be one of the following values (the default value is OFF):

OFF

Training jobs launched by the hyperparameter tuning job do not use early stopping.

AUTO

Amazon SageMaker stops training jobs launched by the hyperparameter tuning job when they are unlikely to perform better than previously completed training jobs. For more information, see Stop Training Jobs Early.
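
For illustration, opting in to early stopping might look like the following sketch; Strategy is an assumed sibling member, and the other required members of HyperParameterTuningJobConfig are omitted for brevity:

    $tuningJobConfig = [
        "Strategy" => "Bayesian",
        "TrainingJobEarlyStoppingType" => "AUTO", // stop unpromising jobs early
        // ...HyperParameterTuningJobObjective, ResourceLimits, ParameterRanges
    ];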

', ], ], 'TrainingJobName' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobRequest$TrainingJobName' => '

The name of the training job. The name must be unique within an AWS Region in an AWS account.

', 'DescribeTrainingJobRequest$TrainingJobName' => '

The name of the training job.

', 'DescribeTrainingJobResponse$TrainingJobName' => '

Name of the model training job.

', 'HyperParameterTrainingJobSummary$TrainingJobName' => '

The name of the training job.

', 'StopTrainingJobRequest$TrainingJobName' => '

The name of the training job to stop.

', 'TrainingJob$TrainingJobName' => '

The name of the training job.

', 'TrainingJobSummary$TrainingJobName' => '

The name of the training job that you want a summary for.

', ], ], 'TrainingJobSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListTrainingJobsForHyperParameterTuningJobRequest$SortBy' => '

The field to sort results by. The default is Name.

If the value of this field is FinalObjectiveMetricValue, any training jobs that did not return an objective metric are not listed.

', ], ], 'TrainingJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$TrainingJobStatus' => '

The status of the training job.

Amazon SageMaker provides the following training job statuses:

For more detailed information, see SecondaryStatus.

', 'HyperParameterTrainingJobSummary$TrainingJobStatus' => '

The status of the training job.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$StatusEquals' => '

A filter that returns only training jobs with the specified status.

', 'ListTrainingJobsRequest$StatusEquals' => '

A filter that retrieves only training jobs with a specific status.

', 'TrainingJob$TrainingJobStatus' => '

The status of the training job.

Training job statuses are:

For more detailed information, see SecondaryStatus.

', 'TrainingJobSummary$TrainingJobStatus' => '

The status of the training job.

', ], ], 'TrainingJobStatusCounter' => [ 'base' => NULL, 'refs' => [ 'TrainingJobStatusCounters$Completed' => '

The number of completed training jobs launched by the hyperparameter tuning job.

', 'TrainingJobStatusCounters$InProgress' => '

The number of in-progress training jobs launched by a hyperparameter tuning job.

', 'TrainingJobStatusCounters$RetryableError' => '

The number of training jobs that failed, but can be retried. A failed training job can be retried only if it failed because an internal service error occurred.

', 'TrainingJobStatusCounters$NonRetryableError' => '

The number of training jobs that failed and can\'t be retried. A failed training job can\'t be retried if it failed because a client error occurred.

', 'TrainingJobStatusCounters$Stopped' => '

The number of training jobs launched by a hyperparameter tuning job that were manually stopped.

', ], ], 'TrainingJobStatusCounters' => [ 'base' => '

The numbers of training jobs launched by a hyperparameter tuning job, categorized by status.

', 'refs' => [ 'DescribeHyperParameterTuningJobResponse$TrainingJobStatusCounters' => '

The TrainingJobStatusCounters object that specifies the number of training jobs, categorized by status, that this tuning job launched.

', 'HyperParameterTuningJobSummary$TrainingJobStatusCounters' => '

The TrainingJobStatusCounters object that specifies the numbers of training jobs, categorized by status, that this tuning job launched.

', ], ], 'TrainingJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListTrainingJobsResponse$TrainingJobSummaries' => '

An array of TrainingJobSummary objects, each listing a training job.

', ], ], 'TrainingJobSummary' => [ 'base' => '

Provides summary information about a training job.

', 'refs' => [ 'TrainingJobSummaries$member' => NULL, ], ], 'TrainingSpecification' => [ 'base' => '

Defines how the algorithm is used for a training job.

', 'refs' => [ 'CreateAlgorithmInput$TrainingSpecification' => '

Specifies details about training jobs run by this algorithm, including the following:

', 'DescribeAlgorithmOutput$TrainingSpecification' => '

Details about training jobs run by this algorithm.

', ], ], 'TransformDataSource' => [ 'base' => '

Describes the location of the channel data.

', 'refs' => [ 'TransformInput$DataSource' => '

Describes the location of the channel data, that is, the S3 location of the input data that the model can consume.

', ], ], 'TransformEnvironmentKey' => [ 'base' => NULL, 'refs' => [ 'TransformEnvironmentMap$key' => NULL, ], ], 'TransformEnvironmentMap' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$Environment' => '

The environment variables to set in the Docker container. We support up to 16 key-value entries in the map.

', 'DescribeTransformJobResponse$Environment' => '

', 'TransformJobDefinition$Environment' => '

The environment variables to set in the Docker container. We support up to 16 key-value entries in the map.

', ], ], 'TransformEnvironmentValue' => [ 'base' => NULL, 'refs' => [ 'TransformEnvironmentMap$value' => NULL, ], ], 'TransformInput' => [ 'base' => '

Describes the input source of a transform job and the way the transform job consumes it.

', 'refs' => [ 'CreateTransformJobRequest$TransformInput' => '

Describes the input source and the way the transform job consumes it.

', 'DescribeTransformJobResponse$TransformInput' => '

Describes the dataset to be transformed and the Amazon S3 location where it is stored.

', 'TransformJobDefinition$TransformInput' => '

A description of the input source and the way the transform job consumes it.

', ], ], 'TransformInstanceCount' => [ 'base' => NULL, 'refs' => [ 'TransformResources$InstanceCount' => '

The number of ML compute instances to use in the transform job. For distributed transform, provide a value greater than 1. The default value is 1.

', ], ], 'TransformInstanceType' => [ 'base' => NULL, 'refs' => [ 'TransformInstanceTypes$member' => NULL, 'TransformResources$InstanceType' => '

The ML compute instance type for the transform job. If you use built-in algorithms to transform moderately sized datasets, ml.m4.xlarge or ml.m5.large should suffice. There is no default value for InstanceType.

', ], ], 'TransformInstanceTypes' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$SupportedTransformInstanceTypes' => '

A list of the instance types on which a transformation job can be run or on which an endpoint can be deployed.

', ], ], 'TransformJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobResponse$TransformJobArn' => '

The Amazon Resource Name (ARN) of the transform job.

', 'DescribeTransformJobResponse$TransformJobArn' => '

The Amazon Resource Name (ARN) of the transform job.

', 'TransformJobSummary$TransformJobArn' => '

The Amazon Resource Name (ARN) of the transform job.

', ], ], 'TransformJobDefinition' => [ 'base' => '

Defines the input needed to run a transform job using the inference specification specified in the algorithm.

', 'refs' => [ 'AlgorithmValidationProfile$TransformJobDefinition' => '

The TransformJobDefinition object that describes the transform job that Amazon SageMaker runs to validate your algorithm.

', 'ModelPackageValidationProfile$TransformJobDefinition' => '

The TransformJobDefinition object that describes the transform job used for the validation of the model package.

', ], ], 'TransformJobName' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$TransformJobName' => '

The name of the transform job. The name must be unique within an AWS Region in an AWS account.

', 'DescribeTransformJobRequest$TransformJobName' => '

The name of the transform job that you want to view details of.

', 'DescribeTransformJobResponse$TransformJobName' => '

The name of the transform job.

', 'StopTransformJobRequest$TransformJobName' => '

The name of the transform job to stop.

', 'TransformJobSummary$TransformJobName' => '

The name of the transform job.

', ], ], 'TransformJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeTransformJobResponse$TransformJobStatus' => '

The status of the transform job. If the transform job failed, the reason is returned in the FailureReason field.

', 'ListTransformJobsRequest$StatusEquals' => '

A filter that retrieves only transform jobs with a specific status.

', 'TransformJobSummary$TransformJobStatus' => '

The status of the transform job.

', ], ], 'TransformJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListTransformJobsResponse$TransformJobSummaries' => '

An array of TransformJobSummary objects.

', ], ], 'TransformJobSummary' => [ 'base' => '

Provides a summary of a transform job. Multiple TransformJobSummary objects are returned as a list in response to a ListTransformJobs call.

', 'refs' => [ 'TransformJobSummaries$member' => NULL, ], ], 'TransformOutput' => [ 'base' => '

Describes the results of a transform job output.

', 'refs' => [ 'CreateTransformJobRequest$TransformOutput' => '

Describes the results of the transform job.

', 'DescribeTransformJobResponse$TransformOutput' => '

Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the transform job.

', 'TransformJobDefinition$TransformOutput' => '

Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the transform job.

', ], ], 'TransformResources' => [ 'base' => '

Describes the resources, including ML instance types and ML instance count, to use for the transform job.

', 'refs' => [ 'CreateTransformJobRequest$TransformResources' => '

Describes the resources, including ML instance types and ML instance count, to use for the transform job.

', 'DescribeTransformJobResponse$TransformResources' => '

Describes the resources, including ML instance types and ML instance count, to use for the transform job.

', 'TransformJobDefinition$TransformResources' => '

Identifies the ML compute instances for the transform job.

', ], ], 'TransformS3DataSource' => [ 'base' => '

Describes the S3 data source.

', 'refs' => [ 'TransformDataSource$S3DataSource' => '

The S3 location of the data source that is associated with a channel.

', ], ], 'USD' => [ 'base' => '

Represents an amount of money in United States dollars.

', 'refs' => [ 'PublicWorkforceTaskPrice$AmountInUsd' => '

Defines the amount of money paid to a worker in United States dollars.

', ], ], 'UiConfig' => [ 'base' => '

Provides configuration information for the worker UI for a labeling job.

', 'refs' => [ 'HumanTaskConfig$UiConfig' => '

Information about the user interface that workers use to complete the labeling task.

', ], ], 'UiTemplate' => [ 'base' => '

The Liquid template for the worker user interface.

', 'refs' => [ 'RenderUiTemplateRequest$UiTemplate' => '

A Template object containing the worker UI template to render.

', ], ], 'UpdateCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateCodeRepositoryOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointWeightsAndCapacitiesInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointWeightsAndCapacitiesOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceLifecycleConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'Url' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$ModelDataUrl' => '

The S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

If you provide a value for this parameter, Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you provide. AWS STS is activated in your IAM user account by default. If you previously deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For more information, see Activating and Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User Guide.

', 'ModelPackageContainerDefinition$ModelDataUrl' => '

The Amazon S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

', 'SourceAlgorithm$ModelDataUrl' => '

The Amazon S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

', ], ], 'VariantName' => [ 'base' => NULL, 'refs' => [ 'DesiredWeightAndCapacity$VariantName' => '

The name of the variant to update.

', 'ProductionVariant$VariantName' => '

The name of the production variant.

', 'ProductionVariantSummary$VariantName' => '

The name of the variant.

', ], ], 'VariantWeight' => [ 'base' => NULL, 'refs' => [ 'DesiredWeightAndCapacity$DesiredWeight' => '

The variant\'s weight.

', 'ProductionVariant$InitialVariantWeight' => '

Determines initial traffic distribution among all of the models that you specify in the endpoint configuration. The traffic to a production variant is determined by the ratio of the VariantWeight to the sum of all VariantWeight values across all ProductionVariants. If unspecified, it defaults to 1.0.
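
As a worked example (model and variant names are placeholders), weights 2.0 and 1.0 route 2 / (2 + 1) = two-thirds of the traffic to the first variant and one-third to the second:

    $productionVariants = [
        [
            "VariantName" => "variant-a",
            "ModelName" => "model-a",
            "InstanceType" => "ml.m4.xlarge",
            "InitialInstanceCount" => 1,
            "InitialVariantWeight" => 2.0, // receives 2/3 of traffic
        ],
        [
            "VariantName" => "variant-b",
            "ModelName" => "model-b",
            "InstanceType" => "ml.m4.xlarge",
            "InitialInstanceCount" => 1,
            "InitialVariantWeight" => 1.0, // receives 1/3 of traffic
        ],
    ];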

', 'ProductionVariantSummary$CurrentWeight' => '

The weight associated with the variant.

', 'ProductionVariantSummary$DesiredWeight' => '

The requested weight, as specified in the UpdateEndpointWeightsAndCapacities request.

', ], ], 'VolumeSizeInGB' => [ 'base' => NULL, 'refs' => [ 'ResourceConfig$VolumeSizeInGB' => '

The size of the ML storage volume that you want to provision.

ML storage volumes store model artifacts and incremental states. Training algorithms might also use the ML storage volume for scratch space. If you want to store the training data in the ML storage volume, choose File as the TrainingInputMode in the algorithm specification.

You must specify sufficient ML storage for your scenario.

Amazon SageMaker supports only the General Purpose SSD (gp2) ML storage volume type.

', ], ], 'VpcConfig' => [ 'base' => '

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'refs' => [ 'CreateModelInput$VpcConfig' => '

A VpcConfig object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. VpcConfig is used in hosting services and in batch transform. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud.

', 'CreateTrainingJobRequest$VpcConfig' => '

A VpcConfig object that specifies the VPC that you want your training job to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'DescribeModelOutput$VpcConfig' => '

A VpcConfig object that specifies the VPC that this model has access to. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud.

', 'DescribeTrainingJobResponse$VpcConfig' => '

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'HyperParameterTrainingJobDefinition$VpcConfig' => '

The VpcConfig object that specifies the VPC that you want the training jobs that this hyperparameter tuning job launches to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'TrainingJob$VpcConfig' => '

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', ], ], 'VpcSecurityGroupIds' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$SecurityGroupIds' => '

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the Subnets field.

', ], ], 'Workteam' => [ 'base' => '

Provides details about a labeling work team.

', 'refs' => [ 'DescribeWorkteamResponse$Workteam' => '

A Workteam instance that contains information about the work team.

', 'UpdateWorkteamResponse$Workteam' => '

A Workteam object that describes the updated work team.

', 'Workteams$member' => NULL, ], ], 'WorkteamArn' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamResponse$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team. You can use this ARN to identify the work team.

', 'DescribeSubscribedWorkteamRequest$WorkteamArn' => '

The Amazon Resource Name (ARN) of the subscribed work team to describe.

', 'HumanTaskConfig$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team assigned to complete the tasks.

', 'LabelingJobSummary$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team assigned to the job.

', 'ListLabelingJobsForWorkteamRequest$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team for which you want to see labeling jobs.

', 'SubscribedWorkteam$WorkteamArn' => '

The Amazon Resource Name (ARN) of the vendor that you have subscribed to.

', 'Workteam$WorkteamArn' => '

The Amazon Resource Name (ARN) that identifies the work team.

', ], ], 'WorkteamName' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamRequest$WorkteamName' => '

The name of the work team. Use this name to identify the work team.

', 'DeleteWorkteamRequest$WorkteamName' => '

The name of the work team to delete.

', 'DescribeWorkteamRequest$WorkteamName' => '

The name of the work team to return a description of.

', 'ListSubscribedWorkteamsRequest$NameContains' => '

A string in the work team name. This filter returns only work teams whose name contains the specified string.

', 'ListWorkteamsRequest$NameContains' => '

A string in the work team\'s name. This filter returns only work teams whose name contains the specified string.

', 'UpdateWorkteamRequest$WorkteamName' => '

The name of the work team to update.

', 'Workteam$WorkteamName' => '

The name of the work team.

', ], ], 'Workteams' => [ 'base' => NULL, 'refs' => [ 'ListWorkteamsResponse$Workteams' => '

An array of Workteam objects, each describing a work team.

', ], ], ],]; +return [ 'version' => '2.0', 'service' => '

Provides APIs for creating and managing Amazon SageMaker resources.

', 'operations' => [ 'AddTags' => '

Adds or overwrites one or more tags for the specified Amazon SageMaker resource. You can add tags to notebook instances, training jobs, hyperparameter tuning jobs, models, endpoint configurations, and endpoints.

Each tag consists of a key and an optional value. Tag keys must be unique per resource. For more information about tags, see AWS Tagging Strategies.

Tags that you add to a hyperparameter tuning job by calling this API are also added to any training jobs that the hyperparameter tuning job launches after you call this API, but not to training jobs that the hyperparameter tuning job launched before you called this API. To make sure that the tags associated with a hyperparameter tuning job are also added to all training jobs that the hyperparameter tuning job launches, add the tags when you first create the tuning job by specifying them in the Tags parameter of CreateHyperParameterTuningJob.
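
A minimal sketch with the AWS SDK for PHP ($sageMaker and the resource ARN are placeholders); note that tags added this way do not back-fill training jobs the tuning job launched earlier:

    $sageMaker->addTags([
        "ResourceArn" => "arn:aws:sagemaker:us-east-1:123456789012:hyper-parameter-tuning-job/my-tuning-job",
        "Tags" => [["Key" => "project", "Value" => "demo"]],
    ]);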

', 'CreateAlgorithm' => '

Create a machine learning algorithm that you can use in Amazon SageMaker and list in the AWS Marketplace.

', 'CreateCodeRepository' => '

Creates a Git repository as a resource in your Amazon SageMaker account. You can associate the repository with notebook instances so that you can use Git source control for the notebooks you create. The Git repository is a resource in your Amazon SageMaker account, so it can be associated with more than one notebook instance, and it persists independently from the lifecycle of any notebook instances it is associated with.

The repository can be hosted either in AWS CodeCommit or in any other Git repository.

', 'CreateCompilationJob' => '

Starts a model compilation job. After the model has been compiled, Amazon SageMaker saves the resulting model artifacts to an Amazon Simple Storage Service (Amazon S3) bucket that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts with AWS IoT Greengrass. In that case, deploy them as an ML resource.

In the request body, you provide the following:

You can also provide a Tag to track the model compilation job\'s resource use and costs. The response body contains the CompilationJobArn for the compiled job.

To stop a model compilation job, use StopCompilationJob. To get information about a particular model compilation job, use DescribeCompilationJob. To get information about multiple model compilation jobs, use ListCompilationJobs.

', 'CreateEndpoint' => '

Creates an endpoint using the endpoint configuration specified in the request. Amazon SageMaker uses the endpoint to provision resources and deploy models. You create the endpoint configuration with the CreateEndpointConfig API.

Use this API only for hosting models using Amazon SageMaker hosting services.

The endpoint name must be unique within an AWS Region in your AWS account.

When it receives the request, Amazon SageMaker creates the endpoint, launches the resources (ML compute instances), and deploys the model(s) on them.

When Amazon SageMaker receives the request, it sets the endpoint status to Creating. After it creates the endpoint, it sets the status to InService. Amazon SageMaker can then process incoming requests for inferences. To check the status of an endpoint, use the DescribeEndpoint API.

For an example, see Exercise 1: Using the K-Means Algorithm Provided by Amazon SageMaker.

If any of the models hosted at this endpoint get model data from an Amazon S3 location, Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you provided. AWS STS is activated in your IAM user account by default. If you previously deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For more information, see Activating and Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User Guide.

', 'CreateEndpointConfig' => '

Creates an endpoint configuration that Amazon SageMaker hosting services uses to deploy models. In the configuration, you identify one or more models, created using the CreateModel API, to deploy and the resources that you want Amazon SageMaker to provision. Then you call the CreateEndpoint API.

Use this API only if you want to use Amazon SageMaker hosting services to deploy models into production.

In the request, you define one or more ProductionVariants, each of which identifies a model. Each ProductionVariant parameter also describes the resources that you want Amazon SageMaker to provision. This includes the number and type of ML compute instances to deploy.

If you are hosting multiple models, you also assign a VariantWeight to specify how much traffic you want to allocate to each model. For example, suppose that you want to host two models, A and B, and you assign traffic weight 2 for model A and 1 for model B. Amazon SageMaker distributes two-thirds of the traffic to Model A, and one-third to model B.

', 'CreateHyperParameterTuningJob' => '

Starts a hyperparameter tuning job. A hyperparameter tuning job finds the best version of a model by running many training jobs on your dataset using the algorithm you choose and values for hyperparameters within ranges that you specify. It then chooses the hyperparameter values that result in a model that performs the best, as measured by an objective metric that you choose.

', 'CreateLabelingJob' => '

Creates a job that uses workers to label the data objects in your input dataset. You can use the labeled data to train machine learning models.

You can select your workforce from one of three providers:

You can also use automated data labeling to reduce the number of data objects that need to be labeled by a human. Automated data labeling uses active learning to determine if a data object can be labeled by machine or if it needs to be sent to a human worker. For more information, see Using Automated Data Labeling.

The data objects to be labeled are contained in an Amazon S3 bucket. You create a manifest file that describes the location of each object. For more information, see Using Input and Output Data.

The output can be used as the manifest file for another labeling job or as training data for your machine learning models.

', 'CreateModel' => '

Creates a model in Amazon SageMaker. In the request, you name the model and describe a primary container. For the primary container, you specify the Docker image containing inference code, artifacts (from prior training), and a custom environment map that the inference code uses when you deploy the model for predictions.

Use this API to create a model if you want to use Amazon SageMaker hosting services or run a batch transform job.

To host your model, you create an endpoint configuration with the CreateEndpointConfig API, and then create an endpoint with the CreateEndpoint API. Amazon SageMaker then deploys all of the containers that you defined for the model in the hosting environment.

To run a batch transform using your model, you start a job with the CreateTransformJob API. Amazon SageMaker uses your model and your dataset to get inferences, which are then saved to a specified S3 location.

In the CreateModel request, you must define a container with the PrimaryContainer parameter.

In the request, you also provide an IAM role that Amazon SageMaker can assume to access model artifacts and a Docker image for deployment on ML compute hosting instances or for batch transform jobs. In addition, you also use the IAM role to manage permissions the inference code needs. For example, if the inference code accesses any other AWS resources, you grant the necessary permissions via this role.
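
A hedged sketch of this call with the AWS SDK for PHP; the image URI, artifact path, role ARN, and environment variable are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createModel([
    'ModelName' => 'my-model',
    'PrimaryContainer' => [
        // Docker image containing your inference code (placeholder URI).
        'Image'        => '123456789012.dkr.ecr.us-west-2.amazonaws.com/my-inference-image:latest',
        // Model artifacts from a prior training job (placeholder path).
        'ModelDataUrl' => 's3://my-bucket/output/model.tar.gz',
        // Custom environment map passed to the inference code.
        'Environment'  => ['MODEL_SERVER_WORKERS' => '2'],
    ],
    // Role Amazon SageMaker assumes to pull the image and read the artifacts.
    'ExecutionRoleArn' => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
]);
echo $result['ModelArn'], PHP_EOL;
```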

', 'CreateModelPackage' => '

Creates a model package that you can use to create Amazon SageMaker models or list on AWS Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

To create a model package by specifying a Docker container that contains your inference code and the Amazon S3 location of your model artifacts, provide values for InferenceSpecification. To create a model from an algorithm resource that you created or subscribed to in AWS Marketplace, provide a value for SourceAlgorithmSpecification.

', 'CreateNotebookInstance' => '

Creates an Amazon SageMaker notebook instance. A notebook instance is a machine learning (ML) compute instance running the Jupyter Notebook App.

In a CreateNotebookInstance request, specify the type of ML compute instance that you want to run. Amazon SageMaker launches the instance, installs common libraries that you can use to explore datasets for model training, and attaches an ML storage volume to the notebook instance.

Amazon SageMaker also provides a set of example notebooks. Each notebook demonstrates how to use Amazon SageMaker with a specific algorithm or with a machine learning framework.

After receiving the request, Amazon SageMaker does the following:

  1. Creates a network interface in the Amazon SageMaker VPC.

  2. (Optional) If you specified SubnetId, Amazon SageMaker creates a network interface in your own VPC, which is inferred from the subnet ID that you provide in the input. When creating this network interface, Amazon SageMaker attaches the security group that you specified in the request to the network interface that it creates in your VPC.

  3. Launches an EC2 instance of the type specified in the request in the Amazon SageMaker VPC. If you specified SubnetId of your VPC, Amazon SageMaker specifies both network interfaces when launching this instance. This enables inbound traffic from your own VPC to the notebook instance, assuming that the security groups allow it.

After creating the notebook instance, Amazon SageMaker returns its Amazon Resource Name (ARN).

After Amazon SageMaker creates the notebook instance, you can connect to the Jupyter server and work in Jupyter notebooks. For example, you can write code to explore a dataset that you can use for model training, train a model, host models by creating Amazon SageMaker endpoints, and validate hosted models.

For more information, see How It Works.
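
A minimal sketch of this request with the AWS SDK for PHP; the instance name, role ARN, subnet, and security group are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createNotebookInstance([
    'NotebookInstanceName' => 'my-notebook',
    'InstanceType'         => 'ml.t2.medium',
    'RoleArn'              => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
    // Optional: also create a network interface in your own VPC (step 2 above).
    'SubnetId'             => 'subnet-0123456789abcdef0',
    'SecurityGroupIds'     => ['sg-0123456789abcdef0'],
]);
echo $result['NotebookInstanceArn'], PHP_EOL;   // ARN returned after creation
```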

', 'CreateNotebookInstanceLifecycleConfig' => '

Creates a lifecycle configuration that you can associate with a notebook instance. A lifecycle configuration is a collection of shell scripts that run when you create or start a notebook instance.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:/bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.
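
A hedged sketch of creating a lifecycle configuration with the AWS SDK for PHP; the config name and script body are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

// Scripts are passed base64-encoded and must finish within 5 minutes.
$onStart = base64_encode("#!/bin/bash\nset -e\necho 'started' >> /tmp/lifecycle.log\n");

$result = $sagemaker->createNotebookInstanceLifecycleConfig([
    'NotebookInstanceLifecycleConfigName' => 'my-lifecycle-config',
    'OnStart' => [['Content' => $onStart]],   // runs on every start
    // 'OnCreate' => [...]                    // runs only when the instance is created
]);
```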

', 'CreatePresignedNotebookInstanceUrl' => '

Returns a URL that you can use to connect to the Jupyter server from a notebook instance. In the Amazon SageMaker console, when you choose Open next to a notebook instance, Amazon SageMaker opens a new tab showing the Jupyter server home page from the notebook instance. The console uses this API to get the URL and show the page.

You can restrict access to this API and to the URL that it returns to a list of IP addresses that you specify. To restrict access, attach an IAM policy that denies access to this API unless the call comes from an IP address in the specified list. Apply the policy to every AWS Identity and Access Management user, group, or role used to access the notebook instance. Use the NotIpAddress condition operator and the aws:SourceIP condition context key to specify the list of IP addresses that you want to have access to the notebook instance. For more information, see Limit Access to a Notebook Instance by IP Address.
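
A minimal sketch of this call with the AWS SDK for PHP; the instance name and session duration are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createPresignedNotebookInstanceUrl([
    'NotebookInstanceName'               => 'my-notebook',
    'SessionExpirationDurationInSeconds' => 1800,   // optional session lifetime
]);
echo $result['AuthorizedUrl'], PHP_EOL;   // open this URL to reach Jupyter
```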

', 'CreateTrainingJob' => '

Starts a model training job. After training completes, Amazon SageMaker saves the resulting model artifacts to an Amazon S3 location that you specify.

If you choose to host your model using Amazon SageMaker hosting services, you can use the resulting model artifacts as part of the model. You can also use the artifacts in a machine learning service other than Amazon SageMaker, provided that you know how to use them for inferences.

In the request body, you provide the following:

For more information about Amazon SageMaker, see How It Works.
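
A hedged sketch of the main request parameters with the AWS SDK for PHP; all names, image URIs, paths, and ARNs are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createTrainingJob([
    'TrainingJobName' => 'my-training-job',
    'AlgorithmSpecification' => [
        'TrainingImage'     => '123456789012.dkr.ecr.us-west-2.amazonaws.com/my-training-image:latest',
        'TrainingInputMode' => 'File',
    ],
    'RoleArn' => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
    'InputDataConfig' => [[
        'ChannelName' => 'train',
        'DataSource'  => ['S3DataSource' => [
            'S3DataType' => 'S3Prefix',
            'S3Uri'      => 's3://my-bucket/train/',
        ]],
    ]],
    // Where the resulting model artifacts are saved.
    'OutputDataConfig'  => ['S3OutputPath' => 's3://my-bucket/output/'],
    'ResourceConfig'    => [
        'InstanceType'   => 'ml.m4.xlarge',
        'InstanceCount'  => 1,
        'VolumeSizeInGB' => 10,
    ],
    'StoppingCondition' => ['MaxRuntimeInSeconds' => 3600],
]);
```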

', 'CreateTransformJob' => '

Starts a transform job. A transform job uses a trained model to get inferences on a dataset and saves these results to an Amazon S3 location that you specify.

To perform batch transformations, you create a transform job and use the data that you have readily available.

In the request body, you provide the following:

For more information about how batch transformation works in Amazon SageMaker, see How It Works.
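
A minimal sketch of this request with the AWS SDK for PHP; the job name, model name, and S3 paths are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createTransformJob([
    'TransformJobName' => 'my-transform-job',
    'ModelName'        => 'my-model',   // created with CreateModel
    'TransformInput'   => [
        'DataSource'  => ['S3DataSource' => [
            'S3DataType' => 'S3Prefix',
            'S3Uri'      => 's3://my-bucket/batch-input/',
        ]],
        'ContentType' => 'text/csv',
        'SplitType'   => 'Line',        // one record per CSV line
    ],
    'TransformOutput'    => ['S3OutputPath' => 's3://my-bucket/batch-output/'],
    'TransformResources' => ['InstanceType' => 'ml.m4.xlarge', 'InstanceCount' => 1],
]);
```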

', 'CreateWorkteam' => '

Creates a new work team for labeling your data. A work team is defined by one or more Amazon Cognito user pools. You must first create the user pools before you can create a work team.

You cannot create more than 25 work teams in an account and region.
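
A hedged sketch of this request with the AWS SDK for PHP; the work team name and the Amazon Cognito user pool, group, and app client IDs are placeholder assumptions and must already exist:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createWorkteam([
    'WorkteamName' => 'my-workteam',
    'Description'  => 'Private labeling team',
    'MemberDefinitions' => [[
        // The user pool must be in the same region as the SageMaker endpoint.
        'CognitoMemberDefinition' => [
            'UserPool'  => 'us-west-2_EXAMPLE',
            'UserGroup' => 'labelers',
            'ClientId'  => 'example-app-client-id',
        ],
    ]],
]);
```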

', 'DeleteAlgorithm' => '

Removes the specified algorithm from your account.

', 'DeleteCodeRepository' => '

Deletes the specified Git repository from your account.

', 'DeleteEndpoint' => '

Deletes an endpoint. Amazon SageMaker frees up all of the resources that were deployed when the endpoint was created.

Amazon SageMaker retires any custom KMS key grants associated with the endpoint, meaning you don\'t need to use the RevokeGrant API call.

', 'DeleteEndpointConfig' => '

Deletes an endpoint configuration. The DeleteEndpointConfig API deletes only the specified configuration. It does not delete endpoints created using the configuration.

', 'DeleteModel' => '

Deletes a model. The DeleteModel API deletes only the model entry that was created in Amazon SageMaker when you called the CreateModel API. It does not delete model artifacts, inference code, or the IAM role that you specified when creating the model.

', 'DeleteModelPackage' => '

Deletes a model package.

A model package is used to create Amazon SageMaker models or list on AWS Marketplace. Buyers can subscribe to model packages listed on AWS Marketplace to create models in Amazon SageMaker.

', 'DeleteNotebookInstance' => '

Deletes an Amazon SageMaker notebook instance. Before you can delete a notebook instance, you must call the StopNotebookInstance API.

When you delete a notebook instance, you lose all of your data. Amazon SageMaker removes the ML compute instance, and deletes the ML storage volume and the network interface associated with the notebook instance.

', 'DeleteNotebookInstanceLifecycleConfig' => '

Deletes a notebook instance lifecycle configuration.

', 'DeleteTags' => '

Deletes the specified tags from an Amazon SageMaker resource.

To list a resource\'s tags, use the ListTags API.

When you call this API to delete tags from a hyperparameter tuning job, the deleted tags are not removed from training jobs that the hyperparameter tuning job launched before you called this API.

', 'DeleteWorkteam' => '

Deletes an existing work team. This operation can\'t be undone.

', 'DescribeAlgorithm' => '

Returns a description of the specified algorithm that is in your account.

', 'DescribeCodeRepository' => '

Gets details about the specified Git repository.

', 'DescribeCompilationJob' => '

Returns information about a model compilation job.

To create a model compilation job, use CreateCompilationJob. To get information about multiple model compilation jobs, use ListCompilationJobs.

', 'DescribeEndpoint' => '

Returns the description of an endpoint.

', 'DescribeEndpointConfig' => '

Returns the description of an endpoint configuration created using the CreateEndpointConfig API.

', 'DescribeHyperParameterTuningJob' => '

Gets a description of a hyperparameter tuning job.

', 'DescribeLabelingJob' => '

Gets information about a labeling job.

', 'DescribeModel' => '

Describes a model that you created using the CreateModel API.

', 'DescribeModelPackage' => '

Returns a description of the specified model package, which is used to create Amazon SageMaker models or list them on AWS Marketplace.

To create models in Amazon SageMaker, buyers can subscribe to model packages listed on AWS Marketplace.

', 'DescribeNotebookInstance' => '

Returns information about a notebook instance.

', 'DescribeNotebookInstanceLifecycleConfig' => '

Returns a description of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'DescribeSubscribedWorkteam' => '

Gets information about a work team provided by a vendor. It returns details about the subscription with a vendor in the AWS Marketplace.

', 'DescribeTrainingJob' => '

Returns information about a training job.

', 'DescribeTransformJob' => '

Returns information about a transform job.

', 'DescribeWorkteam' => '

Gets information about a specific work team. You can see information such as the create date, the last updated date, membership information, and the work team\'s Amazon Resource Name (ARN).

', 'GetSearchSuggestions' => '

An auto-complete API for the search functionality in the Amazon SageMaker console. It returns suggestions of possible matches for the property name to use in Search queries. Provides suggestions for HyperParameters, Tags, and Metrics.

', 'ListAlgorithms' => '

Lists the machine learning algorithms that have been created.

', 'ListCodeRepositories' => '

Gets a list of the Git repositories in your account.

', 'ListCompilationJobs' => '

Lists model compilation jobs that satisfy various filters.

To create a model compilation job, use CreateCompilationJob. To get information about a particular model compilation job you have created, use DescribeCompilationJob.

', 'ListEndpointConfigs' => '

Lists endpoint configurations.

', 'ListEndpoints' => '

Lists endpoints.

', 'ListHyperParameterTuningJobs' => '

Gets a list of HyperParameterTuningJobSummary objects that describe the hyperparameter tuning jobs launched in your account.

', 'ListLabelingJobs' => '

Gets a list of labeling jobs.

', 'ListLabelingJobsForWorkteam' => '

Gets a list of labeling jobs assigned to a specified work team.

', 'ListModelPackages' => '

Lists the model packages that have been created.

', 'ListModels' => '

Lists models created with the CreateModel API.

', 'ListNotebookInstanceLifecycleConfigs' => '

Lists notebook instance lifecycle configurations created with the CreateNotebookInstanceLifecycleConfig API.

', 'ListNotebookInstances' => '

Returns a list of the Amazon SageMaker notebook instances in the requester\'s account in an AWS Region.

', 'ListSubscribedWorkteams' => '

Gets a list of the work teams that you are subscribed to in the AWS Marketplace. The list may be empty if no work team satisfies the filter specified in the NameContains parameter.

', 'ListTags' => '

Returns the tags for the specified Amazon SageMaker resource.

', 'ListTrainingJobs' => '

Lists training jobs.

', 'ListTrainingJobsForHyperParameterTuningJob' => '

Gets a list of TrainingJobSummary objects that describe the training jobs that a hyperparameter tuning job launched.

', 'ListTransformJobs' => '

Lists transform jobs.

', 'ListWorkteams' => '

Gets a list of work teams that you have defined in a region. The list may be empty if no work team satisfies the filter specified in the NameContains parameter.

', 'RenderUiTemplate' => '

Renders the UI template so that you can preview the worker\'s experience.

', 'Search' => '

Finds Amazon SageMaker resources that match a search query. Matching resource objects are returned as a list of SearchResult objects in the response. You can sort the search results by any resource property in ascending or descending order.

You can query against the following value types: numerical, text, Booleans, and timestamps.
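
A minimal sketch of a Search query with the AWS SDK for PHP; the filter on completed training jobs is a placeholder assumption:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

// Newest completed training jobs first.
$result = $sagemaker->search([
    'Resource' => 'TrainingJob',
    'SearchExpression' => [
        'Filters' => [[
            'Name'     => 'TrainingJobStatus',
            'Operator' => 'Equals',
            'Value'    => 'Completed',
        ]],
        'Operator' => 'And',
    ],
    'SortBy'    => 'CreationTime',
    'SortOrder' => 'Descending',
]);
foreach ($result['Results'] as $match) {
    echo $match['TrainingJob']['TrainingJobName'], PHP_EOL;
}
```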

', 'StartNotebookInstance' => '

Launches an ML compute instance with the latest version of the libraries and attaches your ML storage volume. After configuring the notebook instance, Amazon SageMaker sets the notebook instance status to InService. A notebook instance\'s status must be InService before you can connect to your Jupyter notebook.

', 'StopCompilationJob' => '

Stops a model compilation job.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal. This gracefully shuts the job down. If the job hasn\'t stopped, it sends the SIGKILL signal.

When it receives a StopCompilationJob request, Amazon SageMaker changes the CompilationJobSummary$CompilationJobStatus of the job to Stopping. After Amazon SageMaker stops the job, it sets the CompilationJobSummary$CompilationJobStatus to Stopped.

', 'StopHyperParameterTuningJob' => '

Stops a running hyperparameter tuning job and all running training jobs that the tuning job launched.

All model artifacts output from the training jobs are stored in Amazon Simple Storage Service (Amazon S3). All data that the training jobs write to Amazon CloudWatch Logs are still available in CloudWatch. After the tuning job moves to the Stopped state, it releases all reserved resources for the tuning job.

', 'StopLabelingJob' => '

Stops a running labeling job. A job that is stopped cannot be restarted. Any results obtained before the job is stopped are placed in the Amazon S3 output bucket.

', 'StopNotebookInstance' => '

Terminates the ML compute instance. Before terminating the instance, Amazon SageMaker disconnects the ML storage volume from it. Amazon SageMaker preserves the ML storage volume.

To access data on the ML storage volume for a notebook instance that has been terminated, call the StartNotebookInstance API. StartNotebookInstance launches another ML compute instance, configures it, and attaches the preserved ML storage volume so you can continue your work.

', 'StopTrainingJob' => '

Stops a training job. To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms can use this 120-second window to save the model artifacts, so the results of the training are not lost.

When it receives a StopTrainingJob request, Amazon SageMaker changes the status of the job to Stopping. After Amazon SageMaker stops the job, it sets the status to Stopped.
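
A hedged sketch of stopping a job and polling for the Stopped status with the AWS SDK for PHP; the job name and polling interval are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$sagemaker->stopTrainingJob(['TrainingJobName' => 'my-training-job']);

// Poll until the job leaves the Stopping state.
do {
    sleep(30);
    $status = $sagemaker->describeTrainingJob([
        'TrainingJobName' => 'my-training-job',
    ])['TrainingJobStatus'];
} while ($status === 'Stopping');
```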

', 'StopTransformJob' => '

Stops a transform job.

When Amazon SageMaker receives a StopTransformJob request, the status of the job changes to Stopping. After Amazon SageMaker stops the job, the status is set to Stopped. When you stop a transform job before it is completed, Amazon SageMaker doesn\'t store the job\'s output in Amazon S3.

', 'UpdateCodeRepository' => '

Updates the specified Git repository with the specified values.

', 'UpdateEndpoint' => '

Deploys the new EndpointConfig specified in the request, switches to using the newly created endpoint, and then deletes resources provisioned for the endpoint using the previous EndpointConfig (there is no availability loss).

When Amazon SageMaker receives the request, it sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.

You cannot update an endpoint with the current EndpointConfig. To update an endpoint, you must create a new EndpointConfig.
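
A minimal sketch of this call with the AWS SDK for PHP; the endpoint and config names are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

// The new config must differ from the one the endpoint currently uses.
$sagemaker->updateEndpoint([
    'EndpointName'       => 'my-endpoint',
    'EndpointConfigName' => 'my-endpoint-config-v2',
]);

// Status moves Updating -> InService; check it with DescribeEndpoint.
$status = $sagemaker->describeEndpoint([
    'EndpointName' => 'my-endpoint',
])['EndpointStatus'];
```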

', 'UpdateEndpointWeightsAndCapacities' => '

Updates the variant weight of one or more variants associated with an existing endpoint, or the capacity of one variant associated with an existing endpoint. When it receives the request, Amazon SageMaker sets the endpoint status to Updating. After updating the endpoint, it sets the status to InService. To check the status of an endpoint, use the DescribeEndpoint API.
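
A hedged sketch of this request with the AWS SDK for PHP; the endpoint and variant names mirror the earlier CreateEndpointConfig sketch and are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$sagemaker->updateEndpointWeightsAndCapacities([
    'EndpointName' => 'my-endpoint',
    'DesiredWeightsAndCapacities' => [
        // Shift traffic toward variant-b and add an instance to it.
        ['VariantName' => 'variant-a', 'DesiredWeight' => 1.0],
        ['VariantName' => 'variant-b', 'DesiredWeight' => 3.0, 'DesiredInstanceCount' => 2],
    ],
]);
```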

', 'UpdateNotebookInstance' => '

Updates a notebook instance. NotebookInstance updates include upgrading or downgrading the ML compute instance used for your notebook instance to accommodate changes in your workload requirements. You can also update the VPC security groups.

', 'UpdateNotebookInstanceLifecycleConfig' => '

Updates a notebook instance lifecycle configuration created with the CreateNotebookInstanceLifecycleConfig API.

', 'UpdateWorkteam' => '

Updates an existing work team with new member definitions or description.

', ], 'shapes' => [ 'Accept' => [ 'base' => NULL, 'refs' => [ 'TransformOutput$Accept' => '

The MIME type used to specify the output data. Amazon SageMaker uses the MIME type with each HTTP call to transfer data from the transform job.

', ], ], 'AccountId' => [ 'base' => NULL, 'refs' => [ 'LabelingJobForWorkteamSummary$WorkRequesterAccountId' => '

', ], ], 'AddTagsInput' => [ 'base' => NULL, 'refs' => [], ], 'AddTagsOutput' => [ 'base' => NULL, 'refs' => [], ], 'AdditionalCodeRepositoryNamesOrUrls' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$AdditionalCodeRepositories' => '

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URLs of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'DescribeNotebookInstanceOutput$AdditionalCodeRepositories' => '

An array of up to three Git repositories associated with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URLs of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'NotebookInstanceSummary$AdditionalCodeRepositories' => '

An array of up to three Git repositories associated with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URLs of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'UpdateNotebookInstanceInput$AdditionalCodeRepositories' => '

An array of up to three Git repositories to associate with the notebook instance. These can be either the names of Git repositories stored as resources in your account, or the URLs of Git repositories in AWS CodeCommit or in any other Git repository. These repositories are cloned at the same level as the default repository of your notebook instance. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', ], ], 'AlgorithmArn' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$AlgorithmArn' => '

The Amazon Resource Name (ARN) of the algorithm.

', 'CreateAlgorithmOutput$AlgorithmArn' => '

The Amazon Resource Name (ARN) of the new algorithm.

', 'DescribeAlgorithmOutput$AlgorithmArn' => '

The Amazon Resource Name (ARN) of the algorithm.

', ], ], 'AlgorithmImage' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$TrainingImage' => '

The registry path of the Docker image that contains the training algorithm. For information about Docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

', 'HyperParameterAlgorithmSpecification$TrainingImage' => '

The registry path of the Docker image that contains the training algorithm. For information about Docker registry paths for built-in algorithms, see Algorithms Provided by Amazon SageMaker: Common Parameters. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

', ], ], 'AlgorithmSortBy' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$SortBy' => '

The parameter by which to sort the results. The default is CreationTime.

', ], ], 'AlgorithmSpecification' => [ 'base' => '

Specifies the training algorithm to use in a CreateTrainingJob request.

For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about using your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

', 'refs' => [ 'CreateTrainingJobRequest$AlgorithmSpecification' => '

The registry path of the Docker image that contains the training algorithm and algorithm-specific metadata, including the input mode. For more information about algorithms provided by Amazon SageMaker, see Algorithms. For information about providing your own algorithms, see Using Your Own Algorithms with Amazon SageMaker.

', 'DescribeTrainingJobResponse$AlgorithmSpecification' => '

Information about the algorithm used for training, and algorithm metadata.

', 'TrainingJob$AlgorithmSpecification' => '

Information about the algorithm used for training, and algorithm metadata.

', ], ], 'AlgorithmStatus' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$AlgorithmStatus' => '

The overall status of the algorithm.

', 'DescribeAlgorithmOutput$AlgorithmStatus' => '

The current status of the algorithm.

', ], ], 'AlgorithmStatusDetails' => [ 'base' => '

Specifies the validation and image scan statuses of the algorithm.

', 'refs' => [ 'DescribeAlgorithmOutput$AlgorithmStatusDetails' => '

Details about the current status of the algorithm.

', ], ], 'AlgorithmStatusItem' => [ 'base' => '

Represents the overall status of an algorithm.

', 'refs' => [ 'AlgorithmStatusItemList$member' => NULL, ], ], 'AlgorithmStatusItemList' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusDetails$ValidationStatuses' => '

The status of algorithm validation.

', 'AlgorithmStatusDetails$ImageScanStatuses' => '

The status of the scan of the algorithm\'s Docker image container.

', ], ], 'AlgorithmSummary' => [ 'base' => '

Provides summary information about an algorithm.

', 'refs' => [ 'AlgorithmSummaryList$member' => NULL, ], ], 'AlgorithmSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsOutput$AlgorithmSummaryList' => '

An array of AlgorithmSummary objects, each of which lists an algorithm.

', ], ], 'AlgorithmValidationProfile' => [ 'base' => '

Defines a training job and a batch transform job that Amazon SageMaker runs to validate your algorithm.

The data provided in the validation profile is made available to your buyers on AWS Marketplace.

', 'refs' => [ 'AlgorithmValidationProfiles$member' => NULL, ], ], 'AlgorithmValidationProfiles' => [ 'base' => NULL, 'refs' => [ 'AlgorithmValidationSpecification$ValidationProfiles' => '

An array of AlgorithmValidationProfile objects, each of which specifies a training job and batch transform job that Amazon SageMaker runs to validate your algorithm.

', ], ], 'AlgorithmValidationSpecification' => [ 'base' => '

Specifies configurations for one or more training jobs that Amazon SageMaker runs to test the algorithm.

', 'refs' => [ 'CreateAlgorithmInput$ValidationSpecification' => '

Specifies configurations for one or more training jobs that Amazon SageMaker runs to test the algorithm\'s training code and, optionally, one or more batch transform jobs that Amazon SageMaker runs to test the algorithm\'s inference code.

', 'DescribeAlgorithmOutput$ValidationSpecification' => '

Details about configurations for one or more training jobs that Amazon SageMaker runs to test the algorithm.

', ], ], 'AnnotationConsolidationConfig' => [ 'base' => '

Configures how labels are consolidated across human workers.

', 'refs' => [ 'HumanTaskConfig$AnnotationConsolidationConfig' => '

Configures how labels are consolidated across human workers.

', ], ], 'ArnOrName' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$AlgorithmName' => '

The name of the algorithm resource to use for the training job. This must be an algorithm resource that you created or subscribe to on AWS Marketplace. If you specify a value for this parameter, you can\'t specify a value for TrainingImage.

', 'ContainerDefinition$ModelPackageName' => '

The name of the model package to use to create the model.

', 'DescribeAlgorithmInput$AlgorithmName' => '

The name of the algorithm to describe.

', 'DescribeModelPackageInput$ModelPackageName' => '

The name of the model package to describe.

', 'HyperParameterAlgorithmSpecification$AlgorithmName' => '

The name of the resource algorithm to use for the hyperparameter tuning job. If you specify a value for this parameter, do not specify a value for TrainingImage.

', 'SourceAlgorithm$AlgorithmName' => '

The name of an algorithm that was used to create the model package. The algorithm must be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you are subscribed to.

', ], ], 'AssemblyType' => [ 'base' => NULL, 'refs' => [ 'TransformOutput$AssembleWith' => '

Defines how to assemble the results of the transform job as a single S3 object. Choose a format that is most convenient to you. To concatenate the results in binary format, specify None. To add a newline character at the end of every transformed record, specify Line.

', ], ], 'AttributeName' => [ 'base' => NULL, 'refs' => [ 'AttributeNames$member' => NULL, ], ], 'AttributeNames' => [ 'base' => NULL, 'refs' => [ 'S3DataSource$AttributeNames' => '

A list of one or more attribute names to use that are found in a specified augmented manifest file.

', ], ], 'BatchStrategy' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$BatchStrategy' => '

Specifies the number of records to include in a mini-batch for an HTTP inference request. A record is a single unit of input data that inference can be made on. For example, a single line in a CSV file is a record.

To enable the batch strategy, you must set SplitType to Line, RecordIO, or TFRecord.

To use only one record when making an HTTP invocation request to a container, set BatchStrategy to SingleRecord and SplitType to Line.

To fit as many records in a mini-batch as can fit within the MaxPayloadInMB limit, set BatchStrategy to MultiRecord and SplitType to Line.
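
For example, a hedged fragment of a CreateTransformJob request showing the two strategies described above; the payload limit and the elided fields (marked with comments) are placeholder assumptions:

```php
// One CSV line per HTTP invocation:
$params = [
    'BatchStrategy'  => 'SingleRecord',
    'TransformInput' => ['SplitType' => 'Line' /* plus DataSource, ContentType */],
];

// Or pack as many lines as fit under the payload limit into each request:
$params = [
    'BatchStrategy'  => 'MultiRecord',
    'MaxPayloadInMB' => 6,
    'TransformInput' => ['SplitType' => 'Line' /* plus DataSource, ContentType */],
];
```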

', 'DescribeTransformJobResponse$BatchStrategy' => '

Specifies the number of records to include in a mini-batch for an HTTP inference request. A record is a single unit of input data that inference can be made on. For example, a single line in a CSV file is a record.

To enable the batch strategy, you must set SplitType to Line, RecordIO, or TFRecord.

', 'TransformJobDefinition$BatchStrategy' => '

A string that determines the number of records included in a single mini-batch.

SingleRecord means only one record is used per mini-batch. MultiRecord means a mini-batch is set to contain as many records as can fit within the MaxPayloadInMB limit.

', ], ], 'Boolean' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$IsRequired' => '

Indicates whether the channel is required by the algorithm.

', 'CreateModelInput$EnableNetworkIsolation' => '

Isolates the model container. No inbound or outbound network calls can be made to or from the model container.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'CreateTrainingJobRequest$EnableNetworkIsolation' => '

Isolates the training container. No inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'CreateTrainingJobRequest$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training. For more information, see Protect Communications Between ML Compute Instances in a Distributed Training Job.

', 'DescribeModelOutput$EnableNetworkIsolation' => '

If True, no inbound or outbound network calls can be made to or from the model container.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'DescribeTrainingJobResponse$EnableNetworkIsolation' => '

If True, no inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If you enable network isolation for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'DescribeTrainingJobResponse$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training.

', 'HyperParameterSpecification$IsTunable' => '

Indicates whether this hyperparameter is tunable in a hyperparameter tuning job.

', 'HyperParameterSpecification$IsRequired' => '

Indicates whether this hyperparameter is required.

', 'HyperParameterTrainingJobDefinition$EnableNetworkIsolation' => '

Isolates the training container. No inbound or outbound network calls can be made, except for calls between peers within a training cluster for distributed training. If network isolation is used for training jobs that are configured to use a VPC, Amazon SageMaker downloads and uploads customer data and model artifacts through the specified VPC, but the training container does not have network access.

The Semantic Segmentation built-in algorithm does not support network isolation.

', 'HyperParameterTrainingJobDefinition$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training.

', 'TrainingJob$EnableNetworkIsolation' => '

If the TrainingJob was created with network isolation, the value is set to true. If network isolation is enabled, nodes can\'t communicate beyond the VPC they run in.

', 'TrainingJob$EnableInterContainerTrafficEncryption' => '

To encrypt all communications between ML compute instances in distributed training, choose True. Encryption provides greater security for distributed training, but training might take longer. How long it takes depends on the amount of communication between compute instances, especially if you use a deep learning algorithm in distributed training.

', 'TrainingSpecification$SupportsDistributedTraining' => '

Indicates whether the algorithm supports distributed training. If set to false, buyers can\'t request more than one instance during training.

', ], ], 'BooleanOperator' => [ 'base' => NULL, 'refs' => [ 'SearchExpression$Operator' => '

A Boolean operator used to evaluate the search expression. If you want every conditional statement in all lists to be satisfied for the entire search expression to be true, specify And. If only a single conditional statement needs to be true for the entire search expression to be true, specify Or. The default value is And.

', ], ], 'Branch' => [ 'base' => NULL, 'refs' => [ 'GitConfig$Branch' => '

The default branch for the Git repository.

', ], ], 'CategoricalParameterRange' => [ 'base' => '

A list of categorical hyperparameters to tune.

', 'refs' => [ 'CategoricalParameterRanges$member' => NULL, ], ], 'CategoricalParameterRangeSpecification' => [ 'base' => '

Defines the possible values for a categorical hyperparameter.

', 'refs' => [ 'ParameterRange$CategoricalParameterRangeSpecification' => '

A CategoricalParameterRangeSpecification object that defines the possible values for a categorical hyperparameter.

', ], ], 'CategoricalParameterRanges' => [ 'base' => NULL, 'refs' => [ 'ParameterRanges$CategoricalParameterRanges' => '

The array of CategoricalParameterRange objects that specify ranges of categorical hyperparameters that a hyperparameter tuning job searches.

', ], ], 'Cents' => [ 'base' => NULL, 'refs' => [ 'USD$Cents' => '

The fractional portion, in cents, of the amount.

', ], ], 'CertifyForMarketplace' => [ 'base' => NULL, 'refs' => [ 'CreateAlgorithmInput$CertifyForMarketplace' => '

Whether to certify the algorithm so that it can be listed in AWS Marketplace.

', 'CreateModelPackageInput$CertifyForMarketplace' => '

Whether to certify the model package for listing on AWS Marketplace.

', 'DescribeAlgorithmOutput$CertifyForMarketplace' => '

Whether the algorithm is certified to be listed in AWS Marketplace.

', 'DescribeModelPackageOutput$CertifyForMarketplace' => '

Whether the model package is certified for listing on AWS Marketplace.

', ], ], 'Channel' => [ 'base' => '

A channel is a named input source that training algorithms can consume.

', 'refs' => [ 'InputDataConfig$member' => NULL, ], ], 'ChannelName' => [ 'base' => NULL, 'refs' => [ 'Channel$ChannelName' => '

The name of the channel.

', 'ChannelSpecification$Name' => '

The name of the channel.

', ], ], 'ChannelSpecification' => [ 'base' => '

Defines a named input source, called a channel, to be used by an algorithm.

', 'refs' => [ 'ChannelSpecifications$member' => NULL, ], ], 'ChannelSpecifications' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$TrainingChannels' => '

A list of ChannelSpecification objects, which specify the input sources to be used by the algorithm.

', ], ], 'CodeRepositoryArn' => [ 'base' => NULL, 'refs' => [ 'CodeRepositorySummary$CodeRepositoryArn' => '

The Amazon Resource Name (ARN) of the Git repository.

', 'CreateCodeRepositoryOutput$CodeRepositoryArn' => '

The Amazon Resource Name (ARN) of the new repository.

', 'DescribeCodeRepositoryOutput$CodeRepositoryArn' => '

The Amazon Resource Name (ARN) of the Git repository.

', 'UpdateCodeRepositoryOutput$CodeRepositoryArn' => '

The ARN of the Git repository.

', ], ], 'CodeRepositoryContains' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$DefaultCodeRepositoryContains' => '

A string in the name or URL of a Git repository associated with this notebook instance. This filter returns only notebook instances associated with a Git repository whose name contains the specified string.

', ], ], 'CodeRepositoryNameContains' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesInput$NameContains' => '

A string in the Git repository name. This filter returns only repositories whose name contains the specified string.

', ], ], 'CodeRepositoryNameOrUrl' => [ 'base' => NULL, 'refs' => [ 'AdditionalCodeRepositoryNamesOrUrls$member' => NULL, 'CreateNotebookInstanceInput$DefaultCodeRepository' => '

A Git repository to associate with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'DescribeNotebookInstanceOutput$DefaultCodeRepository' => '

The Git repository associated with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'ListNotebookInstancesInput$AdditionalCodeRepositoryEquals' => '

A filter that returns only notebook instances associated with the specified Git repository.

', 'NotebookInstanceSummary$DefaultCodeRepository' => '

The Git repository associated with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', 'UpdateNotebookInstanceInput$DefaultCodeRepository' => '

The Git repository to associate with the notebook instance as its default code repository. This can be either the name of a Git repository stored as a resource in your account, or the URL of a Git repository in AWS CodeCommit or in any other Git repository. When you open a notebook instance, it opens in the directory that contains this repository. For more information, see Associating Git Repositories with Amazon SageMaker Notebook Instances.

', ], ], 'CodeRepositorySortBy' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesInput$SortBy' => '

The field to sort results by. The default is Name.

', ], ], 'CodeRepositorySortOrder' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesInput$SortOrder' => '

The sort order for results. The default is Ascending.

', ], ], 'CodeRepositorySummary' => [ 'base' => '

Specifies summary information about a Git repository.

', 'refs' => [ 'CodeRepositorySummaryList$member' => NULL, ], ], 'CodeRepositorySummaryList' => [ 'base' => NULL, 'refs' => [ 'ListCodeRepositoriesOutput$CodeRepositorySummaryList' => '

Gets a list of summaries of the Git repositories. Each summary specifies the following values for the repository:

  • Name

  • Amazon Resource Name (ARN)

  • Creation time

  • Last modified time

  • Configuration information, including the URL location of the repository and the ARN of the AWS Secrets Manager secret that contains the credentials used to access the repository.

', ], ], 'CognitoClientId' => [ 'base' => NULL, 'refs' => [ 'CognitoMemberDefinition$ClientId' => '

An identifier for an application client. You must create the app client ID using Amazon Cognito.

', ], ], 'CognitoMemberDefinition' => [ 'base' => '

Identifies an Amazon Cognito user group. A user group can be used in one or more work teams.

', 'refs' => [ 'MemberDefinition$CognitoMemberDefinition' => '

The Amazon Cognito user group that is part of the work team.

', ], ], 'CognitoUserGroup' => [ 'base' => NULL, 'refs' => [ 'CognitoMemberDefinition$UserGroup' => '

An identifier for a user group.

', ], ], 'CognitoUserPool' => [ 'base' => NULL, 'refs' => [ 'CognitoMemberDefinition$UserPool' => '

An identifier for a user pool. The user pool must be in the same region as the service that you are calling.

', ], ], 'CompilationJobArn' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationJobArn' => '

The Amazon Resource Name (ARN) of the model compilation job.

', 'CreateCompilationJobResponse$CompilationJobArn' => '

If the action is successful, the service sends back an HTTP 200 response. Amazon SageMaker returns the following data in JSON format:

  • CompilationJobArn: The Amazon Resource Name (ARN) of the model compilation job.

', 'DescribeCompilationJobResponse$CompilationJobArn' => '

The Amazon Resource Name (ARN) of the model compilation job.

', ], ], 'CompilationJobStatus' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationJobStatus' => '

The status of the model compilation job.

', 'DescribeCompilationJobResponse$CompilationJobStatus' => '

The status of the model compilation job.

', 'ListCompilationJobsRequest$StatusEquals' => '

A filter that retrieves model compilation jobs with a specific DescribeCompilationJobResponse$CompilationJobStatus status.

', ], ], 'CompilationJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListCompilationJobsResponse$CompilationJobSummaries' => '

An array of CompilationJobSummary objects, each describing a model compilation job.

', ], ], 'CompilationJobSummary' => [ 'base' => '

A summary of a model compilation job.

', 'refs' => [ 'CompilationJobSummaries$member' => NULL, ], ], 'CompressionType' => [ 'base' => NULL, 'refs' => [ 'Channel$CompressionType' => '

If training data is compressed, the compression type. The default value is None. CompressionType is used only in Pipe input mode. In File mode, leave this field unset or set it to None.

', 'CompressionTypes$member' => NULL, 'TransformInput$CompressionType' => '

If your transform data is compressed, specify the compression type. Amazon SageMaker automatically decompresses the data for the transform job accordingly. The default value is None.

', ], ], 'CompressionTypes' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$SupportedCompressionTypes' => '

The allowed compression types, if data compression is used.

', ], ], 'ContainerDefinition' => [ 'base' => '

Describes the container, as part of model definition.

', 'refs' => [ 'ContainerDefinitionList$member' => NULL, 'CreateModelInput$PrimaryContainer' => '

The location of the primary Docker image containing inference code, associated artifacts, and a custom environment map that the inference code uses when the model is deployed for predictions.

', 'DescribeModelOutput$PrimaryContainer' => '

The location of the primary inference code, associated artifacts, and custom environment map that the inference code uses when it is deployed in production.

', ], ], 'ContainerDefinitionList' => [ 'base' => NULL, 'refs' => [ 'CreateModelInput$Containers' => '

Specifies the containers in the inference pipeline.

', 'DescribeModelOutput$Containers' => '

The containers in the inference pipeline.

', ], ], 'ContainerHostname' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$ContainerHostname' => '

This parameter is ignored.

', 'ModelPackageContainerDefinition$ContainerHostname' => '

The DNS host name for the Docker container.

', ], ], 'ContentClassifier' => [ 'base' => NULL, 'refs' => [ 'ContentClassifiers$member' => NULL, ], ], 'ContentClassifiers' => [ 'base' => NULL, 'refs' => [ 'LabelingJobDataAttributes$ContentClassifiers' => '

Declares that your content is free of personally identifiable information or adult content. Amazon SageMaker may restrict the Amazon Mechanical Turk workers that can view your task based on this information.

', ], ], 'ContentType' => [ 'base' => NULL, 'refs' => [ 'Channel$ContentType' => '

The MIME type of the data.

', 'ContentTypes$member' => NULL, 'TransformInput$ContentType' => '

The Multipurpose Internet Mail Extensions (MIME) type of the data. Amazon SageMaker uses the MIME type with each HTTP call to transfer data to the transform job.

', ], ], 'ContentTypes' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$SupportedContentTypes' => '

The supported MIME types for the data.

', 'InferenceSpecification$SupportedContentTypes' => '

The supported MIME types for the input data.

', ], ], 'ContinuousParameterRange' => [ 'base' => '

A list of continuous hyperparameters to tune.

', 'refs' => [ 'ContinuousParameterRanges$member' => NULL, ], ], 'ContinuousParameterRangeSpecification' => [ 'base' => '

Defines the possible values for a continuous hyperparameter.

', 'refs' => [ 'ParameterRange$ContinuousParameterRangeSpecification' => '

A ContinuousParameterRangeSpecification object that defines the possible values for a continuous hyperparameter.

', ], ], 'ContinuousParameterRanges' => [ 'base' => NULL, 'refs' => [ 'ParameterRanges$ContinuousParameterRanges' => '

The array of ContinuousParameterRange objects that specify ranges of continuous hyperparameters that a hyperparameter tuning job searches.

', ], ], 'CreateAlgorithmInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateAlgorithmOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateCodeRepositoryOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateCompilationJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateCompilationJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateEndpointOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateHyperParameterTuningJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateLabelingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateLabelingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelPackageInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateModelPackageOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceLifecycleConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateNotebookInstanceOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreatePresignedNotebookInstanceUrlInput' => [ 'base' => NULL, 'refs' => [], ], 'CreatePresignedNotebookInstanceUrlOutput' => [ 'base' => NULL, 'refs' => [], ], 'CreateTrainingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateTrainingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateTransformJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateTransformJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreateWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'CreateWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'CreationTime' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$CreationTime' => '

A timestamp that shows when the algorithm was created.

', 'CodeRepositorySummary$CreationTime' => '

The date and time that the Git repository was created.

', 'CompilationJobSummary$CreationTime' => '

The time when the model compilation job was created.

', 'DescribeAlgorithmOutput$CreationTime' => '

A timestamp specifying when the algorithm was created.

', 'DescribeCodeRepositoryOutput$CreationTime' => '

The date and time that the repository was created.

', 'DescribeCompilationJobResponse$CreationTime' => '

The time that the model compilation job was created.

', 'DescribeModelPackageOutput$CreationTime' => '

A timestamp specifying when the model package was created.

', 'DescribeNotebookInstanceLifecycleConfigOutput$CreationTime' => '

A timestamp that tells when the lifecycle configuration was created.

', 'DescribeNotebookInstanceOutput$CreationTime' => '

A timestamp. Use this parameter to return the time when the notebook instance was created.

', 'ListAlgorithmsInput$CreationTimeAfter' => '

A filter that returns only algorithms created after the specified time (timestamp).

', 'ListAlgorithmsInput$CreationTimeBefore' => '

A filter that returns only algorithms created before the specified time (timestamp).

', 'ListCodeRepositoriesInput$CreationTimeAfter' => '

A filter that returns only Git repositories that were created after the specified time.

', 'ListCodeRepositoriesInput$CreationTimeBefore' => '

A filter that returns only Git repositories that were created before the specified time.

', 'ListCompilationJobsRequest$CreationTimeAfter' => '

A filter that returns the model compilation jobs that were created after a specified time.

', 'ListCompilationJobsRequest$CreationTimeBefore' => '

A filter that returns the model compilation jobs that were created before a specified time.

', 'ListModelPackagesInput$CreationTimeAfter' => '

A filter that returns only model packages created after the specified time (timestamp).

', 'ListModelPackagesInput$CreationTimeBefore' => '

A filter that returns only model packages created before the specified time (timestamp).

', 'ListNotebookInstanceLifecycleConfigsInput$CreationTimeBefore' => '

A filter that returns only lifecycle configurations that were created before the specified time (timestamp).

', 'ListNotebookInstanceLifecycleConfigsInput$CreationTimeAfter' => '

A filter that returns only lifecycle configurations that were created after the specified time (timestamp).

', 'ListNotebookInstancesInput$CreationTimeBefore' => '

A filter that returns only notebook instances that were created before the specified time (timestamp).

', 'ListNotebookInstancesInput$CreationTimeAfter' => '

A filter that returns only notebook instances that were created after the specified time (timestamp).

', 'ModelPackageSummary$CreationTime' => '

A timestamp that shows when the model package was created.

', 'NotebookInstanceLifecycleConfigSummary$CreationTime' => '

A timestamp that tells when the lifecycle configuration was created.

', 'NotebookInstanceSummary$CreationTime' => '

A timestamp that shows when the notebook instance was created.

', ], ], 'DataInputConfig' => [ 'base' => NULL, 'refs' => [ 'InputConfig$DataInputConfig' => '

Specifies the name and shape of the expected data inputs for your trained model in JSON dictionary form. The data inputs are InputConfig$Framework specific; a request sketch follows this list.

  • TensorFlow: You must specify the name and shape (NHWC format) of the expected data inputs using a dictionary format for your trained model. The dictionary formats required for the console and CLI are different.

    • Examples for one input:

      • If using the console, {"input":[1,1024,1024,3]}

      • If using the CLI, {\\"input\\":[1,1024,1024,3]}

    • Examples for two inputs:

      • If using the console, {"data1": [1,28,28,1], "data2":[1,28,28,1]}

      • If using the CLI, {\\"data1\\": [1,28,28,1], \\"data2\\":[1,28,28,1]}

  • MXNET/ONNX: You must specify the name and shape (NCHW format) of the expected data inputs in order using a dictionary format for your trained model. The dictionary formats required for the console and CLI are different.

    • Examples for one input:

      • If using the console, {"data":[1,3,1024,1024]}

      • If using the CLI, {\\"data\\":[1,3,1024,1024]}

    • Examples for two inputs:

      • If using the console, {"var1": [1,1,28,28], "var2":[1,1,28,28]}

      • If using the CLI, {\\"var1\\": [1,1,28,28], \\"var2\\":[1,1,28,28]}

  • PyTorch: You can either specify the name and shape (NCHW format) of expected data inputs in order using a dictionary format for your trained model or you can specify the shape only using a list format. The dictionary formats required for the console and CLI are different. The list formats for the console and CLI are the same.

    • Examples for one input in dictionary format:

      • If using the console, {"input0":[1,3,224,224]}

      • If using the CLI, {\\"input0\\":[1,3,224,224]}

    • Example for one input in list format: [[1,3,224,224]]

    • Examples for two inputs in dictionary format:

      • If using the console, {"input0":[1,3,224,224], "input1":[1,3,224,224]}

      • If using the CLI, {\\"input0\\":[1,3,224,224], \\"input1\\":[1,3,224,224]}

    • Example for two inputs in list format: [[1,3,224,224], [1,3,224,224]]

  • XGBOOST: input data name and shape are not needed.
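
A hedged CreateCompilationJob sketch with the AWS SDK for PHP showing where DataInputConfig fits, using the TensorFlow single-input example above; the job name, bucket, and role ARN are placeholder assumptions:

```php
<?php
require 'vendor/autoload.php';

$sagemaker = new Aws\SageMaker\SageMakerClient([
    'region' => 'us-west-2', 'version' => '2017-07-24',
]);

$result = $sagemaker->createCompilationJob([
    'CompilationJobName' => 'my-compilation-job',
    'RoleArn'            => 'arn:aws:iam::123456789012:role/SageMakerExecutionRole',
    'InputConfig' => [
        'S3Uri'           => 's3://my-bucket/model.tar.gz',
        'Framework'       => 'TENSORFLOW',
        // JSON dictionary naming the input and its NHWC shape, as above.
        'DataInputConfig' => '{"input":[1,1024,1024,3]}',
    ],
    'OutputConfig' => [
        'S3OutputLocation' => 's3://my-bucket/compiled/',
        'TargetDevice'     => 'rk3399',   // one of the newly supported targets
    ],
    'StoppingCondition' => ['MaxRuntimeInSeconds' => 900],
]);
```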

', ], ], 'DataSource' => [ 'base' => '

Describes the location of the channel data.

', 'refs' => [ 'Channel$DataSource' => '

The location of the channel data.

', ], ], 'DeleteAlgorithmInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteEndpointConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteModelInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteModelPackageInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteTagsInput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteTagsOutput' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'DeleteWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'DeployedImage' => [ 'base' => '

Gets the Amazon EC2 Container Registry path of the Docker image of the model that is hosted in this ProductionVariant.

If you used the registry/repository[:tag] form to specify the image path of the primary container when you created the model hosted in this ProductionVariant, the path resolves to a path of the form registry/repository[@digest]. A digest is a hash value that identifies a specific version of an image. For information about Amazon ECR paths, see Pulling an Image in the Amazon ECR User Guide.

', 'refs' => [ 'DeployedImages$member' => NULL, ], ], 'DeployedImages' => [ 'base' => NULL, 'refs' => [ 'ProductionVariantSummary$DeployedImages' => '

An array of DeployedImage objects that specify the Amazon EC2 Container Registry paths of the inference images deployed on instances of this ProductionVariant.

', ], ], 'DescribeAlgorithmInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeAlgorithmOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCodeRepositoryOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCompilationJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeCompilationJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeEndpointOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeHyperParameterTuningJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeLabelingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeLabelingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelPackageInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeModelPackageOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceLifecycleConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeNotebookInstanceOutput' => [ 'base' => NULL, 'refs' => [], ], 'DescribeSubscribedWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeSubscribedWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTrainingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTrainingJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTransformJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeTransformJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'DescribeWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'DescribeWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'DesiredWeightAndCapacity' => [ 'base' => '

Specifies weight and capacity values for a production variant.

', 'refs' => [ 'DesiredWeightAndCapacityList$member' => NULL, ], ], 'DesiredWeightAndCapacityList' => [ 'base' => NULL, 'refs' => [ 'UpdateEndpointWeightsAndCapacitiesInput$DesiredWeightsAndCapacities' => '

An array of DesiredWeightAndCapacity objects that provide new capacity and weight values for the variants.
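
For illustration, a minimal sketch of shifting traffic between variants with the AWS SDK for PHP; the region, endpoint name, and variant names below are placeholders:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    // Traffic routed to each variant is proportional to its weight
    // relative to the sum of all variant weights.
    $result = $client->updateEndpointWeightsAndCapacities([
        'EndpointName' => 'my-endpoint', // placeholder
        'DesiredWeightsAndCapacities' => [
            ['VariantName' => 'variant-a', 'DesiredWeight' => 1.0],
            ['VariantName' => 'variant-b', 'DesiredWeight' => 3.0],
        ],
    ]);
    echo $result['EndpointArn'], PHP_EOL;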

', ], ], 'DetailedAlgorithmStatus' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusItem$Status' => '

The current status.

', ], ], 'DetailedModelPackageStatus' => [ 'base' => NULL, 'refs' => [ 'ModelPackageStatusItem$Status' => '

The current status.

', ], ], 'DirectInternetAccess' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$DirectInternetAccess' => '

Sets whether Amazon SageMaker provides internet access to the notebook instance. If you set this to Disabled, this notebook instance is able to access resources only in your VPC, and it cannot connect to Amazon SageMaker training and endpoint services unless you configure a NAT Gateway in your VPC.

For more information, see Notebook Instances Are Internet-Enabled by Default. You can set the value of this parameter to Disabled only if you set a value for the SubnetId parameter.
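
As a sketch, creating a notebook instance without direct internet access using the AWS SDK for PHP; the role ARN, subnet ID, and security group ID below are placeholders:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    $client->createNotebookInstance([
        'NotebookInstanceName' => 'my-notebook',
        'InstanceType'         => 'ml.t2.medium',
        'RoleArn'              => 'arn:aws:iam::123456789012:role/SageMakerRole', // placeholder
        'SubnetId'             => 'subnet-0123456789abcdef0', // required when disabling direct access
        'SecurityGroupIds'     => ['sg-0123456789abcdef0'],   // placeholder
        'DirectInternetAccess' => 'Disabled',
    ]);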

', 'DescribeNotebookInstanceOutput$DirectInternetAccess' => '

Describes whether Amazon SageMaker provides internet access to the notebook instance. If this value is set to Disabled, the notebook instance does not have internet access, and cannot connect to Amazon SageMaker training and endpoint services.

For more information, see Notebook Instances Are Internet-Enabled by Default.

', ], ], 'DisassociateAdditionalCodeRepositories' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateAdditionalCodeRepositories' => '

A list of names or URLs of the additional Git repositories to remove from this notebook instance.

', ], ], 'DisassociateDefaultCodeRepository' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateDefaultCodeRepository' => '

The name or URL of the default Git repository to remove from this notebook instance.

', ], ], 'DisassociateNotebookInstanceAcceleratorTypes' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateAcceleratorTypes' => '

A list of the Elastic Inference (EI) instance types to remove from this notebook instance.

', ], ], 'DisassociateNotebookInstanceLifecycleConfig' => [ 'base' => NULL, 'refs' => [ 'UpdateNotebookInstanceInput$DisassociateLifecycleConfig' => '

Set to true to remove the notebook instance lifecycle configuration currently associated with the notebook instance.
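
A minimal sketch of removing both the lifecycle configuration and the default repository from a stopped notebook instance with the AWS SDK for PHP; the instance name is a placeholder:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    $client->updateNotebookInstance([
        'NotebookInstanceName'              => 'my-notebook', // placeholder
        'DisassociateLifecycleConfig'       => true,
        'DisassociateDefaultCodeRepository' => true,
    ]);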

', ], ], 'Dollars' => [ 'base' => NULL, 'refs' => [ 'USD$Dollars' => '

The whole number of dollars in the amount.

', ], ], 'EndpointArn' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'DescribeEndpointOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'EndpointSummary$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'UpdateEndpointOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the endpoint.

', 'UpdateEndpointWeightsAndCapacitiesOutput$EndpointArn' => '

The Amazon Resource Name (ARN) of the updated endpoint.

', ], ], 'EndpointConfigArn' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigOutput$EndpointConfigArn' => '

The Amazon Resource Name (ARN) of the endpoint configuration.

', 'DescribeEndpointConfigOutput$EndpointConfigArn' => '

The Amazon Resource Name (ARN) of the endpoint configuration.

', 'EndpointConfigSummary$EndpointConfigArn' => '

The Amazon Resource Name (ARN) of the endpoint configuration.

', ], ], 'EndpointConfigName' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigInput$EndpointConfigName' => '

The name of the endpoint configuration. You specify this name in a CreateEndpoint request.

', 'CreateEndpointInput$EndpointConfigName' => '

The name of an endpoint configuration. For more information, see CreateEndpointConfig.

', 'DeleteEndpointConfigInput$EndpointConfigName' => '

The name of the endpoint configuration that you want to delete.

', 'DescribeEndpointConfigInput$EndpointConfigName' => '

The name of the endpoint configuration.

', 'DescribeEndpointConfigOutput$EndpointConfigName' => '

Name of the Amazon SageMaker endpoint configuration.

', 'DescribeEndpointOutput$EndpointConfigName' => '

The name of the endpoint configuration associated with this endpoint.

', 'EndpointConfigSummary$EndpointConfigName' => '

The name of the endpoint configuration.

', 'UpdateEndpointInput$EndpointConfigName' => '

The name of the new endpoint configuration.

', ], ], 'EndpointConfigNameContains' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$NameContains' => '

A string in the endpoint configuration name. This filter returns only endpoint configurations whose name contains the specified string.

', ], ], 'EndpointConfigSortKey' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'EndpointConfigSummary' => [ 'base' => '

Provides summary information for an endpoint configuration.

', 'refs' => [ 'EndpointConfigSummaryList$member' => NULL, ], ], 'EndpointConfigSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsOutput$EndpointConfigs' => '

An array of endpoint configurations.

', ], ], 'EndpointName' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointInput$EndpointName' => '

The name of the endpoint. The name must be unique within an AWS Region in your AWS account.

', 'DeleteEndpointInput$EndpointName' => '

The name of the endpoint that you want to delete.

', 'DescribeEndpointInput$EndpointName' => '

The name of the endpoint.

', 'DescribeEndpointOutput$EndpointName' => '

Name of the endpoint.

', 'EndpointSummary$EndpointName' => '

The name of the endpoint.

', 'UpdateEndpointInput$EndpointName' => '

The name of the endpoint whose configuration you want to update.

', 'UpdateEndpointWeightsAndCapacitiesInput$EndpointName' => '

The name of an existing Amazon SageMaker endpoint.

', ], ], 'EndpointNameContains' => [ 'base' => NULL, 'refs' => [ 'ListEndpointsInput$NameContains' => '

A string in endpoint names. This filter returns only endpoints whose name contains the specified string.

', ], ], 'EndpointSortKey' => [ 'base' => NULL, 'refs' => [ 'ListEndpointsInput$SortBy' => '

Sorts the list of results. The default is CreationTime.

', ], ], 'EndpointStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeEndpointOutput$EndpointStatus' => '

The status of the endpoint.

  • OutOfService: Endpoint is not available to take incoming requests.

  • Creating: CreateEndpoint is executing.

  • Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

  • SystemUpdating: Endpoint is undergoing maintenance and cannot be updated or deleted or re-scaled until it has completed. This maintenance operation does not change any customer-specified values such as VPC config, KMS encryption, model, instance type, or instance count.

  • RollingBack: The endpoint fails to scale up or down or to change its variant weight and is in the process of rolling back to its previous configuration. Once the rollback completes, the endpoint returns to an InService status. This transitional status applies only to an endpoint that has autoscaling enabled and is undergoing variant weight or capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called explicitly.

  • InService: Endpoint is available to process incoming requests.

  • Deleting: DeleteEndpoint is executing.

  • Failed: Endpoint could not be created, updated, or re-scaled. Use DescribeEndpointOutput$FailureReason for information about the failure. DeleteEndpoint is the only operation that can be performed on a failed endpoint.

', 'EndpointSummary$EndpointStatus' => '

The status of the endpoint.

  • OutOfService: Endpoint is not available to take incoming requests.

  • Creating: CreateEndpoint is executing.

  • Updating: UpdateEndpoint or UpdateEndpointWeightsAndCapacities is executing.

  • SystemUpdating: Endpoint is undergoing maintenance and cannot be updated or deleted or re-scaled until it has completed. This maintenance operation does not change any customer-specified values such as VPC config, KMS encryption, model, instance type, or instance count.

  • RollingBack: The endpoint fails to scale up or down or to change its variant weight and is in the process of rolling back to its previous configuration. Once the rollback completes, the endpoint returns to an InService status. This transitional status applies only to an endpoint that has autoscaling enabled and is undergoing variant weight or capacity changes as part of an UpdateEndpointWeightsAndCapacities call or when the UpdateEndpointWeightsAndCapacities operation is called explicitly.

  • InService: Endpoint is available to process incoming requests.

  • Deleting: DeleteEndpoint is executing.

  • Failed: Endpoint could not be created, updated, or re-scaled. Use DescribeEndpointOutput$FailureReason for information about the failure. DeleteEndpoint is the only operation that can be performed on a failed endpoint.

To get a list of endpoints with a specified status, use the ListEndpointsInput$StatusEquals filter.

', 'ListEndpointsInput$StatusEquals' => '

A filter that returns only endpoints with the specified status.
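
For example, a short sketch that lists only InService endpoints with the AWS SDK for PHP:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    $result = $client->listEndpoints([
        'StatusEquals' => 'InService',
        'SortBy'       => 'CreationTime',
    ]);
    foreach ($result['Endpoints'] as $endpoint) {
        echo $endpoint['EndpointName'], PHP_EOL;
    }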

', ], ], 'EndpointSummary' => [ 'base' => '

Provides summary information for an endpoint.

', 'refs' => [ 'EndpointSummaryList$member' => NULL, ], ], 'EndpointSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListEndpointsOutput$Endpoints' => '

An array of endpoint objects.

', ], ], 'EntityDescription' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSummary$AlgorithmDescription' => '

A brief description of the algorithm.

', 'ChannelSpecification$Description' => '

A brief description of the channel.

', 'CreateAlgorithmInput$AlgorithmDescription' => '

A description of the algorithm.

', 'CreateModelPackageInput$ModelPackageDescription' => '

A description of the model package.

', 'DescribeAlgorithmOutput$AlgorithmDescription' => '

A brief summary about the algorithm.

', 'DescribeModelPackageOutput$ModelPackageDescription' => '

A brief summary of the model package.

', 'HyperParameterSpecification$Description' => '

A brief description of the hyperparameter.

', 'ModelPackageSummary$ModelPackageDescription' => '

A brief description of the model package.

', ], ], 'EntityName' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusItem$Name' => '

The name of the algorithm for which the overall status is being reported.

', 'AlgorithmSummary$AlgorithmName' => '

The name of the algorithm that is described by the summary.

', 'AlgorithmValidationProfile$ProfileName' => '

The name of the profile for the algorithm. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'CodeRepositorySummary$CodeRepositoryName' => '

The name of the Git repository.

', 'CompilationJobSummary$CompilationJobName' => '

The name of the model compilation job that you want a summary for.

', 'CreateAlgorithmInput$AlgorithmName' => '

The name of the algorithm.

', 'CreateCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'CreateCompilationJobRequest$CompilationJobName' => '

A name for the model compilation job. The name must be unique within the AWS Region and within your AWS account.

', 'CreateModelPackageInput$ModelPackageName' => '

The name of the model package. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'DeleteAlgorithmInput$AlgorithmName' => '

The name of the algorithm to delete.

', 'DeleteCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository to delete.

', 'DeleteModelPackageInput$ModelPackageName' => '

The name of the model package. The name must have 1 to 63 characters. Valid characters are a-z, A-Z, 0-9, and - (hyphen).

', 'DescribeAlgorithmOutput$AlgorithmName' => '

The name of the algorithm being described.

', 'DescribeCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository to describe.

', 'DescribeCodeRepositoryOutput$CodeRepositoryName' => '

The name of the Git repository.

', 'DescribeCompilationJobRequest$CompilationJobName' => '

The name of the model compilation job that you want information about.

', 'DescribeCompilationJobResponse$CompilationJobName' => '

The name of the model compilation job.

', 'DescribeModelPackageOutput$ModelPackageName' => '

The name of the model package being described.

', 'ModelPackageStatusItem$Name' => '

The name of the model package for which the overall status is being reported.

', 'ModelPackageSummary$ModelPackageName' => '

The name of the model package.

', 'ModelPackageValidationProfile$ProfileName' => '

The name of the profile for the model package.

', 'StopCompilationJobRequest$CompilationJobName' => '

The name of the model compilation job to stop.

', 'UpdateCodeRepositoryInput$CodeRepositoryName' => '

The name of the Git repository to update.

', ], ], 'EnvironmentKey' => [ 'base' => NULL, 'refs' => [ 'EnvironmentMap$key' => NULL, ], ], 'EnvironmentMap' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$Environment' => '

The environment variables to set in the Docker container. Each key and value in the Environment string-to-string map can have a length of up to 1024 characters. We support up to 16 entries in the map.
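
A minimal sketch of setting container environment variables in a CreateModel request with the AWS SDK for PHP; the model name, image path, artifact location, and role ARN are placeholders:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    $client->createModel([
        'ModelName' => 'my-model',
        'PrimaryContainer' => [
            'Image'        => '123456789012.dkr.ecr.us-west-2.amazonaws.com/my-image:latest', // placeholder
            'ModelDataUrl' => 's3://my-bucket/model.tar.gz',                                  // placeholder
            'Environment'  => [
                // Up to 16 entries; each key and value up to 1024 characters.
                'LOG_LEVEL'   => 'info',
                'NUM_WORKERS' => '4',
            ],
        ],
        'ExecutionRoleArn' => 'arn:aws:iam::123456789012:role/SageMakerRole', // placeholder
    ]);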

', ], ], 'EnvironmentValue' => [ 'base' => NULL, 'refs' => [ 'EnvironmentMap$value' => NULL, ], ], 'FailureReason' => [ 'base' => NULL, 'refs' => [ 'DescribeCompilationJobResponse$FailureReason' => '

If a model compilation job failed, the reason it failed.

', 'DescribeEndpointOutput$FailureReason' => '

If the status of the endpoint is Failed, the reason why it failed.

', 'DescribeHyperParameterTuningJobResponse$FailureReason' => '

If the tuning job failed, the reason it failed.

', 'DescribeLabelingJobResponse$FailureReason' => '

If the job failed, the reason that it failed.

', 'DescribeNotebookInstanceOutput$FailureReason' => '

If status is Failed, the reason it failed.

', 'DescribeTrainingJobResponse$FailureReason' => '

If the training job failed, the reason it failed.

', 'DescribeTransformJobResponse$FailureReason' => '

If the transform job failed, FailureReason describes why it failed. A transform job creates a log file, which includes error messages, and stores it as an Amazon S3 object. For more information, see Log Amazon SageMaker Events with Amazon CloudWatch.

', 'HyperParameterTrainingJobSummary$FailureReason' => '

The reason that the training job failed.

', 'LabelingJobSummary$FailureReason' => '

If the LabelingJobStatus field is Failed, this field contains a description of the error.

', 'ResourceInUse$Message' => NULL, 'ResourceLimitExceeded$Message' => NULL, 'ResourceNotFound$Message' => NULL, 'TrainingJob$FailureReason' => '

If the training job failed, the reason it failed.

', 'TransformJobSummary$FailureReason' => '

If the transform job failed, the reason it failed.

', ], ], 'Filter' => [ 'base' => '

A conditional statement for a search expression that includes a Boolean operator, a resource property, and a value.

If you don\'t specify an Operator and a Value, the filter searches for only the specified property. For example, defining a Filter for the FailureReason for the TrainingJob Resource searches for training job objects that have a value in the FailureReason field.

If you specify a Value, but not an Operator, Amazon SageMaker uses the equals operator as the default.

In search, there are several property types:

Metrics

To define a metric filter, enter a value using the form "Metrics.<name>", where <name> is a metric name. For example, the following filter searches for training jobs with an "accuracy" metric greater than "0.9":

{

"Name": "Metrics.accuracy",

"Operator": "GREATER_THAN",

"Value": "0.9"

}

HyperParameters

To define a hyperparameter filter, enter a value with the form "HyperParameters.<name>". Decimal hyperparameter values are treated as a decimal in a comparison if the specified Value is also a decimal value. If the specified Value is an integer, the decimal hyperparameter values are treated as integers. For example, the following filter is satisfied by training jobs with a "learning_rate" hyperparameter that is less than "0.5":

{

"Name": "HyperParameters.learning_rate",

"Operator": "LESS_THAN",

"Value": "0.5"

}

Tags

To define a tag filter, enter a value with the form "Tags.<key>".
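
Putting these forms together, a sketch of a Search call with a metric filter using the AWS SDK for PHP; the metric name is a placeholder:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    // Find training jobs whose "accuracy" metric exceeded 0.9.
    $result = $client->search([
        'Resource' => 'TrainingJob',
        'SearchExpression' => [
            'Filters' => [[
                'Name'     => 'Metrics.accuracy', // placeholder metric name
                'Operator' => 'GREATER_THAN',
                'Value'    => '0.9',
            ]],
        ],
    ]);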

', 'refs' => [ 'FilterList$member' => NULL, ], ], 'FilterList' => [ 'base' => NULL, 'refs' => [ 'NestedFilters$Filters' => '

A list of filters. Each filter acts on a property. A NestedFilters object must contain at least one filter. For example, a NestedFilters call might include a filter on the PropertyName parameter of the InputDataConfig property: InputDataConfig.DataSource.S3DataSource.S3Uri.

', 'SearchExpression$Filters' => '

A list of filter objects.

', ], ], 'FilterValue' => [ 'base' => NULL, 'refs' => [ 'Filter$Value' => '

A value used with Resource and Operator to determine if objects satisfy the filter\'s condition. For numerical properties, Value must be an integer or floating-point decimal. For timestamp properties, Value must be an ISO 8601 date-time string of the following format: YYYY-mm-dd\'T\'HH:MM:SS.

', ], ], 'FinalHyperParameterTuningJobObjectiveMetric' => [ 'base' => '

Shows the final value for the objective metric for a training job that was launched by a hyperparameter tuning job. You define the objective metric in the HyperParameterTuningJobObjective parameter of HyperParameterTuningJobConfig.

', 'refs' => [ 'HyperParameterTrainingJobSummary$FinalHyperParameterTuningJobObjectiveMetric' => '

The FinalHyperParameterTuningJobObjectiveMetric object that specifies the value of the objective metric of the tuning job that launched this training job.

', ], ], 'FinalMetricDataList' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$FinalMetricDataList' => '

A collection of MetricData objects that specify the names, values, and dates and times that the training algorithm emitted to Amazon CloudWatch.

', 'TrainingJob$FinalMetricDataList' => '

A list of final metric values that are set when the training job completes. Used only if the training job was configured to use metrics.

', ], ], 'Float' => [ 'base' => NULL, 'refs' => [ 'MetricData$Value' => '

The value of the metric.

', ], ], 'Framework' => [ 'base' => NULL, 'refs' => [ 'InputConfig$Framework' => '

Identifies the framework in which the model was trained. For example: TENSORFLOW.

', ], ], 'GetSearchSuggestionsRequest' => [ 'base' => NULL, 'refs' => [], ], 'GetSearchSuggestionsResponse' => [ 'base' => NULL, 'refs' => [], ], 'GitConfig' => [ 'base' => '

Specifies configuration details for a Git repository in your AWS account.

', 'refs' => [ 'CodeRepositorySummary$GitConfig' => '

Configuration details for the Git repository, including the URL where it is located and the ARN of the AWS Secrets Manager secret that contains the credentials used to access the repository.

', 'CreateCodeRepositoryInput$GitConfig' => '

Specifies details about the repository, including the URL where the repository is located, the default branch, and credentials to use to access the repository.

', 'DescribeCodeRepositoryOutput$GitConfig' => '

Configuration details about the repository, including the URL where the repository is located, the default branch, and the Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the repository.

', ], ], 'GitConfigForUpdate' => [ 'base' => '

Specifies configuration details for a Git repository when the repository is updated.

', 'refs' => [ 'UpdateCodeRepositoryInput$GitConfig' => '

The configuration of the git repository, including the URL and the Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the repository. The secret must have a staging label of AWSCURRENT and must be in the following format:

{"username": UserName, "password": Password}
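
A sketch of pointing a repository at a secret in that format via UpdateCodeRepository with the AWS SDK for PHP; the repository name and secret ARN are placeholders:

    <?php
    require 'vendor/autoload.php';

    use Aws\SageMaker\SageMakerClient;

    $client = new SageMakerClient(['region' => 'us-west-2', 'version' => 'latest']);

    $client->updateCodeRepository([
        'CodeRepositoryName' => 'my-repo', // placeholder
        'GitConfig' => [
            // The secret must carry the AWSCURRENT staging label
            // and contain the JSON shown above.
            'SecretArn' => 'arn:aws:secretsmanager:us-west-2:123456789012:secret:my-git-creds', // placeholder
        ],
    ]);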

', ], ], 'GitConfigUrl' => [ 'base' => NULL, 'refs' => [ 'GitConfig$RepositoryUrl' => '

The URL where the Git repository is located.

', ], ], 'HumanTaskConfig' => [ 'base' => '

Information required for human workers to complete a labeling task.

', 'refs' => [ 'CreateLabelingJobRequest$HumanTaskConfig' => '

Configures the information required for human workers to complete a labeling task.

', 'DescribeLabelingJobResponse$HumanTaskConfig' => '

Configuration information required for human workers to complete a labeling task.

', ], ], 'HyperParameterAlgorithmSpecification' => [ 'base' => '

Specifies which training algorithm to use for training jobs that a hyperparameter tuning job launches and the metrics to monitor.

', 'refs' => [ 'HyperParameterTrainingJobDefinition$AlgorithmSpecification' => '

The HyperParameterAlgorithmSpecification object that specifies the resource algorithm to use for the training jobs that the tuning job launches.

', ], ], 'HyperParameterSpecification' => [ 'base' => '

Defines a hyperparameter to be used by an algorithm.

', 'refs' => [ 'HyperParameterSpecifications$member' => NULL, ], ], 'HyperParameterSpecifications' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$SupportedHyperParameters' => '

A list of the HyperParameterSpecification objects that define the supported hyperparameters. This is required if the algorithm supports automatic model tuning.

', ], ], 'HyperParameterTrainingJobDefinition' => [ 'base' => '

Defines the training jobs launched by a hyperparameter tuning job.

', 'refs' => [ 'CreateHyperParameterTuningJobRequest$TrainingJobDefinition' => '

The HyperParameterTrainingJobDefinition object that describes the training jobs that this tuning job launches, including static hyperparameters, input data configuration, output data configuration, resource configuration, and stopping condition.

', 'DescribeHyperParameterTuningJobResponse$TrainingJobDefinition' => '

The HyperParameterTrainingJobDefinition object that specifies the definition of the training jobs that this tuning job launches.

', ], ], 'HyperParameterTrainingJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListTrainingJobsForHyperParameterTuningJobResponse$TrainingJobSummaries' => '

A list of TrainingJobSummary objects that describe the training jobs that the ListTrainingJobsForHyperParameterTuningJob request returned.

', ], ], 'HyperParameterTrainingJobSummary' => [ 'base' => '

Specifies summary information about a training job.

', 'refs' => [ 'DescribeHyperParameterTuningJobResponse$BestTrainingJob' => '

A TrainingJobSummary object that describes the training job that completed with the best current HyperParameterTuningJobObjective.

', 'DescribeHyperParameterTuningJobResponse$OverallBestTrainingJob' => '

If the hyperparameter tuning job is a warm start tuning job with a WarmStartType of IDENTICAL_DATA_AND_ALGORITHM, this is the TrainingJobSummary for the training job with the best objective metric value of all training jobs launched by this tuning job and all parent jobs specified for the warm start tuning job.

', 'HyperParameterTrainingJobSummaries$member' => NULL, ], ], 'HyperParameterTuningJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateHyperParameterTuningJobResponse$HyperParameterTuningJobArn' => '

The Amazon Resource Name (ARN) of the tuning job. Amazon SageMaker assigns an ARN to a hyperparameter tuning job when you create it.

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobArn' => '

The Amazon Resource Name (ARN) of the tuning job.

', 'DescribeTrainingJobResponse$TuningJobArn' => '

The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the training job was launched by a hyperparameter tuning job.

', 'HyperParameterTuningJobSummary$HyperParameterTuningJobArn' => '

The Amazon Resource Name (ARN) of the tuning job.

', 'TrainingJob$TuningJobArn' => '

The Amazon Resource Name (ARN) of the associated hyperparameter tuning job if the training job was launched by a hyperparameter tuning job.

', ], ], 'HyperParameterTuningJobConfig' => [ 'base' => '

Configures a hyperparameter tuning job.

', 'refs' => [ 'CreateHyperParameterTuningJobRequest$HyperParameterTuningJobConfig' => '

The HyperParameterTuningJobConfig object that describes the tuning job, including the search strategy, the objective metric used to evaluate training jobs, ranges of parameters to search, and resource limits for the tuning job. For more information, see Automatic Model Tuning.
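
As an illustration, a HyperParameterTuningJobConfig fragment in AWS SDK for PHP array form; the objective metric and hyperparameter names are placeholders:

    <?php
    // Passed as the 'HyperParameterTuningJobConfig' key of a
    // createHyperParameterTuningJob request.
    $tuningJobConfig = [
        'Strategy' => 'Bayesian',
        'HyperParameterTuningJobObjective' => [
            'Type'       => 'Maximize',
            'MetricName' => 'validation:accuracy', // placeholder
        ],
        'ResourceLimits' => [
            'MaxNumberOfTrainingJobs' => 10,
            'MaxParallelTrainingJobs' => 2,
        ],
        'ParameterRanges' => [
            'ContinuousParameterRanges' => [
                ['Name' => 'learning_rate', 'MinValue' => '0.001', 'MaxValue' => '0.1'],
            ],
        ],
    ];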

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobConfig' => '

The HyperParameterTuningJobConfig object that specifies the configuration of the tuning job.

', ], ], 'HyperParameterTuningJobName' => [ 'base' => NULL, 'refs' => [ 'CreateHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job. This name is the prefix for the names of all training jobs that this tuning job launches. The name must be unique within the same AWS account and AWS Region. The name must have 1 to 32 characters. Valid characters are a-z, A-Z, 0-9, and : + = @ _ % - (hyphen). The name is not case sensitive.

', 'DescribeHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job to describe.

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobName' => '

The name of the tuning job.

', 'HyperParameterTrainingJobSummary$TuningJobName' => '

The name of the hyperparameter tuning job that launched the training job.

', 'HyperParameterTuningJobSummary$HyperParameterTuningJobName' => '

The name of the tuning job.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job whose training jobs you want to list.

', 'ParentHyperParameterTuningJob$HyperParameterTuningJobName' => '

The name of the hyperparameter tuning job to be used as a starting point for a new hyperparameter tuning job.

', 'StopHyperParameterTuningJobRequest$HyperParameterTuningJobName' => '

The name of the tuning job to stop.

', ], ], 'HyperParameterTuningJobObjective' => [ 'base' => '

Defines the objective metric for a hyperparameter tuning job. Hyperparameter tuning uses the value of this metric to evaluate the training jobs it launches, and returns the training job that results in either the highest or lowest value for this metric, depending on the value you specify for the Type parameter.

', 'refs' => [ 'HyperParameterTuningJobConfig$HyperParameterTuningJobObjective' => '

The HyperParameterTuningJobObjective object that specifies the objective metric for this tuning job.

', 'HyperParameterTuningJobObjectives$member' => NULL, ], ], 'HyperParameterTuningJobObjectiveType' => [ 'base' => NULL, 'refs' => [ 'FinalHyperParameterTuningJobObjectiveMetric$Type' => '

Whether to minimize or maximize the objective metric. Valid values are Minimize and Maximize.

', 'HyperParameterTuningJobObjective$Type' => '

Whether to minimize or maximize the objective metric.

', ], ], 'HyperParameterTuningJobObjectives' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$SupportedTuningJobObjectiveMetrics' => '

A list of the metrics that the algorithm emits that can be used as the objective metric in a hyperparameter tuning job.

', ], ], 'HyperParameterTuningJobSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListHyperParameterTuningJobsRequest$SortBy' => '

The field to sort results by. The default is Name.

', ], ], 'HyperParameterTuningJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningJobStatus' => '

The status of the tuning job: InProgress, Completed, Failed, Stopping, or Stopped.

', 'HyperParameterTuningJobSummary$HyperParameterTuningJobStatus' => '

The status of the tuning job.

', 'ListHyperParameterTuningJobsRequest$StatusEquals' => '

A filter that returns only tuning jobs with the specified status.

', ], ], 'HyperParameterTuningJobStrategyType' => [ 'base' => '

The strategy hyperparameter tuning uses to find the best combination of hyperparameters for your model. Currently, the only supported value is Bayesian.

', 'refs' => [ 'HyperParameterTuningJobConfig$Strategy' => '

Specifies the search strategy for hyperparameters. Currently, the only valid value is Bayesian.

', 'HyperParameterTuningJobSummary$Strategy' => '

Specifies the search strategy hyperparameter tuning uses to choose which hyperparameters to use for each iteration. Currently, the only valid value is Bayesian.

', ], ], 'HyperParameterTuningJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListHyperParameterTuningJobsResponse$HyperParameterTuningJobSummaries' => '

A list of HyperParameterTuningJobSummary objects that describe the tuning jobs that the ListHyperParameterTuningJobs request returned.

', ], ], 'HyperParameterTuningJobSummary' => [ 'base' => '

Provides summary information about a hyperparameter tuning job.

', 'refs' => [ 'HyperParameterTuningJobSummaries$member' => NULL, ], ], 'HyperParameterTuningJobWarmStartConfig' => [ 'base' => '

Specifies the configuration for a hyperparameter tuning job that uses one or more previous hyperparameter tuning jobs as a starting point. The results of previous tuning jobs are used to inform which combinations of hyperparameters to search over in the new tuning job.

All training jobs launched by the new hyperparameter tuning job are evaluated by using the objective metric, and the training job that performs the best is compared to the best training jobs from the parent tuning jobs. From these, the training job that performs the best as measured by the objective metric is returned as the overall best training job.

All training jobs launched by parent hyperparameter tuning jobs and the new hyperparameter tuning jobs count against the limit of training jobs for the tuning job.

', 'refs' => [ 'CreateHyperParameterTuningJobRequest$WarmStartConfig' => '

Specifies the configuration for starting the hyperparameter tuning job using one or more previous tuning jobs as a starting point. The results of previous tuning jobs are used to inform which combinations of hyperparameters to search over in the new tuning job.

All training jobs launched by the new hyperparameter tuning job are evaluated by using the objective metric. If you specify IDENTICAL_DATA_AND_ALGORITHM as the WarmStartType value for the warm start configuration, the training job that performs the best in the new tuning job is compared to the best training jobs from the parent tuning jobs. From these, the training job that performs the best as measured by the objective metric is returned as the overall best training job.

All training jobs launched by parent hyperparameter tuning jobs and the new hyperparameter tuning jobs count against the limit of training jobs for the tuning job.
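
A sketch of the warm-start portion of such a request in AWS SDK for PHP array form; the parent job name is a placeholder:

    <?php
    // Passed as the 'WarmStartConfig' key of a createHyperParameterTuningJob
    // request, alongside the usual tuning job configuration.
    $warmStartConfig = [
        'ParentHyperParameterTuningJobs' => [
            ['HyperParameterTuningJobName' => 'parent-tuning-job'], // placeholder
        ],
        'WarmStartType' => 'IDENTICAL_DATA_AND_ALGORITHM',
    ];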

', 'DescribeHyperParameterTuningJobResponse$WarmStartConfig' => '

The configuration for starting the hyperparameter tuning job using one or more previous tuning jobs as a starting point. The results of previous tuning jobs are used to inform which combinations of hyperparameters to search over in the new tuning job.

', ], ], 'HyperParameterTuningJobWarmStartType' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTuningJobWarmStartConfig$WarmStartType' => '

Specifies one of the following:

IDENTICAL_DATA_AND_ALGORITHM

The new hyperparameter tuning job uses the same input data and training image as the parent tuning jobs. You can change the hyperparameter ranges to search and the maximum number of training jobs that the hyperparameter tuning job launches. You cannot use a new version of the training algorithm, unless the changes in the new version do not affect the algorithm itself. For example, changes that improve logging or adding support for a different data format are allowed. You can also change hyperparameters from tunable to static, and from static to tunable, but the total number of static plus tunable hyperparameters must remain the same as it is in all parent jobs. The objective metric for the new tuning job must be the same as for all parent jobs.

TRANSFER_LEARNING

The new hyperparameter tuning job can include input data, hyperparameter ranges, maximum number of concurrent training jobs, and maximum number of training jobs that are different than those of its parent hyperparameter tuning jobs. The training image can also be a different version from the version used in the parent hyperparameter tuning job. You can also change hyperparameters from tunable to static, and from static to tunable, but the total number of static plus tunable hyperparameters must remain the same as it is in all parent jobs. The objective metric for the new tuning job must be the same as for all parent jobs.

', ], ], 'HyperParameters' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobRequest$HyperParameters' => '

Algorithm-specific parameters that influence the quality of the model. You set hyperparameters before you start the learning process. For a list of hyperparameters for each training algorithm provided by Amazon SageMaker, see Algorithms.

You can specify a maximum of 100 hyperparameters. Each hyperparameter is a key-value pair. Each key and value is limited to 256 characters, as specified by the Length Constraint.
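
For instance, a hyperparameter map in AWS SDK for PHP array form; the names and values are placeholders, and note that both keys and values are strings:

    <?php
    // Passed as the 'HyperParameters' key of a createTrainingJob request.
    $hyperParameters = [
        'epochs'        => '20',
        'learning_rate' => '0.05',
        'batch_size'    => '128',
    ];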

', 'DescribeTrainingJobResponse$HyperParameters' => '

Algorithm-specific parameters.

', 'HyperParameterTrainingJobDefinition$StaticHyperParameters' => '

Specifies the values of hyperparameters that do not change for the tuning job.

', 'HyperParameterTrainingJobSummary$TunedHyperParameters' => '

A list of the hyperparameters for which you specified ranges to search.

', 'TrainingJob$HyperParameters' => '

Algorithm-specific parameters.

', 'TrainingJobDefinition$HyperParameters' => '

The hyperparameters used for the training job.

', ], ], 'Image' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$Image' => '

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored. If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

', 'DeployedImage$SpecifiedImage' => '

The image path you specified when you created the model.

', 'DeployedImage$ResolvedImage' => '

The specific digest path of the image hosted in this ProductionVariant.

', 'ModelPackageContainerDefinition$Image' => '

The Amazon EC2 Container Registry (Amazon ECR) path where inference code is stored.

If you are using your own custom algorithm instead of an algorithm provided by Amazon SageMaker, the inference code must meet Amazon SageMaker requirements. Amazon SageMaker supports both registry/repository[:tag] and registry/repository[@digest] image path formats. For more information, see Using Your Own Algorithms with Amazon SageMaker.

', 'TrainingSpecification$TrainingImage' => '

The Amazon ECR registry path of the Docker image that contains the training algorithm.

', ], ], 'ImageDigest' => [ 'base' => NULL, 'refs' => [ 'ModelPackageContainerDefinition$ImageDigest' => '

An MD5 hash of the training algorithm that identifies the Docker image used for training.

', 'TrainingSpecification$TrainingImageDigest' => '

An MD5 hash of the training algorithm that identifies the Docker image used for training.

', ], ], 'InferenceSpecification' => [ 'base' => '

Defines how to perform inference generation after a training job is run.

', 'refs' => [ 'CreateAlgorithmInput$InferenceSpecification' => '

Specifies details about inference jobs that the algorithm runs, including the following:

  • The Amazon ECR paths of containers that contain the inference code and model artifacts.

  • The instance types that the algorithm supports for transform jobs and real-time endpoints used for inference.

  • The input and output content formats that the algorithm supports for inference.

', 'CreateModelPackageInput$InferenceSpecification' => '

Specifies details about inference jobs that can be run with models based on this model package, including the following:

  • The Amazon ECR paths of containers that contain the inference code and model artifacts.

  • The instance types that the model package supports for transform jobs and real-time endpoints used for inference.

  • The input and output content formats that the model package supports for inference.

', 'DescribeAlgorithmOutput$InferenceSpecification' => '

Details about inference jobs that the algorithm runs.

', 'DescribeModelPackageOutput$InferenceSpecification' => '

Details about inference jobs that can be run with models based on this model package.

', ], ], 'InputConfig' => [ 'base' => '

Contains information about the location of input model artifacts, the name and shape of the expected data inputs, and the framework in which the model was trained.

', 'refs' => [ 'CreateCompilationJobRequest$InputConfig' => '

Provides information about the location of input model artifacts, the name and shape of the expected data inputs, and the framework in which the model was trained.

', 'DescribeCompilationJobResponse$InputConfig' => '

Information about the location in Amazon S3 of the input model artifacts, the name and shape of the expected data inputs, and the framework in which the model was trained.

', ], ], 'InputDataConfig' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobRequest$InputDataConfig' => '

An array of Channel objects. Each channel is a named input source. InputDataConfig describes the input data and its location.

Algorithms can accept input data from one or more channels. For example, an algorithm might have two channels of input data, training_data and validation_data. The configuration for each channel provides the S3 location where the input data is stored. It also provides information about the stored data: the MIME type, compression method, and whether the data is wrapped in RecordIO format.

Depending on the input mode that the algorithm supports, Amazon SageMaker either copies input data files from an S3 bucket to a local directory in the Docker container, or makes it available as input streams.
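
A sketch of a single-channel InputDataConfig in AWS SDK for PHP array form; the channel name and bucket path are placeholders:

    <?php
    // Passed as the 'InputDataConfig' key of a createTrainingJob request.
    $inputDataConfig = [
        [
            'ChannelName' => 'training_data',
            'DataSource' => [
                'S3DataSource' => [
                    'S3DataType'             => 'S3Prefix',
                    'S3Uri'                  => 's3://my-bucket/train/', // placeholder
                    'S3DataDistributionType' => 'FullyReplicated',
                ],
            ],
            'ContentType'     => 'text/csv',
            'CompressionType' => 'None',
        ],
        // Additional channels (for example, validation_data) go here.
    ];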

', 'DescribeTrainingJobResponse$InputDataConfig' => '

An array of Channel objects that describes each data input channel.

', 'HyperParameterTrainingJobDefinition$InputDataConfig' => '

An array of Channel objects that specify the input for the training jobs that the tuning job launches.

', 'TrainingJob$InputDataConfig' => '

An array of Channel objects that describes each data input channel.

', 'TrainingJobDefinition$InputDataConfig' => '

An array of Channel objects, each of which specifies an input source.

', ], ], 'InputModes' => [ 'base' => NULL, 'refs' => [ 'ChannelSpecification$SupportedInputModes' => '

The allowed input mode, either FILE or PIPE.

In FILE mode, Amazon SageMaker copies the data from the input source onto the local Amazon Elastic Block Store (Amazon EBS) volumes before starting your training algorithm. This is the most commonly used input mode.

In PIPE mode, Amazon SageMaker streams input data from the source directly to your algorithm without using the EBS volume.

', ], ], 'InstanceType' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$InstanceType' => '

The type of ML compute instance to launch for the notebook instance.

', 'DescribeNotebookInstanceOutput$InstanceType' => '

The type of ML compute instance running on the notebook instance.

', 'NotebookInstanceSummary$InstanceType' => '

The type of ML compute instance that the notebook instance is running on.

', 'UpdateNotebookInstanceInput$InstanceType' => '

The type of ML compute instance for the notebook instance.

', ], ], 'IntegerParameterRange' => [ 'base' => '

For a hyperparameter of the integer type, specifies the range that a hyperparameter tuning job searches.

', 'refs' => [ 'IntegerParameterRanges$member' => NULL, ], ], 'IntegerParameterRangeSpecification' => [ 'base' => '

Defines the possible values for an integer hyperparameter.

', 'refs' => [ 'ParameterRange$IntegerParameterRangeSpecification' => '

A IntegerParameterRangeSpecification object that defines the possible values for an integer hyperparameter.

', ], ], 'IntegerParameterRanges' => [ 'base' => NULL, 'refs' => [ 'ParameterRanges$IntegerParameterRanges' => '

The array of IntegerParameterRange objects that specify ranges of integer hyperparameters that a hyperparameter tuning job searches.

', ], ], 'JobReferenceCode' => [ 'base' => NULL, 'refs' => [ 'DescribeLabelingJobResponse$JobReferenceCode' => '

A unique identifier for work done as part of a labeling job.

', 'LabelingJobForWorkteamSummary$JobReferenceCode' => '

A unique identifier for a labeling job. You can use this to refer to a specific labeling job.

', ], ], 'JobReferenceCodeContains' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsForWorkteamRequest$JobReferenceCodeContains' => '

A filter that limits jobs to only the ones whose job reference code contains the specified string.

', ], ], 'KmsKeyId' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigInput$KmsKeyId' => '

The Amazon Resource Name (ARN) of an AWS Key Management Service key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance that hosts the endpoint.

', 'CreateNotebookInstanceInput$KmsKeyId' => '

If you provide an AWS KMS key ID, Amazon SageMaker uses it to encrypt data at rest on the ML storage volume that is attached to your notebook instance. The KMS key you provide must be enabled. For information, see Enabling and Disabling Keys in the AWS Key Management Service Developer Guide.

', 'DescribeEndpointConfigOutput$KmsKeyId' => '

The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage volume attached to the instance.

', 'DescribeNotebookInstanceOutput$KmsKeyId' => '

The AWS KMS key ID Amazon SageMaker uses to encrypt data when storing it on the ML storage volume attached to the instance.

', 'LabelingJobOutputConfig$KmsKeyId' => '

The AWS Key Management Service ID of the key used to encrypt the output data, if any.

', 'LabelingJobResourceConfig$VolumeKmsKeyId' => '

The AWS Key Management Service key ID for the key used to encrypt the output data, if any.

', 'OutputDataConfig$KmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption. The KmsKeyId can be any of the following formats:

  • // KMS Key ID

    "1234abcd-12ab-34cd-56ef-1234567890ab"

  • // Amazon Resource Name (ARN) of a KMS Key

    "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"

  • // KMS Key Alias

    "alias/ExampleAlias"

  • // Amazon Resource Name (ARN) of a KMS Key Alias

    "arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias"

If you don\'t provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role\'s account. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.

The KMS key policy must grant permission to the IAM role that you specify in your CreateTrainingJob request. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.
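
For example, an OutputDataConfig fragment in AWS SDK for PHP array form using the alias format; the bucket path and alias are placeholders:

    <?php
    // Passed as the 'OutputDataConfig' key of a createTrainingJob request.
    $outputDataConfig = [
        'S3OutputPath' => 's3://my-bucket/output/', // placeholder
        'KmsKeyId'     => 'alias/ExampleAlias',     // any of the four formats above is accepted
    ];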

', 'ResourceConfig$VolumeKmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the training job. The VolumeKmsKeyId can be any of the following formats:

  • // KMS Key ID

    "1234abcd-12ab-34cd-56ef-1234567890ab"

  • // Amazon Resource Name (ARN) of a KMS Key

    "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"

', 'TransformOutput$KmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt the model artifacts at rest using Amazon S3 server-side encryption. The KmsKeyId can be any of the following formats:

  • // KMS Key ID

    "1234abcd-12ab-34cd-56ef-1234567890ab"

  • // Amazon Resource Name (ARN) of a KMS Key

    "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"

  • // KMS Key Alias

    "alias/ExampleAlias"

  • // Amazon Resource Name (ARN) of a KMS Key Alias

    "arn:aws:kms:us-west-2:111122223333:alias/ExampleAlias"

If you don\'t provide a KMS key ID, Amazon SageMaker uses the default KMS key for Amazon S3 for your role\'s account. For more information, see KMS-Managed Encryption Keys in the Amazon Simple Storage Service Developer Guide.

The KMS key policy must grant permission to the IAM role that you specify in your CreateTransformJob request. For more information, see Using Key Policies in AWS KMS in the AWS Key Management Service Developer Guide.

', 'TransformResources$VolumeKmsKeyId' => '

The AWS Key Management Service (AWS KMS) key that Amazon SageMaker uses to encrypt data on the storage volume attached to the ML compute instance(s) that run the batch transform job. The VolumeKmsKeyId can be any of the following formats:

  • // KMS Key ID

    "1234abcd-12ab-34cd-56ef-1234567890ab"

  • // Amazon Resource Name (ARN) of a KMS Key

    "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab"

', ], ], 'LabelAttributeName' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobRequest$LabelAttributeName' => '

The attribute name to use for the label in the output manifest file. This is the key for the key/value pair formed with the label that a worker assigns to the object. The name can\'t end with "-metadata". If you are running a semantic segmentation labeling job, the attribute name must end with "-ref". If you are running any other kind of labeling job, the attribute name must not end with "-ref".

', 'DescribeLabelingJobResponse$LabelAttributeName' => '

The attribute used as the label in the output manifest file.

', ], ], 'LabelCounter' => [ 'base' => NULL, 'refs' => [ 'LabelCounters$TotalLabeled' => '

The total number of objects labeled.

', 'LabelCounters$HumanLabeled' => '

The total number of objects labeled by a human worker.

', 'LabelCounters$MachineLabeled' => '

The total number of objects labeled by automated data labeling.

', 'LabelCounters$FailedNonRetryableError' => '

The total number of objects that could not be labeled due to an error.

', 'LabelCounters$Unlabeled' => '

The total number of objects not yet labeled.

', 'LabelCountersForWorkteam$HumanLabeled' => '

The total number of data objects labeled by a human worker.

', 'LabelCountersForWorkteam$PendingHuman' => '

The total number of data objects that need to be labeled by a human worker.

', 'LabelCountersForWorkteam$Total' => '

The total number of tasks in the labeling job.

', ], ], 'LabelCounters' => [ 'base' => '

Provides a breakdown of the number of objects labeled.

', 'refs' => [ 'DescribeLabelingJobResponse$LabelCounters' => '

Provides a breakdown of the number of data objects labeled by humans, the number of objects labeled by machine, the number of objects that couldn\'t be labeled, and the total number of objects labeled.

', 'LabelingJobSummary$LabelCounters' => '

Counts showing the progress of the labeling job.

', ], ], 'LabelCountersForWorkteam' => [ 'base' => '

Provides counts for human-labeled tasks in the labeling job.

', 'refs' => [ 'LabelingJobForWorkteamSummary$LabelCounters' => '

Provides information about the progress of a labeling job.

', ], ], 'LabelingJobAlgorithmSpecificationArn' => [ 'base' => NULL, 'refs' => [ 'LabelingJobAlgorithmsConfig$LabelingJobAlgorithmSpecificationArn' => '

Specifies the Amazon Resource Name (ARN) of the algorithm used for auto-labeling. You must select one of the following ARNs:

  • Image classification

    arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/image-classification

  • Text classification

    arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/text-classification

  • Object detection

    arn:aws:sagemaker:region:027400017018:labeling-job-algorithm-specification/object-detection

', ], ], 'LabelingJobAlgorithmsConfig' => [ 'base' => '

Provides configuration information for auto-labeling of your data objects. A LabelingJobAlgorithmsConfig object must be supplied in order to use auto-labeling.

', 'refs' => [ 'CreateLabelingJobRequest$LabelingJobAlgorithmsConfig' => '

Configures the information required to perform automated data labeling.

', 'DescribeLabelingJobResponse$LabelingJobAlgorithmsConfig' => '

Configuration information for automated data labeling.

', ], ], 'LabelingJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the labeling job. You use this ARN to identify the labeling job.

', 'DescribeLabelingJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the labeling job.

', 'DescribeTrainingJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the transform or training job.

', 'DescribeTransformJobResponse$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the Amazon SageMaker Ground Truth labeling job that created the transform or training job.

', 'LabelingJobSummary$LabelingJobArn' => '

The Amazon Resource Name (ARN) assigned to the labeling job when it was created.

', 'TrainingJob$LabelingJobArn' => '

The Amazon Resource Name (ARN) of the labeling job.

', ], ], 'LabelingJobDataAttributes' => [ 'base' => '

Attributes of the data specified by the customer. Use these to describe the data to be labeled.

', 'refs' => [ 'LabelingJobInputConfig$DataAttributes' => '

Attributes of the data specified by the customer.

', ], ], 'LabelingJobDataSource' => [ 'base' => '

Provides information about the location of input data.

', 'refs' => [ 'LabelingJobInputConfig$DataSource' => '

The location of the input data.

', ], ], 'LabelingJobForWorkteamSummary' => [ 'base' => '

Provides summary information for a work team.

', 'refs' => [ 'LabelingJobForWorkteamSummaryList$member' => NULL, ], ], 'LabelingJobForWorkteamSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsForWorkteamResponse$LabelingJobSummaryList' => '

An array of LabelingJobForWorkteamSummary objects, each describing a labeling job.

', ], ], 'LabelingJobInputConfig' => [ 'base' => '

Input configuration information for a labeling job.

', 'refs' => [ 'CreateLabelingJobRequest$InputConfig' => '

Input data for the labeling job, such as the Amazon S3 location of the data objects and the location of the manifest file that describes the data objects.

', 'DescribeLabelingJobResponse$InputConfig' => '

Input configuration information for the labeling job, such as the Amazon S3 location of the data objects and the location of the manifest file that describes the data objects.

', 'LabelingJobSummary$InputConfig' => '

Input configuration for the labeling job.

', ], ], 'LabelingJobName' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobRequest$LabelingJobName' => '

The name of the labeling job. This name is used to identify the job in a list of labeling jobs.

', 'DescribeLabelingJobRequest$LabelingJobName' => '

The name of the labeling job to return information for.

', 'DescribeLabelingJobResponse$LabelingJobName' => '

The name assigned to the labeling job when it was created.

', 'LabelingJobForWorkteamSummary$LabelingJobName' => '

The name of the labeling job that the work team is assigned to.

', 'LabelingJobSummary$LabelingJobName' => '

The name of the labeling job.

', 'StopLabelingJobRequest$LabelingJobName' => '

The name of the labeling job to stop.

', ], ], 'LabelingJobOutput' => [ 'base' => '

Specifies the location of the output produced by the labeling job.

', 'refs' => [ 'DescribeLabelingJobResponse$LabelingJobOutput' => '

The location of the output produced by the labeling job.

', 'LabelingJobSummary$LabelingJobOutput' => '

The location of the output produced by the labeling job.

', ], ], 'LabelingJobOutputConfig' => [ 'base' => '

Output configuration information for a labeling job.

', 'refs' => [ 'CreateLabelingJobRequest$OutputConfig' => '

The location of the output data and the AWS Key Management Service key ID for the key used to encrypt the output data, if any.

', 'DescribeLabelingJobResponse$OutputConfig' => '

The location of the job\'s output data and the AWS Key Management Service key ID for the key used to encrypt the output data, if any.

', ], ], 'LabelingJobResourceConfig' => [ 'base' => '

Provides configuration information for labeling jobs.

', 'refs' => [ 'LabelingJobAlgorithmsConfig$LabelingJobResourceConfig' => '

Provides configuration information for a labeling job.

', ], ], 'LabelingJobS3DataSource' => [ 'base' => '

The Amazon S3 location of the input data objects.

', 'refs' => [ 'LabelingJobDataSource$S3DataSource' => '

The Amazon S3 location of the input data objects.

', ], ], 'LabelingJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeLabelingJobResponse$LabelingJobStatus' => '

The processing status of the labeling job.

', 'LabelingJobSummary$LabelingJobStatus' => '

The current status of the labeling job.

', 'ListLabelingJobsRequest$StatusEquals' => '

A filter that retrieves only labeling jobs with a specific status.

', ], ], 'LabelingJobStoppingConditions' => [ 'base' => '

A set of conditions for stopping a labeling job. If any of the conditions are met, the job is automatically stopped. You can use these conditions to control the cost of data labeling.

', 'refs' => [ 'CreateLabelingJobRequest$StoppingConditions' => '

A set of conditions for stopping the labeling job. If any of the conditions are met, the job is automatically stopped. You can use these conditions to control the cost of data labeling.

', 'DescribeLabelingJobResponse$StoppingConditions' => '

A set of conditions for stopping a labeling job. If any of the conditions are met, the job is automatically stopped.

', ], ], 'LabelingJobSummary' => [ 'base' => '

Provides summary information about a labeling job.

', 'refs' => [ 'LabelingJobSummaryList$member' => NULL, ], ], 'LabelingJobSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsResponse$LabelingJobSummaryList' => '

An array of LabelingJobSummary objects, each describing a labeling job.

', ], ], 'LambdaFunctionArn' => [ 'base' => NULL, 'refs' => [ 'AnnotationConsolidationConfig$AnnotationConsolidationLambdaArn' => '

The Amazon Resource Name (ARN) of a Lambda function that implements the logic for annotation consolidation.

For the built-in bounding box, image classification, semantic segmentation, and text classification task types, Amazon SageMaker Ground Truth provides the following Lambda functions:

  • Bounding box - Finds the most similar boxes from different workers based on the Jaccard index of the boxes.

    arn:aws:lambda:us-east-1:432418664414:function:ACS-BoundingBox

    arn:aws:lambda:us-east-2:266458841044:function:ACS-BoundingBox

    arn:aws:lambda:us-west-2:081040173940:function:ACS-BoundingBox

    arn:aws:lambda:eu-west-1:568282634449:function:ACS-BoundingBox

    arn:aws:lambda:ap-northeast-1:477331159723:function:ACS-BoundingBox

  • Image classification - Uses a variant of the Expectation Maximization approach to estimate the true class of an image based on annotations from individual workers.

    arn:aws:lambda:us-east-1:432418664414:function:ACS-ImageMultiClass

    arn:aws:lambda:us-east-2:266458841044:function:ACS-ImageMultiClass

    arn:aws:lambda:us-west-2:081040173940:function:ACS-ImageMultiClass

    arn:aws:lambda:eu-west-1:568282634449:function:ACS-ImageMultiClass

    arn:aws:lambda:ap-northeast-1:477331159723:function:ACS-ImageMultiClass

  • Semantic segmentation - Treats each pixel in an image as a multi-class classification and treats pixel annotations from workers as "votes" for the correct label.

    arn:aws:lambda:us-east-1:432418664414:function:ACS-SemanticSegmentation

    arn:aws:lambda:us-east-2:266458841044:function:ACS-SemanticSegmentation

    arn:aws:lambda:us-west-2:081040173940:function:ACS-SemanticSegmentation

    arn:aws:lambda:eu-west-1:568282634449:function:ACS-SemanticSegmentation

    arn:aws:lambda:ap-northeast-1:477331159723:function:ACS-SemanticSegmentation

  • Text classification - Uses a variant of the Expectation Maximization approach to estimate the true class of text based on annotations from individual workers.

    arn:aws:lambda:us-east-1:432418664414:function:ACS-TextMultiClass

    arn:aws:lambda:us-east-2:266458841044:function:ACS-TextMultiClass

    arn:aws:lambda:us-west-2:081040173940:function:ACS-TextMultiClass

    arn:aws:lambda:eu-west-1:568282634449:function:ACS-TextMultiClass

    arn:aws:lambda:ap-northeast-1:477331159723:function:ACS-TextMultiClass

For more information, see Annotation Consolidation.

', 'HumanTaskConfig$PreHumanTaskLambdaArn' => '

The Amazon Resource Name (ARN) of a Lambda function that is run before a data object is sent to a human worker. Use this function to provide input to a custom labeling job.

For the built-in bounding box, image classification, semantic segmentation, and text classification task types, Amazon SageMaker Ground Truth provides the following Lambda functions:

US East (Northern Virginia) (us-east-1):

  • arn:aws:lambda:us-east-1:432418664414:function:PRE-BoundingBox

  • arn:aws:lambda:us-east-1:432418664414:function:PRE-ImageMultiClass

  • arn:aws:lambda:us-east-1:432418664414:function:PRE-SemanticSegmentation

  • arn:aws:lambda:us-east-1:432418664414:function:PRE-TextMultiClass

US East (Ohio) (us-east-2):

  • arn:aws:lambda:us-east-2:266458841044:function:PRE-BoundingBox

  • arn:aws:lambda:us-east-2:266458841044:function:PRE-ImageMultiClass

  • arn:aws:lambda:us-east-2:266458841044:function:PRE-SemanticSegmentation

  • arn:aws:lambda:us-east-2:266458841044:function:PRE-TextMultiClass

US West (Oregon) (us-west-2):

  • arn:aws:lambda:us-west-2:081040173940:function:PRE-BoundingBox

  • arn:aws:lambda:us-west-2:081040173940:function:PRE-ImageMultiClass

  • arn:aws:lambda:us-west-2:081040173940:function:PRE-SemanticSegmentation

  • arn:aws:lambda:us-west-2:081040173940:function:PRE-TextMultiClass

EU (Ireland) (eu-west-1):

  • arn:aws:lambda:eu-west-1:568282634449:function:PRE-BoundingBox

  • arn:aws:lambda:eu-west-1:568282634449:function:PRE-ImageMultiClass

  • arn:aws:lambda:eu-west-1:568282634449:function:PRE-SemanticSegmentation

  • arn:aws:lambda:eu-west-1:568282634449:function:PRE-TextMultiClass

Asia Pacific (Tokyo) (ap-northeast-1):

  • arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-BoundingBox

  • arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-ImageMultiClass

  • arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-SemanticSegmentation

  • arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-TextMultiClass
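
As a sketch only (not part of the SDK), a caller might key the region-specific bounding box ARNs listed above by region; the selected region is hypothetical:

```php
<?php
// Map each supported region to its PRE-BoundingBox Lambda ARN (values are
// the region-specific ARNs listed above; the account IDs belong to
// Amazon SageMaker Ground Truth).
$preHumanTaskArns = [
    'us-east-1'      => 'arn:aws:lambda:us-east-1:432418664414:function:PRE-BoundingBox',
    'us-east-2'      => 'arn:aws:lambda:us-east-2:266458841044:function:PRE-BoundingBox',
    'us-west-2'      => 'arn:aws:lambda:us-west-2:081040173940:function:PRE-BoundingBox',
    'eu-west-1'      => 'arn:aws:lambda:eu-west-1:568282634449:function:PRE-BoundingBox',
    'ap-northeast-1' => 'arn:aws:lambda:ap-northeast-1:477331159723:function:PRE-BoundingBox',
];

$region = 'us-west-2'; // hypothetical
$preHumanTaskLambdaArn = $preHumanTaskArns[$region];
```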

', 'LabelingJobSummary$PreHumanTaskLambdaArn' => '

The Amazon Resource Name (ARN) of a Lambda function. The function is run before each data object is sent to a worker.

', 'LabelingJobSummary$AnnotationConsolidationLambdaArn' => '

The Amazon Resource Name (ARN) of the Lambda function used to consolidate the annotations from individual workers into a label for a data object. For more information, see Annotation Consolidation.

', ], ], 'LastModifiedTime' => [ 'base' => NULL, 'refs' => [ 'CodeRepositorySummary$LastModifiedTime' => '

The date and time that the Git repository was last modified.

', 'CompilationJobSummary$LastModifiedTime' => '

The time when the model compilation job was last modified.

', 'DescribeCodeRepositoryOutput$LastModifiedTime' => '

The date and time that the repository was last changed.

', 'DescribeCompilationJobResponse$LastModifiedTime' => '

The time that the status of the model compilation job was last modified.

', 'DescribeNotebookInstanceLifecycleConfigOutput$LastModifiedTime' => '

A timestamp that tells when the lifecycle configuration was last modified.

', 'DescribeNotebookInstanceOutput$LastModifiedTime' => '

A timestamp. Use this parameter to retrieve the time when the notebook instance was last modified.

', 'ListCompilationJobsRequest$LastModifiedTimeAfter' => '

A filter that returns the model compilation jobs that were modified after a specified time.

', 'ListCompilationJobsRequest$LastModifiedTimeBefore' => '

A filter that returns the model compilation jobs that were modified before a specified time.

', 'ListNotebookInstanceLifecycleConfigsInput$LastModifiedTimeBefore' => '

A filter that returns only lifecycle configurations that were modified before the specified time (timestamp).

', 'ListNotebookInstanceLifecycleConfigsInput$LastModifiedTimeAfter' => '

A filter that returns only lifecycle configurations that were modified after the specified time (timestamp).

', 'ListNotebookInstancesInput$LastModifiedTimeBefore' => '

A filter that returns only notebook instances that were modified before the specified time (timestamp).

', 'ListNotebookInstancesInput$LastModifiedTimeAfter' => '

A filter that returns only notebook instances that were modified after the specified time (timestamp).

', 'NotebookInstanceLifecycleConfigSummary$LastModifiedTime' => '

A timestamp that tells when the lifecycle configuration was last modified.

', 'NotebookInstanceSummary$LastModifiedTime' => '

A timestamp that shows when the notebook instance was last modified.

', ], ], 'ListAlgorithmsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListAlgorithmsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListCodeRepositoriesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListCodeRepositoriesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListCompilationJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListCompilationJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListCompilationJobsSortBy' => [ 'base' => NULL, 'refs' => [ 'ListCompilationJobsRequest$SortBy' => '

The field by which to sort results. The default is CreationTime.

', ], ], 'ListEndpointConfigsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListEndpointConfigsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListEndpointsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListEndpointsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListHyperParameterTuningJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListHyperParameterTuningJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsForWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsForWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsForWorkteamSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsForWorkteamRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'ListLabelingJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListLabelingJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListModelPackagesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListModelPackagesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListModelsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListModelsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstanceLifecycleConfigsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstanceLifecycleConfigsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstancesInput' => [ 'base' => NULL, 'refs' => [], ], 'ListNotebookInstancesOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListSubscribedWorkteamsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListSubscribedWorkteamsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListTagsInput' => [ 'base' => NULL, 'refs' => [], ], 'ListTagsMaxResults' => [ 'base' => NULL, 'refs' => [ 'ListTagsInput$MaxResults' => '

Maximum number of tags to return.

', ], ], 'ListTagsOutput' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsForHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsForHyperParameterTuningJobResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListTrainingJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListTransformJobsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListTransformJobsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListWorkteamsRequest' => [ 'base' => NULL, 'refs' => [], ], 'ListWorkteamsResponse' => [ 'base' => NULL, 'refs' => [], ], 'ListWorkteamsSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListWorkteamsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'MaxConcurrentTaskCount' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$MaxConcurrentTaskCount' => '

Defines the maximum number of data objects that can be labeled by human workers at the same time. Each object may have more than one worker at one time.

', ], ], 'MaxConcurrentTransforms' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$MaxConcurrentTransforms' => '

The maximum number of parallel requests that can be sent to each instance in a transform job. The default value is 1. To allow Amazon SageMaker to determine the appropriate number for MaxConcurrentTransforms, set the value to 0.

', 'DescribeTransformJobResponse$MaxConcurrentTransforms' => '

The maximum number of parallel requests on each instance node that can be launched in a transform job. The default value is 1.

', 'TransformJobDefinition$MaxConcurrentTransforms' => '

The maximum number of parallel requests that can be sent to each instance in a transform job. The default value is 1.

', ], ], 'MaxHumanLabeledObjectCount' => [ 'base' => NULL, 'refs' => [ 'LabelingJobStoppingConditions$MaxHumanLabeledObjectCount' => '

The maximum number of objects that can be labeled by human workers.

', ], ], 'MaxNumberOfTrainingJobs' => [ 'base' => NULL, 'refs' => [ 'ResourceLimits$MaxNumberOfTrainingJobs' => '

The maximum number of training jobs that a hyperparameter tuning job can launch.

', ], ], 'MaxParallelTrainingJobs' => [ 'base' => NULL, 'refs' => [ 'ResourceLimits$MaxParallelTrainingJobs' => '

The maximum number of concurrent training jobs that a hyperparameter tuning job can launch.

', ], ], 'MaxPayloadInMB' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$MaxPayloadInMB' => '

The maximum allowed size of the payload, in MB. A payload is the data portion of a record (without metadata). The value in MaxPayloadInMB must be greater than, or equal to, the size of a single record. To estimate the size of a record in MB, divide the size of your dataset by the number of records. To ensure that the records fit within the maximum payload size, we recommend using a slightly larger value. The default value is 6 MB.

For cases where the payload might be arbitrarily large and is transmitted using HTTP chunked encoding, set the value to 0. This feature works only in supported algorithms. Currently, Amazon SageMaker built-in algorithms do not support HTTP chunked encoding.
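
A sketch of a CreateTransformJob call through the AWS SDK for PHP that sets both of the members documented here; the job name, model name, bucket paths, and instance settings are hypothetical:

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

$client->createTransformJob([
    'TransformJobName' => 'my-transform-job',   // hypothetical
    'ModelName'        => 'my-model',           // hypothetical
    'MaxConcurrentTransforms' => 0,             // let Amazon SageMaker choose
    'MaxPayloadInMB'          => 6,             // default; must cover the largest record
    'TransformInput' => [
        'DataSource' => ['S3DataSource' => [
            'S3DataType' => 'S3Prefix',
            'S3Uri'      => 's3://my-bucket/input/',
        ]],
    ],
    'TransformOutput'    => ['S3OutputPath' => 's3://my-bucket/output/'],
    'TransformResources' => ['InstanceType' => 'ml.m4.xlarge', 'InstanceCount' => 1],
]);
```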

', 'DescribeTransformJobResponse$MaxPayloadInMB' => '

The maximum payload size, in MB, used in the transform job.

', 'TransformJobDefinition$MaxPayloadInMB' => '

The maximum payload size allowed, in MB. A payload is the data portion of a record (without metadata).

', ], ], 'MaxPercentageOfInputDatasetLabeled' => [ 'base' => NULL, 'refs' => [ 'LabelingJobStoppingConditions$MaxPercentageOfInputDatasetLabeled' => '

The maximum percentage of input data objects that should be labeled.

', ], ], 'MaxResults' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$MaxResults' => '

The maximum number of algorithms to return in the response.

', 'ListCodeRepositoriesInput$MaxResults' => '

The maximum number of Git repositories to return in the response.

', 'ListCompilationJobsRequest$MaxResults' => '

The maximum number of model compilation jobs to return in the response.

', 'ListEndpointConfigsInput$MaxResults' => '

The maximum number of endpoint configurations to return in the response.

', 'ListEndpointsInput$MaxResults' => '

The maximum number of endpoints to return in the response.

', 'ListHyperParameterTuningJobsRequest$MaxResults' => '

The maximum number of tuning jobs to return. The default value is 10.

', 'ListLabelingJobsForWorkteamRequest$MaxResults' => '

The maximum number of labeling jobs to return in each page of the response.

', 'ListLabelingJobsRequest$MaxResults' => '

The maximum number of labeling jobs to return in each page of the response.

', 'ListModelPackagesInput$MaxResults' => '

The maximum number of model packages to return in the response.

', 'ListModelsInput$MaxResults' => '

The maximum number of models to return in the response.

', 'ListNotebookInstanceLifecycleConfigsInput$MaxResults' => '

The maximum number of lifecycle configurations to return in the response.

', 'ListNotebookInstancesInput$MaxResults' => '

The maximum number of notebook instances to return.

', 'ListSubscribedWorkteamsRequest$MaxResults' => '

The maximum number of work teams to return in each page of the response.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$MaxResults' => '

The maximum number of training jobs to return. The default value is 10.

', 'ListTrainingJobsRequest$MaxResults' => '

The maximum number of training jobs to return in the response.

', 'ListTransformJobsRequest$MaxResults' => '

The maximum number of transform jobs to return in the response. The default value is 10.

', 'ListWorkteamsRequest$MaxResults' => '

The maximum number of work teams to return in each page of the response.

', 'SearchRequest$MaxResults' => '

The maximum number of results to return in a SearchResponse.

', ], ], 'MaxRuntimeInSeconds' => [ 'base' => NULL, 'refs' => [ 'StoppingCondition$MaxRuntimeInSeconds' => '

The maximum length of time, in seconds, that the training job can run. If model training does not complete during this time, Amazon SageMaker ends the job. If the value is not specified, the default value is 1 day. The maximum value is 28 days.

', ], ], 'MemberDefinition' => [ 'base' => '

Defines the Amazon Cognito user group that is part of a work team.

', 'refs' => [ 'MemberDefinitions$member' => NULL, ], ], 'MemberDefinitions' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamRequest$MemberDefinitions' => '

A list of MemberDefinition objects that contains objects that identify the Amazon Cognito user pool that makes up the work team. For more information, see Amazon Cognito User Pools.

All of the CognitoMemberDefinition objects that make up the member definition must have the same ClientId and UserPool values.

', 'UpdateWorkteamRequest$MemberDefinitions' => '

A list of MemberDefinition objects that contain the updated work team members.

', 'Workteam$MemberDefinitions' => '

The Amazon Cognito user groups that make up the work team.

', ], ], 'MetricData' => [ 'base' => '

The name, value, and date and time of a metric that was emitted to Amazon CloudWatch.

', 'refs' => [ 'FinalMetricDataList$member' => NULL, ], ], 'MetricDefinition' => [ 'base' => '

Specifies a metric that the training algorithm writes to stderr or stdout. Amazon SageMaker hyperparameter tuning captures all defined metrics. You specify one metric that a hyperparameter tuning job uses as its objective metric to choose the best training job.
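
A small sketch of MetricDefinition objects as they might appear in a training request; the names and regular expressions are hypothetical and must match what the algorithm actually writes to stdout or stderr:

```php
<?php
// Each MetricDefinition pairs a metric name with a regex whose first
// capture group extracts the metric value from the algorithm's log lines.
$metricDefinitions = [
    ['Name' => 'validation:accuracy', 'Regex' => 'validation-accuracy=([0-9\\.]+)'],
    ['Name' => 'train:loss',          'Regex' => 'train-loss=([0-9\\.]+)'],
];
```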

', 'refs' => [ 'MetricDefinitionList$member' => NULL, ], ], 'MetricDefinitionList' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$MetricDefinitions' => '

A list of metric definition objects. Each object specifies the metric name and regular expressions used to parse algorithm logs. Amazon SageMaker publishes each metric to Amazon CloudWatch.

', 'HyperParameterAlgorithmSpecification$MetricDefinitions' => '

An array of MetricDefinition objects that specify the metrics that the algorithm emits.

', 'TrainingSpecification$MetricDefinitions' => '

A list of MetricDefinition objects, which are used for parsing metrics generated by the algorithm.

', ], ], 'MetricName' => [ 'base' => NULL, 'refs' => [ 'FinalHyperParameterTuningJobObjectiveMetric$MetricName' => '

The name of the objective metric.

', 'HyperParameterTuningJobObjective$MetricName' => '

The name of the metric to use for the objective metric.

', 'MetricData$MetricName' => '

The name of the metric.

', 'MetricDefinition$Name' => '

The name of the metric.

', ], ], 'MetricRegex' => [ 'base' => NULL, 'refs' => [ 'MetricDefinition$Regex' => '

A regular expression that searches the output of a training job and gets the value of the metric. For more information about using regular expressions to define metrics, see Defining Objective Metrics.

', ], ], 'MetricValue' => [ 'base' => NULL, 'refs' => [ 'FinalHyperParameterTuningJobObjectiveMetric$Value' => '

The value of the objective metric.

', ], ], 'ModelArn' => [ 'base' => NULL, 'refs' => [ 'CreateModelOutput$ModelArn' => '

The ARN of the model created in Amazon SageMaker.

', 'DescribeModelOutput$ModelArn' => '

The Amazon Resource Name (ARN) of the model.

', 'LabelingJobAlgorithmsConfig$InitialActiveLearningModelArn' => '

At the end of an auto-label job, Amazon SageMaker Ground Truth sends the Amazon Resource Name (ARN) of the final model used for auto-labeling. You can use this model as the starting point for subsequent similar jobs by providing the ARN of the model here.

', 'LabelingJobOutput$FinalActiveLearningModelArn' => '

The Amazon Resource Name (ARN) for the most recent Amazon SageMaker model trained as part of automated data labeling.

', 'ModelSummary$ModelArn' => '

The Amazon Resource Name (ARN) of the model.

', ], ], 'ModelArtifacts' => [ 'base' => '

Provides information about the location that is configured for storing model artifacts.

', 'refs' => [ 'DescribeCompilationJobResponse$ModelArtifacts' => '

Information about the location in Amazon S3 that has been configured for storing the model artifacts used in the compilation job.

', 'DescribeTrainingJobResponse$ModelArtifacts' => '

Information about the Amazon S3 location that is configured for storing model artifacts.

', 'TrainingJob$ModelArtifacts' => '

Information about the Amazon S3 location that is configured for storing model artifacts.

', ], ], 'ModelName' => [ 'base' => NULL, 'refs' => [ 'CreateModelInput$ModelName' => '

The name of the new model.

', 'CreateTransformJobRequest$ModelName' => '

The name of the model that you want to use for the transform job. ModelName must be the name of an existing Amazon SageMaker model within an AWS Region in an AWS account.

', 'DeleteModelInput$ModelName' => '

The name of the model to delete.

', 'DescribeModelInput$ModelName' => '

The name of the model.

', 'DescribeModelOutput$ModelName' => '

Name of the Amazon SageMaker model.

', 'DescribeTransformJobResponse$ModelName' => '

The name of the model used in the transform job.

', 'ModelSummary$ModelName' => '

The name of the model that you want a summary for.

', 'ProductionVariant$ModelName' => '

The name of the model that you want to host. This is the name that you specified when creating the model.

', ], ], 'ModelNameContains' => [ 'base' => NULL, 'refs' => [ 'ListModelsInput$NameContains' => '

A string in the model name. This filter returns only models whose name contains the specified string.

', ], ], 'ModelPackageArn' => [ 'base' => NULL, 'refs' => [ 'CreateModelPackageOutput$ModelPackageArn' => '

The Amazon Resource Name (ARN) of the new model package.

', 'DescribeModelPackageOutput$ModelPackageArn' => '

The Amazon Resource Name (ARN) of the model package.

', 'ModelPackageSummary$ModelPackageArn' => '

The Amazon Resource Name (ARN) of the model package.

', ], ], 'ModelPackageContainerDefinition' => [ 'base' => '

Describes the Docker container for the model package.

', 'refs' => [ 'ModelPackageContainerDefinitionList$member' => NULL, ], ], 'ModelPackageContainerDefinitionList' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$Containers' => '

The Amazon ECR registry path of the Docker image that contains the inference code.

', ], ], 'ModelPackageSortBy' => [ 'base' => NULL, 'refs' => [ 'ListModelPackagesInput$SortBy' => '

The parameter by which to sort the results. The default is CreationTime.

', ], ], 'ModelPackageStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeModelPackageOutput$ModelPackageStatus' => '

The current status of the model package.

', 'ModelPackageSummary$ModelPackageStatus' => '

The overall status of the model package.

', ], ], 'ModelPackageStatusDetails' => [ 'base' => '

Specifies the validation and image scan statuses of the model package.

', 'refs' => [ 'DescribeModelPackageOutput$ModelPackageStatusDetails' => '

Details about the current status of the model package.

', ], ], 'ModelPackageStatusItem' => [ 'base' => '

Represents the overall status of a model package.

', 'refs' => [ 'ModelPackageStatusItemList$member' => NULL, ], ], 'ModelPackageStatusItemList' => [ 'base' => NULL, 'refs' => [ 'ModelPackageStatusDetails$ValidationStatuses' => '

The validation status of the model package.

', 'ModelPackageStatusDetails$ImageScanStatuses' => '

The status of the scan of the Docker image container for the model package.

', ], ], 'ModelPackageSummary' => [ 'base' => '

Provides summary information about a model package.

', 'refs' => [ 'ModelPackageSummaryList$member' => NULL, ], ], 'ModelPackageSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListModelPackagesOutput$ModelPackageSummaryList' => '

An array of ModelPackageSummary objects, each of which lists a model package.

', ], ], 'ModelPackageValidationProfile' => [ 'base' => '

Contains data, such as the inputs and targeted instance types that are used in the process of validating the model package.

The data provided in the validation profile is made available to your buyers on AWS Marketplace.

', 'refs' => [ 'ModelPackageValidationProfiles$member' => NULL, ], ], 'ModelPackageValidationProfiles' => [ 'base' => NULL, 'refs' => [ 'ModelPackageValidationSpecification$ValidationProfiles' => '

An array of ModelPackageValidationProfile objects, each of which specifies a batch transform job that Amazon SageMaker runs to validate your model package.

', ], ], 'ModelPackageValidationSpecification' => [ 'base' => '

Specifies batch transform jobs that Amazon SageMaker runs to validate your model package.

', 'refs' => [ 'CreateModelPackageInput$ValidationSpecification' => '

Specifies configurations for one or more transform jobs that Amazon SageMaker runs to test the model package.

', 'DescribeModelPackageOutput$ValidationSpecification' => '

Configurations for one or more transform jobs that Amazon SageMaker runs to test the model package.

', ], ], 'ModelSortKey' => [ 'base' => NULL, 'refs' => [ 'ListModelsInput$SortBy' => '

Sorts the list of results. The default is CreationTime.

', ], ], 'ModelSummary' => [ 'base' => '

Provides summary information about a model.

', 'refs' => [ 'ModelSummaryList$member' => NULL, ], ], 'ModelSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListModelsOutput$Models' => '

An array of ModelSummary objects, each of which lists a model.

', ], ], 'NameContains' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$NameContains' => '

A string in the algorithm name. This filter returns only algorithms whose name contains the specified string.

', 'ListCompilationJobsRequest$NameContains' => '

A filter that returns the model compilation jobs whose name contains a specified string.

', 'ListHyperParameterTuningJobsRequest$NameContains' => '

A string in the tuning job name. This filter returns only tuning jobs whose name contains the specified string.

', 'ListLabelingJobsRequest$NameContains' => '

A string in the labeling job name. This filter returns only labeling jobs whose name contains the specified string.

', 'ListModelPackagesInput$NameContains' => '

A string in the model package name. This filter returns only model packages whose name contains the specified string.

', 'ListTrainingJobsRequest$NameContains' => '

A string in the training job name. This filter returns only training jobs whose name contains the specified string.

', 'ListTransformJobsRequest$NameContains' => '

A string in the transform job name. This filter returns only transform jobs whose name contains the specified string.

', ], ], 'NestedFilters' => [ 'base' => '

Defines a list of NestedFilters objects. To satisfy the conditions specified in the NestedFilters call, a resource must satisfy the conditions of all of the filters.

For example, you could define a NestedFilters using the training job\'s InputDataConfig property to filter on Channel objects.

A NestedFilters object contains multiple filters. For example, to find all training jobs whose name contains train and that have cat/data in their S3Uri (specified in InputDataConfig), you need to create a NestedFilters object that specifies the InputDataConfig property with the following Filter objects (see the sketch after this list):

  • \'{"Name":"InputDataConfig.ChannelName", "Operator":"EQUALS", "Value":"train"}\',

  • \'{"Name":"InputDataConfig.DataSource.S3DataSource.S3Uri", "Operator":"CONTAINS", "Value":"cat/data"}\'

', 'refs' => [ 'NestedFiltersList$member' => NULL, ], ], 'NestedFiltersList' => [ 'base' => NULL, 'refs' => [ 'SearchExpression$NestedFilters' => '

A list of nested filter objects.

', ], ], 'NetworkInterfaceId' => [ 'base' => NULL, 'refs' => [ 'DescribeNotebookInstanceOutput$NetworkInterfaceId' => '

The ID of the network interface that Amazon SageMaker created when it created the notebook instance.

', ], ], 'NextToken' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$NextToken' => '

If the response to a previous ListAlgorithms request was truncated, the response includes a NextToken. To retrieve the next set of algorithms, use the token in the next request.

', 'ListAlgorithmsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of algorithms, use it in the subsequent request.

', 'ListCodeRepositoriesInput$NextToken' => '

If the result of a ListCodeRepositoriesOutput request was truncated, the response includes a NextToken. To get the next set of Git repositories, use the token in the next request.

', 'ListCodeRepositoriesOutput$NextToken' => '

If the result of a ListCodeRepositoriesOutput request was truncated, the response includes a NextToken. To get the next set of Git repositories, use the token in the next request.

', 'ListCompilationJobsRequest$NextToken' => '

If the result of the previous ListCompilationJobs request was truncated, the response includes a NextToken. To retrieve the next set of model compilation jobs, use the token in the next request.

', 'ListCompilationJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this NextToken. To retrieve the next set of model compilation jobs, use this token in the next request.

', 'ListHyperParameterTuningJobsRequest$NextToken' => '

If the result of the previous ListHyperParameterTuningJobs request was truncated, the response includes a NextToken. To retrieve the next set of tuning jobs, use the token in the next request.

', 'ListHyperParameterTuningJobsResponse$NextToken' => '

If the result of this ListHyperParameterTuningJobs request was truncated, the response includes a NextToken. To retrieve the next set of tuning jobs, use the token in the next request.

', 'ListLabelingJobsForWorkteamRequest$NextToken' => '

If the result of the previous ListLabelingJobsForWorkteam request was truncated, the response includes a NextToken. To retrieve the next set of labeling jobs, use the token in the next request.

', 'ListLabelingJobsForWorkteamResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of labeling jobs, use it in the subsequent request.

', 'ListLabelingJobsRequest$NextToken' => '

If the result of the previous ListLabelingJobs request was truncated, the response includes a NextToken. To retrieve the next set of labeling jobs, use the token in the next request.

', 'ListLabelingJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of labeling jobs, use it in the subsequent request.

', 'ListModelPackagesInput$NextToken' => '

If the response to a previous ListModelPackages request was truncated, the response includes a NextToken. To retrieve the next set of model packages, use the token in the next request.

', 'ListModelPackagesOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of model packages, use it in the subsequent request.

', 'ListNotebookInstanceLifecycleConfigsInput$NextToken' => '

If the result of a ListNotebookInstanceLifecycleConfigs request was truncated, the response includes a NextToken. To get the next set of lifecycle configurations, use the token in the next request.

', 'ListNotebookInstanceLifecycleConfigsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To get the next set of lifecycle configurations, use it in the next request.

', 'ListNotebookInstancesInput$NextToken' => '

If the previous call to ListNotebookInstances was truncated, the response includes a NextToken. You can use this token in your subsequent ListNotebookInstances request to fetch the next set of notebook instances.

You might specify a filter or a sort order in your request. When the response is truncated, you must use the same values for the filter and sort order in the next request.
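
A pagination sketch using the AWS SDK for PHP; the filter and sort values are hypothetical but, per the note above, stay fixed across pages:

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Keep the same filter and sort order on every page of one listing.
$params = ['StatusEquals' => 'InService', 'SortBy' => 'Name', 'MaxResults' => 50];
do {
    $page = $client->listNotebookInstances($params);
    foreach ($page['NotebookInstances'] as $instance) {
        echo $instance['NotebookInstanceName'], PHP_EOL;
    }
    $token = $page['NextToken'] ?? '';
    $params['NextToken'] = $token;
} while ($token !== '');
```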

', 'ListNotebookInstancesOutput$NextToken' => '

If the response to the previous ListNotebookInstances request was truncated, Amazon SageMaker returns this token. To retrieve the next set of notebook instances, use the token in the next request.

', 'ListSubscribedWorkteamsRequest$NextToken' => '

If the result of the previous ListSubscribedWorkteams request was truncated, the response includes a NextToken. To retrieve the next set of labeling jobs, use the token in the next request.

', 'ListSubscribedWorkteamsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of work teams, use it in the subsequent request.

', 'ListTagsInput$NextToken' => '

If the response to the previous ListTags request is truncated, Amazon SageMaker returns this token. To retrieve the next set of tags, use it in the subsequent request.

', 'ListTagsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker includes a token in the response. You can use this token in your subsequent request to fetch the next set of tags.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$NextToken' => '

If the result of the previous ListTrainingJobsForHyperParameterTuningJob request was truncated, the response includes a NextToken. To retrieve the next set of training jobs, use the token in the next request.

', 'ListTrainingJobsForHyperParameterTuningJobResponse$NextToken' => '

If the result of this ListTrainingJobsForHyperParameterTuningJob request was truncated, the response includes a NextToken. To retrieve the next set of training jobs, use the token in the next request.

', 'ListTrainingJobsRequest$NextToken' => '

If the result of the previous ListTrainingJobs request was truncated, the response includes a NextToken. To retrieve the next set of training jobs, use the token in the next request.

', 'ListTrainingJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of training jobs, use it in the subsequent request.

', 'ListTransformJobsRequest$NextToken' => '

If the result of the previous ListTransformJobs request was truncated, the response includes a NextToken. To retrieve the next set of transform jobs, use the token in the next request.

', 'ListTransformJobsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of transform jobs, use it in the next request.

', 'ListWorkteamsRequest$NextToken' => '

If the result of the previous ListWorkteams request was truncated, the response includes a NextToken. To retrieve the next set of labeling jobs, use the token in the next request.

', 'ListWorkteamsResponse$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of work teams, use it in the subsequent request.

', 'SearchRequest$NextToken' => '

If more than MaxResults resource objects match the specified SearchExpression, the SearchResponse includes a NextToken. The NextToken can be passed to the next SearchRequest to continue retrieving results for the specified SearchExpression and Sort parameters.

', 'SearchResponse$NextToken' => '

If the result of the previous Search request was truncated, the response includes a NextToken. To retrieve the next set of results, use the token in the next request.

', ], ], 'NotebookInstanceAcceleratorType' => [ 'base' => NULL, 'refs' => [ 'NotebookInstanceAcceleratorTypes$member' => NULL, ], ], 'NotebookInstanceAcceleratorTypes' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$AcceleratorTypes' => '

A list of Elastic Inference (EI) instance types to associate with this notebook instance. Currently, only one instance type can be associated with a notebook instance. For more information, see Using Elastic Inference in Amazon SageMaker.

', 'DescribeNotebookInstanceOutput$AcceleratorTypes' => '

A list of the Elastic Inference (EI) instance types associated with this notebook instance. Currently only one EI instance type can be associated with a notebook instance. For more information, see Using Elastic Inference in Amazon SageMaker.

', 'UpdateNotebookInstanceInput$AcceleratorTypes' => '

A list of the Elastic Inference (EI) instance types to associate with this notebook instance. Currently only one EI instance type can be associated with a notebook instance. For more information, see Using Elastic Inference in Amazon SageMaker.

', ], ], 'NotebookInstanceArn' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceOutput$NotebookInstanceArn' => '

The Amazon Resource Name (ARN) of the notebook instance.

', 'DescribeNotebookInstanceOutput$NotebookInstanceArn' => '

The Amazon Resource Name (ARN) of the notebook instance.

', 'NotebookInstanceSummary$NotebookInstanceArn' => '

The Amazon Resource Name (ARN) of the notebook instance.

', ], ], 'NotebookInstanceLifecycleConfigArn' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigArn' => '

The Amazon Resource Name (ARN) of the lifecycle configuration.

', 'DescribeNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigArn' => '

The Amazon Resource Name (ARN) of the lifecycle configuration.

', 'NotebookInstanceLifecycleConfigSummary$NotebookInstanceLifecycleConfigArn' => '

The Amazon Resource Name (ARN) of the lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleConfigContent' => [ 'base' => NULL, 'refs' => [ 'NotebookInstanceLifecycleHook$Content' => '

A base64-encoded string that contains a shell script for a notebook instance lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleConfigList' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceLifecycleConfigInput$OnCreate' => '

A shell script that runs only once, when you create a notebook instance. The shell script must be a base64-encoded string.

', 'CreateNotebookInstanceLifecycleConfigInput$OnStart' => '

A shell script that runs every time you start a notebook instance, including when you create the notebook instance. The shell script must be a base64-encoded string.

', 'DescribeNotebookInstanceLifecycleConfigOutput$OnCreate' => '

The shell script that runs only once, when you create a notebook instance.

', 'DescribeNotebookInstanceLifecycleConfigOutput$OnStart' => '

The shell script that runs every time you start a notebook instance, including when you create the notebook instance.

', 'UpdateNotebookInstanceLifecycleConfigInput$OnCreate' => '

The shell script that runs only once, when you create a notebook instance.

', 'UpdateNotebookInstanceLifecycleConfigInput$OnStart' => '

The shell script that runs every time you start a notebook instance, including when you create the notebook instance.

', ], ], 'NotebookInstanceLifecycleConfigName' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$LifecycleConfigName' => '

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'CreateNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', 'DeleteNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration to delete.

', 'DescribeNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration to describe.

', 'DescribeNotebookInstanceLifecycleConfigOutput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', 'DescribeNotebookInstanceOutput$NotebookInstanceLifecycleConfigName' => '

Returns the name of a notebook instance lifecycle configuration.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'ListNotebookInstancesInput$NotebookInstanceLifecycleConfigNameContains' => '

A string in the name of a notebook instance lifecycle configuration associated with this notebook instance. This filter returns only notebook instances associated with a lifecycle configuration with a name that contains the specified string.

', 'NotebookInstanceLifecycleConfigSummary$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', 'NotebookInstanceSummary$NotebookInstanceLifecycleConfigName' => '

The name of a notebook instance lifecycle configuration associated with this notebook instance.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'UpdateNotebookInstanceInput$LifecycleConfigName' => '

The name of a lifecycle configuration to associate with the notebook instance. For information about lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.

', 'UpdateNotebookInstanceLifecycleConfigInput$NotebookInstanceLifecycleConfigName' => '

The name of the lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleConfigNameContains' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsInput$NameContains' => '

A string in the lifecycle configuration name. This filter returns only lifecycle configurations whose name contains the specified string.

', ], ], 'NotebookInstanceLifecycleConfigSortKey' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsInput$SortBy' => '

Sorts the list of results. The default is CreationTime.

', ], ], 'NotebookInstanceLifecycleConfigSortOrder' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsInput$SortOrder' => '

The sort order for results.

', ], ], 'NotebookInstanceLifecycleConfigSummary' => [ 'base' => '

Provides a summary of a notebook instance lifecycle configuration.

', 'refs' => [ 'NotebookInstanceLifecycleConfigSummaryList$member' => NULL, ], ], 'NotebookInstanceLifecycleConfigSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstanceLifecycleConfigsOutput$NotebookInstanceLifecycleConfigs' => '

An array of NotebookInstanceLifecycleConfiguration objects, each listing a lifecycle configuration.

', ], ], 'NotebookInstanceLifecycleHook' => [ 'base' => '

Contains the notebook instance lifecycle configuration script.

Each lifecycle configuration script has a limit of 16384 characters.

The value of the $PATH environment variable that is available to both scripts is /sbin:/bin:/usr/sbin:/usr/bin.

View CloudWatch Logs for notebook instance lifecycle configurations in log group /aws/sagemaker/NotebookInstances in log stream [notebook-instance-name]/[LifecycleConfigHook].

Lifecycle configuration scripts cannot run for longer than 5 minutes. If a script runs for longer than 5 minutes, it fails and the notebook instance is not created or started.

For information about notebook instance lifecycle configurations, see Step 2.1: (Optional) Customize a Notebook Instance.
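
A sketch of creating a lifecycle configuration with a base64-encoded OnStart script, as described above; the script body and configuration name are hypothetical:

```php
<?php
require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient(['region' => 'us-east-1', 'version' => '2017-07-24']);

// Lifecycle scripts must finish within 5 minutes and are passed as
// base64-encoded strings.
$onStart = <<<'SH'
#!/bin/bash
set -e
echo "notebook instance starting" >> /tmp/lifecycle.log
SH;

$client->createNotebookInstanceLifecycleConfig([
    'NotebookInstanceLifecycleConfigName' => 'my-lifecycle-config', // hypothetical
    'OnStart' => [['Content' => base64_encode($onStart)]],
]);
```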

', 'refs' => [ 'NotebookInstanceLifecycleConfigList$member' => NULL, ], ], 'NotebookInstanceName' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$NotebookInstanceName' => '

The name of the new notebook instance.

', 'CreatePresignedNotebookInstanceUrlInput$NotebookInstanceName' => '

The name of the notebook instance.

', 'DeleteNotebookInstanceInput$NotebookInstanceName' => '

The name of the Amazon SageMaker notebook instance to delete.

', 'DescribeNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance that you want information about.

', 'DescribeNotebookInstanceOutput$NotebookInstanceName' => '

The name of the Amazon SageMaker notebook instance.

', 'NotebookInstanceSummary$NotebookInstanceName' => '

The name of the notebook instance that you want a summary for.

', 'StartNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance to start.

', 'StopNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance to terminate.

', 'UpdateNotebookInstanceInput$NotebookInstanceName' => '

The name of the notebook instance to update.

', ], ], 'NotebookInstanceNameContains' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$NameContains' => '

A string in the notebook instance\'s name. This filter returns only notebook instances whose name contains the specified string.

', ], ], 'NotebookInstanceSortKey' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$SortBy' => '

The field to sort results by. The default is Name.

', ], ], 'NotebookInstanceSortOrder' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesInput$SortOrder' => '

The sort order for results.

', ], ], 'NotebookInstanceStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeNotebookInstanceOutput$NotebookInstanceStatus' => '

The status of the notebook instance.

', 'ListNotebookInstancesInput$StatusEquals' => '

A filter that returns only notebook instances with the specified status.

', 'NotebookInstanceSummary$NotebookInstanceStatus' => '

The status of the notebook instance.

', ], ], 'NotebookInstanceSummary' => [ 'base' => '

Provides summary information for an Amazon SageMaker notebook instance.

', 'refs' => [ 'NotebookInstanceSummaryList$member' => NULL, ], ], 'NotebookInstanceSummaryList' => [ 'base' => NULL, 'refs' => [ 'ListNotebookInstancesOutput$NotebookInstances' => '

An array of NotebookInstanceSummary objects, one for each notebook instance.

', ], ], 'NotebookInstanceUrl' => [ 'base' => NULL, 'refs' => [ 'CreatePresignedNotebookInstanceUrlOutput$AuthorizedUrl' => '

A JSON object that contains the URL string.

', 'DescribeNotebookInstanceOutput$Url' => '

The URL that you use to connect to the Jupyter notebook that is running in your notebook instance.

', 'NotebookInstanceSummary$Url' => '

The URL that you use to connect to the Jupyter instance running in your notebook instance.

', ], ], 'NotebookInstanceVolumeSizeInGB' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$VolumeSizeInGB' => '

The size, in GB, of the ML storage volume to attach to the notebook instance. The default value is 5 GB.

', 'DescribeNotebookInstanceOutput$VolumeSizeInGB' => '

The size, in GB, of the ML storage volume attached to the notebook instance.

', 'UpdateNotebookInstanceInput$VolumeSizeInGB' => '

The size, in GB, of the ML storage volume to attach to the notebook instance. The default value is 5 GB.

', ], ], 'NumberOfHumanWorkersPerDataObject' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$NumberOfHumanWorkersPerDataObject' => '

The number of human workers that will label an object.

', ], ], 'ObjectiveStatus' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTrainingJobSummary$ObjectiveStatus' => '

The status of the objective metric for the training job:

  • Succeeded: The final objective metric for the training job was evaluated by the hyperparameter tuning job and used in the hyperparameter tuning process.

  • Pending: The training job is in progress and evaluation of its final objective metric is pending.

  • Failed: The final objective metric for the training job was not evaluated, and was not used in the hyperparameter tuning process. This typically occurs when the training job failed or did not emit an objective metric.

', ], ], 'ObjectiveStatusCounter' => [ 'base' => NULL, 'refs' => [ 'ObjectiveStatusCounters$Succeeded' => '

The number of training jobs whose final objective metric was evaluated by the hyperparameter tuning job and used in the hyperparameter tuning process.

', 'ObjectiveStatusCounters$Pending' => '

The number of training jobs that are in progress and pending evaluation of their final objective metric.

', 'ObjectiveStatusCounters$Failed' => '

The number of training jobs whose final objective metric was not evaluated and used in the hyperparameter tuning process. This typically occurs when the training job failed or did not emit an objective metric.

', ], ], 'ObjectiveStatusCounters' => [ 'base' => '

Specifies the number of training jobs that this hyperparameter tuning job launched, categorized by the status of their objective metric. The objective metric status shows whether the final objective metric for the training job has been evaluated by the tuning job and used in the hyperparameter tuning process.

', 'refs' => [ 'DescribeHyperParameterTuningJobResponse$ObjectiveStatusCounters' => '

The ObjectiveStatusCounters object that specifies the number of training jobs, categorized by the status of their final objective metric, that this tuning job launched.

', 'HyperParameterTuningJobSummary$ObjectiveStatusCounters' => '

The ObjectiveStatusCounters object that specifies the numbers of training jobs, categorized by objective metric status, that this tuning job launched.

', ], ], 'Operator' => [ 'base' => NULL, 'refs' => [ 'Filter$Operator' => '

A Boolean binary operator that is used to evaluate the filter. The operator field contains one of the following values:

Equals

The specified resource in Name equals the specified Value.

NotEquals

The specified resource in Name does not equal the specified Value.

GreaterThan

The specified resource in Name is greater than the specified Value. Not supported for text-based properties.

GreaterThanOrEqualTo

The specified resource in Name is greater than or equal to the specified Value. Not supported for text-based properties.

LessThan

The specified resource in Name is less than the specified Value. Not supported for text-based properties.

LessThanOrEqualTo

The specified resource in Name is less than or equal to the specified Value. Not supported for text-based properties.

Contains

Only supported for text-based properties. The word-list of the property contains the specified Value.

If you have specified a filter Value, the default is Equals.
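
For illustration, a single Filter using one of the operators above; the property name and value are hypothetical:

```php
<?php
// A Contains filter (valid only for text-based properties).
$filter = [
    'Name'     => 'TrainingJobName',
    'Operator' => 'Contains',
    'Value'    => 'train',
];

// Omitting Operator is equivalent to specifying Equals.
$defaultFilter = ['Name' => 'TrainingJobStatus', 'Value' => 'Completed'];
```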

', ], ], 'OrderKey' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$SortOrder' => '

The sort order for results. The default is Descending.

', 'ListEndpointsInput$SortOrder' => '

The sort order for results. The default is Descending.

', 'ListModelsInput$SortOrder' => '

The sort order for results. The default is Descending.

', ], ], 'OutputConfig' => [ 'base' => '

Contains information about the output location for the compiled model and the device (target) that the model runs on.

', 'refs' => [ 'CreateCompilationJobRequest$OutputConfig' => '

Provides information about the output location for the compiled model and the target device the model runs on.

', 'DescribeCompilationJobResponse$OutputConfig' => '

Information about the output location for the compiled model and the target device that the model runs on.

', ], ], 'OutputDataConfig' => [ 'base' => '

Provides information about how to store model training results (model artifacts).

', 'refs' => [ 'CreateTrainingJobRequest$OutputDataConfig' => '

Specifies the path to the S3 bucket where you want to store model artifacts. Amazon SageMaker creates subfolders for the artifacts.

', 'DescribeTrainingJobResponse$OutputDataConfig' => '

The S3 path where model artifacts that you configured when creating the job are stored. Amazon SageMaker creates subfolders for model artifacts.

', 'HyperParameterTrainingJobDefinition$OutputDataConfig' => '

Specifies the path to the Amazon S3 bucket where you store model artifacts from the training jobs that the tuning job launches.

', 'TrainingJob$OutputDataConfig' => '

The S3 path where model artifacts that you configured when creating the job are stored. Amazon SageMaker creates subfolders for model artifacts.

', 'TrainingJobDefinition$OutputDataConfig' => '

The path to the S3 bucket where you want to store model artifacts. Amazon SageMaker creates subfolders for the artifacts.

', ], ], 'PaginationToken' => [ 'base' => NULL, 'refs' => [ 'ListEndpointConfigsInput$NextToken' => '

If the result of the previous ListEndpointConfig request was truncated, the response includes a NextToken. To retrieve the next set of endpoint configurations, use the token in the next request.

', 'ListEndpointConfigsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of endpoint configurations, use it in the subsequent request.

', 'ListEndpointsInput$NextToken' => '

If the result of a ListEndpoints request was truncated, the response includes a NextToken. To retrieve the next set of endpoints, use the token in the next request.

', 'ListEndpointsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of endpoints, use it in the subsequent request.

', 'ListModelsInput$NextToken' => '

If the response to a previous ListModels request was truncated, the response includes a NextToken. To retrieve the next set of models, use the token in the next request.

', 'ListModelsOutput$NextToken' => '

If the response is truncated, Amazon SageMaker returns this token. To retrieve the next set of models, use it in the subsequent request.

', ], ], 'ParameterKey' => [ 'base' => NULL, 'refs' => [ 'CategoricalParameterRange$Name' => '

The name of the categorical hyperparameter to tune.

', 'ContinuousParameterRange$Name' => '

The name of the continuous hyperparameter to tune.

', 'HyperParameters$key' => NULL, 'IntegerParameterRange$Name' => '

The name of the hyperparameter to search.

', ], ], 'ParameterName' => [ 'base' => NULL, 'refs' => [ 'HyperParameterSpecification$Name' => '

The name of this hyperparameter. The name must be unique.

', ], ], 'ParameterRange' => [ 'base' => '

Defines the possible values for categorical, continuous, and integer hyperparameters to be used by an algorithm.

', 'refs' => [ 'HyperParameterSpecification$Range' => '

The allowed range for this hyperparameter.

', ], ], 'ParameterRanges' => [ 'base' => '

Specifies ranges of integer, continuous, and categorical hyperparameters that a hyperparameter tuning job searches. The hyperparameter tuning job launches training jobs with hyperparameter values within these ranges to find the combination of values that result in the training job with the best performance as measured by the objective metric of the hyperparameter tuning job.

You can specify a maximum of 20 hyperparameters that a hyperparameter tuning job can search over. Every possible value of a categorical parameter range counts against this limit.
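
A sketch of a ParameterRanges value covering all three range types; the hyperparameter names and bounds are hypothetical (note that the range bounds are passed as strings):

```php
<?php
// The three range lists together may name at most 20 hyperparameters, and
// every categorical value counts against that limit.
$parameterRanges = [
    'IntegerParameterRanges' => [
        ['Name' => 'num_layers', 'MinValue' => '2', 'MaxValue' => '10'],
    ],
    'ContinuousParameterRanges' => [
        ['Name' => 'learning_rate', 'MinValue' => '0.0001', 'MaxValue' => '0.1'],
    ],
    'CategoricalParameterRanges' => [
        ['Name' => 'optimizer', 'Values' => ['sgd', 'adam']],
    ],
];
```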

', 'refs' => [ 'HyperParameterTuningJobConfig$ParameterRanges' => '

The ParameterRanges object that specifies the ranges of hyperparameters that this tuning job searches.

', ], ], 'ParameterType' => [ 'base' => NULL, 'refs' => [ 'HyperParameterSpecification$Type' => '

The type of this hyperparameter. The valid types are Integer, Continuous, Categorical, and FreeText.

', ], ], 'ParameterValue' => [ 'base' => NULL, 'refs' => [ 'ContinuousParameterRange$MinValue' => '

The minimum value for the hyperparameter. The tuning job uses floating-point values between this value and MaxValue for tuning.

', 'ContinuousParameterRange$MaxValue' => '

The maximum value for the hyperparameter. The tuning job uses floating-point values between MinValue and this value for tuning.

', 'ContinuousParameterRangeSpecification$MinValue' => '

The minimum floating-point value allowed.

', 'ContinuousParameterRangeSpecification$MaxValue' => '

The maximum floating-point value allowed.

', 'HyperParameterSpecification$DefaultValue' => '

The default value for this hyperparameter. If a default value is specified, a hyperparameter cannot be required.

', 'HyperParameters$value' => NULL, 'IntegerParameterRange$MinValue' => '

The minimum value of the hyperparameter to search.

', 'IntegerParameterRange$MaxValue' => '

The maximum value of the hyperparameter to search.

', 'IntegerParameterRangeSpecification$MinValue' => '

The minimum integer value allowed.

', 'IntegerParameterRangeSpecification$MaxValue' => '

The maximum integer value allowed.

', 'ParameterValues$member' => NULL, ], ], 'ParameterValues' => [ 'base' => NULL, 'refs' => [ 'CategoricalParameterRange$Values' => '

A list of the categories for the hyperparameter.

', 'CategoricalParameterRangeSpecification$Values' => '

The allowed categories for the hyperparameter.

', ], ], 'ParentHyperParameterTuningJob' => [ 'base' => '

A previously completed or stopped hyperparameter tuning job to be used as a starting point for a new hyperparameter tuning job.

', 'refs' => [ 'ParentHyperParameterTuningJobs$member' => NULL, ], ], 'ParentHyperParameterTuningJobs' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTuningJobWarmStartConfig$ParentHyperParameterTuningJobs' => '

An array of hyperparameter tuning jobs that are used as the starting point for the new hyperparameter tuning job. For more information about warm starting a hyperparameter tuning job, see Using a Previous Hyperparameter Tuning Job as a Starting Point.

Hyperparameter tuning jobs created before October 1, 2018 cannot be used as parent jobs for warm start tuning jobs.
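
A minimal sketch of a warm start configuration; the parent tuning job name is hypothetical:

```php
<?php
// Reuse the results of a completed or stopped tuning job as the starting
// point for a new one.
$warmStartConfig = [
    'ParentHyperParameterTuningJobs' => [
        ['HyperParameterTuningJobName' => 'my-previous-tuning-job'],
    ],
    'WarmStartType' => 'IdenticalDataAndAlgorithm',
];
```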

', ], ], 'ProductId' => [ 'base' => NULL, 'refs' => [ 'DescribeAlgorithmOutput$ProductId' => '

The product identifier of the algorithm.

', 'ModelPackageContainerDefinition$ProductId' => '

The AWS Marketplace product ID of the model package.

', ], ], 'ProductListings' => [ 'base' => NULL, 'refs' => [ 'Workteam$ProductListingIds' => '

The AWS Marketplace identifier for a vendor\'s work team.

', ], ], 'ProductionVariant' => [ 'base' => '

Identifies a model that you want to host and the resources to deploy for hosting it. If you are deploying multiple models, tell Amazon SageMaker how to distribute traffic among the models by specifying variant weights.
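
A sketch of two production variants that split traffic 70/30 between two hypothetical models behind one endpoint configuration:

```php
<?php
// Traffic is distributed in proportion to the variant weights.
$productionVariants = [
    [
        'VariantName'          => 'variant-a',
        'ModelName'            => 'model-a',        // hypothetical
        'InstanceType'         => 'ml.m4.xlarge',
        'InitialInstanceCount' => 1,
        'InitialVariantWeight' => 0.7,
    ],
    [
        'VariantName'          => 'variant-b',
        'ModelName'            => 'model-b',        // hypothetical
        'InstanceType'         => 'ml.m4.xlarge',
        'InitialInstanceCount' => 1,
        'InitialVariantWeight' => 0.3,
    ],
];
```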

', 'refs' => [ 'ProductionVariantList$member' => NULL, ], ], 'ProductionVariantAcceleratorType' => [ 'base' => NULL, 'refs' => [ 'ProductionVariant$AcceleratorType' => '

The size of the Elastic Inference (EI) instance to use for the production variant. EI instances provide on-demand GPU computing for inference. For more information, see Using Elastic Inference in Amazon SageMaker.

', ], ], 'ProductionVariantInstanceType' => [ 'base' => NULL, 'refs' => [ 'ProductionVariant$InstanceType' => '

The ML compute instance type.

', 'RealtimeInferenceInstanceTypes$member' => NULL, ], ], 'ProductionVariantList' => [ 'base' => NULL, 'refs' => [ 'CreateEndpointConfigInput$ProductionVariants' => '

An array of ProductionVariant objects, one for each model that you want to host at this endpoint.

', 'DescribeEndpointConfigOutput$ProductionVariants' => '

An array of ProductionVariant objects, one for each model that you want to host at this endpoint.

', ], ], 'ProductionVariantSummary' => [ 'base' => '

Describes weight and capacities for a production variant associated with an endpoint. If you sent a request to the UpdateEndpointWeightsAndCapacities API and the endpoint status is Updating, you get different desired and current values.

', 'refs' => [ 'ProductionVariantSummaryList$member' => NULL, ], ], 'ProductionVariantSummaryList' => [ 'base' => NULL, 'refs' => [ 'DescribeEndpointOutput$ProductionVariants' => '

An array of ProductionVariantSummary objects, one for each model hosted behind this endpoint.

', ], ], 'PropertyNameHint' => [ 'base' => NULL, 'refs' => [ 'PropertyNameQuery$PropertyNameHint' => '

Text that is part of a property\'s name. Amazon SageMaker suggests the hyperparameter, metric, and tag key names that begin with the text specified in the PropertyNameHint.

', ], ], 'PropertyNameQuery' => [ 'base' => '

A type of SuggestionQuery. A suggestion query for retrieving property names that match the specified hint.

', 'refs' => [ 'SuggestionQuery$PropertyNameQuery' => '

A type of SuggestionQuery. Defines a property name hint. Only property names that match the specified hint are included in the response.

', ], ], 'PropertyNameSuggestion' => [ 'base' => '

A property name returned from a GetSearchSuggestions call that specifies a value in the PropertyNameQuery field.

', 'refs' => [ 'PropertyNameSuggestionList$member' => NULL, ], ], 'PropertyNameSuggestionList' => [ 'base' => NULL, 'refs' => [ 'GetSearchSuggestionsResponse$PropertyNameSuggestions' => '

A list of property names for a Resource that match a SuggestionQuery.
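
As a rough, untested sketch of the suggestion flow described above, GetSearchSuggestions can be called through the AWS SDK for PHP roughly as follows; the region and the hint value are hypothetical:

require 'vendor/autoload.php';

use Aws\SageMaker\SageMakerClient;

$client = new SageMakerClient([
    'version' => 'latest',
    'region'  => 'us-east-1', // hypothetical region
]);

// Ask for property names that begin with the hint; TrainingJob is the
// only valid Resource value, per the documentation above.
$result = $client->getSearchSuggestions([
    'Resource' => 'TrainingJob',
    'SuggestionQuery' => [
        'PropertyNameQuery' => ['PropertyNameHint' => 'Learn'], // hypothetical hint
    ],
]);

foreach ($result['PropertyNameSuggestions'] as $suggestion) {
    echo $suggestion['PropertyName'], PHP_EOL;
}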

', ], ], 'PublicWorkforceTaskPrice' => [ 'base' => '

Defines the amount of money paid to an Amazon Mechanical Turk worker for each task performed.

Use one of the following prices for bounding box tasks. Prices are in US dollars.

  • 0.036

  • 0.048

  • 0.060

  • 0.072

  • 0.120

  • 0.240

  • 0.360

  • 0.480

  • 0.600

  • 0.720

  • 0.840

  • 0.960

  • 1.080

  • 1.200

Use one of the following prices for image classification, text classification, and custom tasks. Prices are in US dollars.

  • 0.012

  • 0.024

  • 0.036

  • 0.048

  • 0.060

  • 0.072

  • 0.120

  • 0.240

  • 0.360

  • 0.480

  • 0.600

  • 0.720

  • 0.840

  • 0.960

  • 1.080

  • 1.200

Use one of the following prices for semantic segmentation tasks. Prices are in US dollars.

  • 0.840

  • 0.960

  • 1.080

  • 1.200
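
For illustration only, the 0.036 bounding box price above could be expressed through the USD shape (Dollars, Cents, and the TenthFractionsOfACent member documented later in this file) roughly as below; treat the member names as an assumption, and the fragment as a sketch rather than a verified call:

// Sketch of a HumanTaskConfig fragment: 0.036 USD per task is
// 0 dollars + 3 cents + 6 tenths of a cent.
$humanTaskConfig = [
    // ... other required HumanTaskConfig members elided ...
    'PublicWorkforceTaskPrice' => [
        'AmountInUsd' => [
            'Dollars'               => 0,
            'Cents'                 => 3,
            'TenthFractionsOfACent' => 6,
        ],
    ],
];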

', 'refs' => [ 'HumanTaskConfig$PublicWorkforceTaskPrice' => '

The price that you pay for each task performed by a public worker.

', ], ], 'RealtimeInferenceInstanceTypes' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$SupportedRealtimeInferenceInstanceTypes' => '

A list of the instance types that are used to generate inferences in real-time.

', ], ], 'RecordWrapper' => [ 'base' => NULL, 'refs' => [ 'Channel$RecordWrapperType' => '

Specify RecordIO as the value when input data is in raw format but the training algorithm requires the RecordIO format. In this case, Amazon SageMaker wraps each individual S3 object in a RecordIO record. If the input data is already in RecordIO format, you don\'t need to set this attribute. For more information, see Create a Dataset Using RecordIO.

In File mode, leave this field unset or set it to None.

', ], ], 'RenderUiTemplateRequest' => [ 'base' => NULL, 'refs' => [], ], 'RenderUiTemplateResponse' => [ 'base' => NULL, 'refs' => [], ], 'RenderableTask' => [ 'base' => '

Contains input values for a task.

', 'refs' => [ 'RenderUiTemplateRequest$Task' => '

A RenderableTask object containing a representative task to render.

', ], ], 'RenderingError' => [ 'base' => '

A description of an error that occurred while rendering the template.

', 'refs' => [ 'RenderingErrorList$member' => NULL, ], ], 'RenderingErrorList' => [ 'base' => NULL, 'refs' => [ 'RenderUiTemplateResponse$Errors' => '

A list of one or more RenderingError objects if any were encountered while rendering the template. If there were no errors, the list is empty.
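
A minimal sketch of the render-and-check-errors flow described above, reusing the $client from the earlier sketch; the role ARN, template file, and task input are hypothetical:

$result = $client->renderUiTemplate([
    'RoleArn' => 'arn:aws:iam::123456789012:role/SageMakerRole', // hypothetical
    'Task' => [
        // JSON input exposed to the template as task.input
        'Input' => json_encode(['text' => 'sample text']),
    ],
    'UiTemplate' => [
        'Content' => file_get_contents('template.liquid'), // hypothetical file
    ],
]);

if (empty($result['Errors'])) {
    // RenderedContent is the Liquid template rendered to worker-UI HTML.
    file_put_contents('preview.html', $result['RenderedContent']);
} else {
    foreach ($result['Errors'] as $error) {
        echo $error['Code'], ': ', $error['Message'], PHP_EOL;
    }
}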

', ], ], 'ResourceArn' => [ 'base' => NULL, 'refs' => [ 'AddTagsInput$ResourceArn' => '

The Amazon Resource Name (ARN) of the resource that you want to tag.

', 'DeleteTagsInput$ResourceArn' => '

The Amazon Resource Name (ARN) of the resource whose tags you want to delete.

', 'ListTagsInput$ResourceArn' => '

The Amazon Resource Name (ARN) of the resource whose tags you want to retrieve.

', ], ], 'ResourceConfig' => [ 'base' => '

Describes the resources, including ML compute instances and ML storage volumes, to use for model training.

', 'refs' => [ 'CreateTrainingJobRequest$ResourceConfig' => '

The resources, including the ML compute instances and ML storage volumes, to use for model training.

ML storage volumes store model artifacts and incremental states. Training algorithms might also use ML storage volumes for scratch space. If you want Amazon SageMaker to use the ML storage volume to store the training data, choose File as the TrainingInputMode in the algorithm specification. For distributed training algorithms, specify an instance count greater than 1.

', 'DescribeTrainingJobResponse$ResourceConfig' => '

Resources, including ML compute instances and ML storage volumes, that are configured for model training.

', 'HyperParameterTrainingJobDefinition$ResourceConfig' => '

The resources, including the compute instances and storage volumes, to use for the training jobs that the tuning job launches.

Storage volumes store model artifacts and incremental states. Training algorithms might also use storage volumes for scratch space. If you want Amazon SageMaker to use the storage volume to store the training data, choose File as the TrainingInputMode in the algorithm specification. For distributed training algorithms, specify an instance count greater than 1.

', 'TrainingJob$ResourceConfig' => '

Resources, including ML compute instances and ML storage volumes, that are configured for model training.

', 'TrainingJobDefinition$ResourceConfig' => '

The resources, including the ML compute instances and ML storage volumes, to use for model training.

', ], ], 'ResourceInUse' => [ 'base' => '

Resource being accessed is in use.

', 'refs' => [], ], 'ResourceLimitExceeded' => [ 'base' => '

You have exceeded an Amazon SageMaker resource limit. For example, you might have too many training jobs created.

', 'refs' => [], ], 'ResourceLimits' => [ 'base' => '

Specifies the maximum number of training jobs and parallel training jobs that a hyperparameter tuning job can launch.

', 'refs' => [ 'HyperParameterTuningJobConfig$ResourceLimits' => '

The ResourceLimits object that specifies the maximum number of training jobs and parallel training jobs for this tuning job.

', 'HyperParameterTuningJobSummary$ResourceLimits' => '

The ResourceLimits object that specifies the maximum number of training jobs and parallel training jobs allowed for this tuning job.

', ], ], 'ResourceNotFound' => [ 'base' => '

Resource being accessed is not found.

', 'refs' => [], ], 'ResourcePropertyName' => [ 'base' => NULL, 'refs' => [ 'Filter$Name' => '

A property name. For example, TrainingJobName. For the list of valid property names returned in a search result for each supported resource, see TrainingJob properties. You must specify a valid property name for the resource.

', 'NestedFilters$NestedPropertyName' => '

The name of the property to use in the nested filters. The value must match a listed property name, such as InputDataConfig.

', 'PropertyNameSuggestion$PropertyName' => '

A suggested property name based on what you entered in the search textbox in the Amazon SageMaker console.

', 'SearchRequest$SortBy' => '

The name of the resource property used to sort the SearchResults. The default is LastModifiedTime.

', ], ], 'ResourceType' => [ 'base' => NULL, 'refs' => [ 'GetSearchSuggestionsRequest$Resource' => '

The name of the Amazon SageMaker resource to search for. The only valid Resource value is TrainingJob.

', 'SearchRequest$Resource' => '

The name of the Amazon SageMaker resource to search for. Currently, the only valid Resource value is TrainingJob.

', ], ], 'ResponseMIMEType' => [ 'base' => NULL, 'refs' => [ 'ResponseMIMETypes$member' => NULL, ], ], 'ResponseMIMETypes' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$SupportedResponseMIMETypes' => '

The supported MIME types for the output data.

', ], ], 'RoleArn' => [ 'base' => NULL, 'refs' => [ 'AlgorithmValidationSpecification$ValidationRole' => '

The IAM roles that Amazon SageMaker uses to run the training jobs.

', 'CreateCompilationJobRequest$RoleArn' => '

The Amazon Resource Name (ARN) of an IAM role that enables Amazon SageMaker to perform tasks on your behalf.

During model compilation, Amazon SageMaker needs your permission to:

  • Read input data from an S3 bucket

  • Write model artifacts to an S3 bucket

  • Write logs to Amazon CloudWatch Logs

  • Publish metrics to Amazon CloudWatch

You grant permissions for all of these tasks to an IAM role. To pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission. For more information, see Amazon SageMaker Roles.

', 'CreateLabelingJobRequest$RoleArn' => '

The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf during data labeling. You must grant this role the necessary permissions so that Amazon SageMaker can successfully complete data labeling.

', 'CreateModelInput$ExecutionRoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access model artifacts and docker image for deployment on ML compute instances or for batch transform jobs. Deploying on ML compute instances is part of model hosting. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', 'CreateNotebookInstanceInput$RoleArn' => '

When you send any requests to AWS resources from the notebook instance, Amazon SageMaker assumes this role to perform tasks on your behalf. You must grant this role necessary permissions so Amazon SageMaker can perform these tasks. The policy must allow the Amazon SageMaker service principal (sagemaker.amazonaws.com) permissions to assume this role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', 'CreateTrainingJobRequest$RoleArn' => '

The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.

During model training, Amazon SageMaker needs your permission to read input data from an S3 bucket, download a Docker image that contains training code, write model artifacts to an S3 bucket, write logs to Amazon CloudWatch Logs, and publish metrics to Amazon CloudWatch. You grant permissions for all of these tasks to an IAM role. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', 'DescribeCompilationJobResponse$RoleArn' => '

The Amazon Resource Name (ARN) of the model compilation job.

', 'DescribeLabelingJobResponse$RoleArn' => '

The Amazon Resource Name (ARN) that Amazon SageMaker assumes to perform tasks on your behalf during data labeling.

', 'DescribeModelOutput$ExecutionRoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that you specified for the model.

', 'DescribeNotebookInstanceOutput$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role associated with the instance.

', 'DescribeTrainingJobResponse$RoleArn' => '

The AWS Identity and Access Management (IAM) role configured for the training job.

', 'HyperParameterTrainingJobDefinition$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role associated with the training jobs that the tuning job launches.

', 'ModelPackageValidationSpecification$ValidationRole' => '

The IAM roles to be used for the validation of the model package.

', 'RenderUiTemplateRequest$RoleArn' => '

The Amazon Resource Name (ARN) that has access to the S3 objects that are used by the template.

', 'TrainingJob$RoleArn' => '

The AWS Identity and Access Management (IAM) role configured for the training job.

', 'UpdateNotebookInstanceInput$RoleArn' => '

The Amazon Resource Name (ARN) of the IAM role that Amazon SageMaker can assume to access the notebook instance. For more information, see Amazon SageMaker Roles.

To be able to pass this role to Amazon SageMaker, the caller of this API must have the iam:PassRole permission.

', ], ], 'RootAccess' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$RootAccess' => '

Whether root access is enabled or disabled for users of the notebook instance. The default value is Enabled.

Lifecycle configurations need root access to be able to set up a notebook instance. Because of this, lifecycle configurations associated with a notebook instance always run with root access even if you disable root access for users.

', 'DescribeNotebookInstanceOutput$RootAccess' => '

Whether root access is enabled or disabled for users of the notebook instance.

Lifecycle configurations need root access to be able to set up a notebook instance. Because of this, lifecycle configurations associated with a notebook instance always run with root access even if you disable root access for users.

', 'UpdateNotebookInstanceInput$RootAccess' => '

Whether root access is enabled or disabled for users of the notebook instance. The default value is Enabled.

If you set this to Disabled, users don\'t have root access on the notebook instance, but lifecycle configuration scripts still run with root permissions.
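
Since RootAccess is one of the new parameters in this release, a hedged sketch of disabling it at creation time with the AWS SDK for PHP (the instance name, type, and role ARN are hypothetical):

$client->createNotebookInstance([
    'NotebookInstanceName' => 'my-notebook',                                  // hypothetical
    'InstanceType'         => 'ml.t2.medium',
    'RoleArn'              => 'arn:aws:iam::123456789012:role/SageMakerRole', // hypothetical
    // Disabled removes root access for users; lifecycle configuration
    // scripts still run with root permissions, per the note above.
    'RootAccess'           => 'Disabled',
]);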

', ], ], 'S3DataDistribution' => [ 'base' => NULL, 'refs' => [ 'S3DataSource$S3DataDistributionType' => '

If you want Amazon SageMaker to replicate the entire dataset on each ML compute instance that is launched for model training, specify FullyReplicated.

If you want Amazon SageMaker to replicate a subset of data on each ML compute instance that is launched for model training, specify ShardedByS3Key. If there are n ML compute instances launched for a training job, each instance gets approximately 1/n of the number of S3 objects. In this case, model training on each machine uses only the subset of training data.

Don\'t choose more ML compute instances for training than available S3 objects. If you do, some nodes won\'t get any data and you will pay for nodes that aren\'t getting any training data. This applies in both File and Pipe modes. Keep this in mind when developing algorithms.

In distributed training, where you use multiple ML compute EC2 instances, you might choose ShardedByS3Key. If the algorithm requires copying training data to the ML storage volume (when TrainingInputMode is set to File), this copies 1/n of the number of objects.

', ], ], 'S3DataSource' => [ 'base' => '

Describes the S3 data source.

', 'refs' => [ 'DataSource$S3DataSource' => '

The S3 location of the data source that is associated with a channel.

', ], ], 'S3DataType' => [ 'base' => NULL, 'refs' => [ 'S3DataSource$S3DataType' => '

If you choose S3Prefix, S3Uri identifies a key name prefix. Amazon SageMaker uses all objects that match the specified key name prefix for model training.

If you choose ManifestFile, S3Uri identifies an object that is a manifest file containing a list of object keys that you want Amazon SageMaker to use for model training.

If you choose AugmentedManifestFile, S3Uri identifies an object that is an augmented manifest file in JSON lines format. This file contains the data you want to use for model training. AugmentedManifestFile can only be used if the Channel\'s input mode is Pipe.

', 'TransformS3DataSource$S3DataType' => '

If you choose S3Prefix, S3Uri identifies a key name prefix. Amazon SageMaker uses all objects with the specified key name prefix for batch transform.

If you choose ManifestFile, S3Uri identifies an object that is a manifest file containing a list of object keys that you want Amazon SageMaker to use for batch transform.
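
To tie the S3DataType and S3DataDistributionType descriptions together, a sketch of one training input channel as it might be passed in CreateTrainingJob\'s InputDataConfig list via the AWS SDK for PHP; the channel name and bucket are hypothetical:

$channel = [
    'ChannelName' => 'train', // hypothetical
    'DataSource' => [
        'S3DataSource' => [
            'S3DataType'             => 'S3Prefix',        // use every object under the prefix
            'S3Uri'                  => 's3://bucketname/exampleprefix', // hypothetical prefix
            'S3DataDistributionType' => 'ShardedByS3Key',  // each instance gets ~1/n of the objects
        ],
    ],
];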

', ], ], 'S3Uri' => [ 'base' => NULL, 'refs' => [ 'CreateLabelingJobRequest$LabelCategoryConfigS3Uri' => '

The S3 URL of the file that defines the categories used to label the data objects.

The file is a JSON structure in the following format:

{

"document-version": "2018-11-28"

"labels": [

{

"label": "label 1"

},

{

"label": "label 2"

},

...

{

"label": "label n"

}

]

}

', 'DescribeLabelingJobResponse$LabelCategoryConfigS3Uri' => '

The S3 location of the JSON file that defines the categories used to label data objects.

The file is a JSON structure in the following format:

{

"document-version": "2018-11-28"

"labels": [

{

"label": "label 1"

},

{

"label": "label 2"

},

...

{

"label": "label n"

}

]

}

', 'InputConfig$S3Uri' => '

The S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

', 'LabelingJobOutput$OutputDatasetS3Uri' => '

The Amazon S3 bucket location of the manifest file for labeled data.

', 'LabelingJobOutputConfig$S3OutputPath' => '

The Amazon S3 location to write output data.

', 'LabelingJobS3DataSource$ManifestS3Uri' => '

The Amazon S3 location of the manifest file that describes the input data objects.

', 'ModelArtifacts$S3ModelArtifacts' => '

The path of the S3 object that contains the model artifacts. For example, s3://bucket-name/keynameprefix/model.tar.gz.

', 'OutputConfig$S3OutputLocation' => '

Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For example, s3://bucket-name/key-name-prefix.

', 'OutputDataConfig$S3OutputPath' => '

Identifies the S3 path where you want Amazon SageMaker to store the model artifacts. For example, s3://bucket-name/key-name-prefix.

', 'S3DataSource$S3Uri' => '

Depending on the value specified for the S3DataType, identifies either a key name prefix or a manifest. For example:

  • A key name prefix might look like this: s3://bucketname/exampleprefix.

  • A manifest might look like this: s3://bucketname/example.manifest

    The manifest is an S3 object which is a JSON file with the following format:

    [

    {"prefix": "s3://customer_bucket/some/prefix/"},

    "relative/path/to/custdata-1",

    "relative/path/custdata-2",

    ...

    ]

    The preceding JSON matches the following s3Uris:

    s3://customer_bucket/some/prefix/relative/path/to/custdata-1

    s3://customer_bucket/some/prefix/relative/path/custdata-2

    ...

    The complete set of S3Uris in this manifest is the input data for the channel for this data source. The object that each S3Uri points to must be readable by the IAM role that Amazon SageMaker uses to perform tasks on your behalf.

', 'TransformOutput$S3OutputPath' => '

The Amazon S3 path where you want Amazon SageMaker to store the results of the transform job. For example, s3://bucket-name/key-name-prefix.

For every S3 object used as input for the transform job, batch transform stores the transformed data with an .out suffix in a corresponding subfolder in the location in the output prefix. For example, for the input data stored at s3://bucket-name/input-name-prefix/dataset01/data.csv, batch transform stores the transformed data at s3://bucket-name/output-name-prefix/input-name-prefix/data.csv.out. Batch transform doesn\'t upload partially processed objects. For an input S3 object that contains multiple records, it creates an .out file only if the transform job succeeds on the entire file. When the input contains multiple S3 objects, the batch transform job processes the listed S3 objects and uploads only the output for successfully processed objects. If any object fails in the transform job, batch transform marks the job as failed to prompt investigation.

', 'TransformS3DataSource$S3Uri' => '

Depending on the value specified for the S3DataType, identifies either a key name prefix or a manifest. For example:

  • A key name prefix might look like this: s3://bucketname/exampleprefix.

  • A manifest might look like this: s3://bucketname/example.manifest

    The manifest is an S3 object which is a JSON file with the following format:

    [

    {"prefix": "s3://customer_bucket/some/prefix/"},

    "relative/path/to/custdata-1",

    "relative/path/custdata-2",

    ...

    ]

    The preceding JSON matches the following S3Uris:

    s3://customer_bucket/some/prefix/relative/path/to/custdata-1

    s3://customer_bucket/some/prefix/relative/path/custdata-2

    ...

    The complete set of S3Uris in this manifest constitutes the input data for the channel for this data source. The object that each S3Uri points to must be readable by the IAM role that Amazon SageMaker uses to perform tasks on your behalf.

', 'UiConfig$UiTemplateS3Uri' => '

The Amazon S3 bucket location of the UI template. For more information about the contents of a UI template, see Creating Your Custom Labeling Task Template.

', ], ], 'SearchExpression' => [ 'base' => '

A multi-expression that searches for the specified resource or resources in a search. All resource objects that satisfy the expression\'s condition are included in the search results. You must specify at least one subexpression, filter, or nested filter. A SearchExpression can contain up to twenty elements.

A SearchExpression contains the following components:

  • A list of Filter objects. Each filter defines a simple Boolean expression comprised of a resource property name, Boolean operator, and value.

  • A list of NestedFilter objects. Each nested filter defines a list of Boolean expressions using a list of resource properties. A nested filter is satisfied if a single object in the list satisfies all Boolean expressions.

  • A list of SearchExpression objects. A search expression object can be nested in a list of search expression objects.

  • A Boolean operator: And or Or.

', 'refs' => [ 'SearchExpressionList$member' => NULL, 'SearchRequest$SearchExpression' => '

A Boolean conditional statement. Resource objects must satisfy this condition to be included in search results. You must provide at least one subexpression, filter, or nested filter. The maximum number of recursive SubExpressions, NestedFilters, and Filters that can be included in a SearchExpression object is 50.
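
A hedged sketch of a Search call built from the components listed above, reusing the $client from the earlier sketch; the filter values are hypothetical:

$result = $client->search([
    'Resource' => 'TrainingJob',
    'SearchExpression' => [
        // A single simple Boolean expression; NestedFilters and
        // SubExpressions could be combined here with a Boolean Operator.
        'Filters' => [
            [
                'Name'     => 'TrainingJobStatus',
                'Operator' => 'Equals',
                'Value'    => 'Completed', // hypothetical filter
            ],
        ],
    ],
    'SortBy'    => 'LastModifiedTime',
    'SortOrder' => 'Descending',
]);

foreach ($result['Results'] as $record) {
    echo $record['TrainingJob']['TrainingJobName'], PHP_EOL;
}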

', ], ], 'SearchExpressionList' => [ 'base' => NULL, 'refs' => [ 'SearchExpression$SubExpressions' => '

A list of search expression objects.

', ], ], 'SearchRecord' => [ 'base' => '

An individual search result record that contains a single resource object.

', 'refs' => [ 'SearchResultsList$member' => NULL, ], ], 'SearchRequest' => [ 'base' => NULL, 'refs' => [], ], 'SearchResponse' => [ 'base' => NULL, 'refs' => [], ], 'SearchResultsList' => [ 'base' => NULL, 'refs' => [ 'SearchResponse$Results' => '

A list of SearchResult objects.

', ], ], 'SearchSortOrder' => [ 'base' => NULL, 'refs' => [ 'SearchRequest$SortOrder' => '

How SearchResults are ordered. Valid values are Ascending or Descending. The default is Descending.

', ], ], 'SecondaryStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$SecondaryStatus' => '

Provides detailed information about the state of the training job. For detailed information on the secondary status of the training job, see StatusMessage under SecondaryStatusTransition.

Amazon SageMaker provides primary statuses and secondary statuses that apply to each of them:

InProgress
  • Starting - Starting the training job.

  • Downloading - An optional stage for algorithms that support File training input mode. It indicates that data is being downloaded to the ML storage volumes.

  • Training - Training is in progress.

  • Uploading - Training is complete and the model artifacts are being uploaded to the S3 location.

Completed
  • Completed - The training job has completed.

Failed
  • Failed - The training job has failed. The reason for the failure is returned in the FailureReason field of DescribeTrainingJobResponse.

Stopped
  • MaxRuntimeExceeded - The job stopped because it exceeded the maximum allowed runtime.

  • Stopped - The training job has stopped.

Stopping
  • Stopping - Stopping the training job.

Valid values for SecondaryStatus are subject to change.

We no longer support the following secondary statuses:

  • LaunchingMLInstances

  • PreparingTrainingStack

  • DownloadingTrainingImage
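
A short sketch of reading these statuses with the AWS SDK for PHP; per the StatusMessage guidance later in this file, code should branch on status values rather than on message text (the job name is hypothetical):

$result = $client->describeTrainingJob([
    'TrainingJobName' => 'my-training-job', // hypothetical
]);

// Primary and secondary status values; note that valid SecondaryStatus
// values are subject to change.
printf("%s / %s\n", $result['TrainingJobStatus'], $result['SecondaryStatus']);

// The transition history pairs each secondary status with a free-form,
// informational-only message.
foreach ($result['SecondaryStatusTransitions'] as $transition) {
    echo $transition['Status'], ': ', $transition['StatusMessage'] ?? '', PHP_EOL;
}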

', 'SecondaryStatusTransition$Status' => '

Contains secondary status information from a training job.

Status might be one of the following secondary statuses:

InProgress
  • Starting - Starting the training job.

  • Downloading - An optional stage for algorithms that support File training input mode. It indicates that data is being downloaded to the ML storage volumes.

  • Training - Training is in progress.

  • Uploading - Training is complete and the model artifacts are being uploaded to the S3 location.

Completed
  • Completed - The training job has completed.

Failed
  • Failed - The training job has failed. The reason for the failure is returned in the FailureReason field of DescribeTrainingJobResponse.

Stopped
  • MaxRuntimeExceeded - The job stopped because it exceeded the maximum allowed runtime.

  • Stopped - The training job has stopped.

Stopping
  • Stopping - Stopping the training job.

We no longer support the following secondary statuses:

  • LaunchingMLInstances

  • PreparingTrainingStack

  • DownloadingTrainingImage

', 'TrainingJob$SecondaryStatus' => '

Provides detailed information about the state of the training job. For detailed information about the secondary status of the training job, see StatusMessage under SecondaryStatusTransition.

Amazon SageMaker provides primary statuses and secondary statuses that apply to each of them:

InProgress
  • Starting - Starting the training job.

  • Downloading - An optional stage for algorithms that support File training input mode. It indicates that data is being downloaded to the ML storage volumes.

  • Training - Training is in progress.

  • Uploading - Training is complete and the model artifacts are being uploaded to the S3 location.

Completed
  • Completed - The training job has completed.

Failed
  • Failed - The training job has failed. The reason for the failure is returned in the FailureReason field of DescribeTrainingJobResponse.

Stopped
  • MaxRuntimeExceeded - The job stopped because it exceeded the maximum allowed runtime.

  • Stopped - The training job has stopped.

Stopping
  • Stopping - Stopping the training job.

Valid values for SecondaryStatus are subject to change.

We no longer support the following secondary statuses:

  • LaunchingMLInstances

  • PreparingTrainingStack

  • DownloadingTrainingImage

', ], ], 'SecondaryStatusTransition' => [ 'base' => '

An array element of DescribeTrainingJobResponse$SecondaryStatusTransitions. It provides additional details about a status that the training job has transitioned through. A training job can be in one of several states, for example, starting, downloading, training, or uploading. Within each state, there are a number of intermediate states. For example, within the starting state, Amazon SageMaker could be starting the training job or launching the ML instances. These transitional states are referred to as the job\'s secondary status.

', 'refs' => [ 'SecondaryStatusTransitions$member' => NULL, ], ], 'SecondaryStatusTransitions' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$SecondaryStatusTransitions' => '

A history of all of the secondary statuses that the training job has transitioned through.

', 'TrainingJob$SecondaryStatusTransitions' => '

A history of all of the secondary statuses that the training job has transitioned through.

', ], ], 'SecretArn' => [ 'base' => NULL, 'refs' => [ 'GitConfig$SecretArn' => '

The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the git repository. The secret must have a staging label of AWSCURRENT and must be in the following format:

{"username": UserName, "password": Password}

', 'GitConfigForUpdate$SecretArn' => '

The Amazon Resource Name (ARN) of the AWS Secrets Manager secret that contains the credentials used to access the git repository. The secret must have a staging label of AWSCURRENT and must be in the following format:

{"username": UserName, "password": Password}

', ], ], 'SecurityGroupId' => [ 'base' => NULL, 'refs' => [ 'SecurityGroupIds$member' => NULL, 'VpcSecurityGroupIds$member' => NULL, ], ], 'SecurityGroupIds' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$SecurityGroupIds' => '

The VPC security group IDs, in the form sg-xxxxxxxx. The security groups must be for the same VPC as specified in the subnet.

', 'DescribeNotebookInstanceOutput$SecurityGroups' => '

The IDs of the VPC security groups.

', ], ], 'Seed' => [ 'base' => NULL, 'refs' => [ 'ShuffleConfig$Seed' => '

Determines the shuffling order in the ShuffleConfig value.

', ], ], 'SessionExpirationDurationInSeconds' => [ 'base' => NULL, 'refs' => [ 'CreatePresignedNotebookInstanceUrlInput$SessionExpirationDurationInSeconds' => '

The duration of the session, in seconds. The default is 12 hours.
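
A minimal sketch of overriding that default with the AWS SDK for PHP (the instance name and the 30-minute duration are hypothetical):

$result = $client->createPresignedNotebookInstanceUrl([
    'NotebookInstanceName'               => 'my-notebook', // hypothetical
    'SessionExpirationDurationInSeconds' => 1800,          // 30 minutes instead of the 12-hour default
]);

echo $result['AuthorizedUrl'], PHP_EOL;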

', ], ], 'ShuffleConfig' => [ 'base' => '

A configuration for a shuffle option for input data in a channel. If you use S3Prefix for S3DataType, the results of the S3 key prefix matches are shuffled. If you use ManifestFile, the order of the S3 object references in the ManifestFile is shuffled. If you use AugmentedManifestFile, the order of the JSON lines in the AugmentedManifestFile is shuffled. The shuffling order is determined using the Seed value.

For Pipe input mode, shuffling is done at the start of every epoch. With large datasets, this ensures that the order of the training data is different for each epoch, and it helps reduce bias and possible overfitting. In a multi-node training job when ShuffleConfig is combined with S3DataDistributionType of ShardedByS3Key, the data is shuffled across nodes so that the content sent to a particular node on the first epoch might be sent to a different node on the second epoch.

', 'refs' => [ 'Channel$ShuffleConfig' => '

A configuration for a shuffle option for input data in a channel. If you use S3Prefix for S3DataType, this shuffles the results of the S3 key prefix matches. If you use ManifestFile, the order of the S3 object references in the ManifestFile is shuffled. If you use AugmentedManifestFile, the order of the JSON lines in the AugmentedManifestFile is shuffled. The shuffling order is determined using the Seed value.

For Pipe input mode, shuffling is done at the start of every epoch. With large datasets, this ensures that the order of the training data is different for each epoch, and it helps reduce bias and possible overfitting. In a multi-node training job when ShuffleConfig is combined with S3DataDistributionType of ShardedByS3Key, the data is shuffled across nodes so that the content sent to a particular node on the first epoch might be sent to a different node on the second epoch.
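
A sketch of a channel that enables this shuffling, as it might appear in a CreateTrainingJob request via the AWS SDK for PHP; the seed and names are hypothetical:

$channel = [
    'ChannelName'   => 'train',          // hypothetical
    'InputMode'     => 'Pipe',           // shuffling is done at the start of every epoch
    'ShuffleConfig' => ['Seed' => 1234], // hypothetical seed; determines the shuffling order
    // 'DataSource' => [...] as in the S3DataSource sketch earlier in this file
];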

', ], ], 'SortBy' => [ 'base' => NULL, 'refs' => [ 'ListLabelingJobsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', 'ListTrainingJobsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', 'ListTransformJobsRequest$SortBy' => '

The field to sort results by. The default is CreationTime.

', ], ], 'SortOrder' => [ 'base' => NULL, 'refs' => [ 'ListAlgorithmsInput$SortOrder' => '

The sort order for the results. The default is Ascending.

', 'ListCompilationJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListHyperParameterTuningJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListLabelingJobsForWorkteamRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListLabelingJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListModelPackagesInput$SortOrder' => '

The sort order for the results. The default is Ascending.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListTrainingJobsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', 'ListTransformJobsRequest$SortOrder' => '

The sort order for results. The default is Descending.

', 'ListWorkteamsRequest$SortOrder' => '

The sort order for results. The default is Ascending.

', ], ], 'SourceAlgorithm' => [ 'base' => '

Specifies an algorithm that was used to create the model package. The algorithm must be either an algorithm resource in your Amazon SageMaker account or an algorithm in AWS Marketplace that you are subscribed to.

', 'refs' => [ 'SourceAlgorithmList$member' => NULL, ], ], 'SourceAlgorithmList' => [ 'base' => NULL, 'refs' => [ 'SourceAlgorithmSpecification$SourceAlgorithms' => '

A list of the algorithms that were used to create a model package.

', ], ], 'SourceAlgorithmSpecification' => [ 'base' => '

A list of algorithms that were used to create a model package.

', 'refs' => [ 'CreateModelPackageInput$SourceAlgorithmSpecification' => '

Details about the algorithm that was used to create the model package.

', 'DescribeModelPackageOutput$SourceAlgorithmSpecification' => '

Details about the algorithm that was used to create the model package.

', ], ], 'SplitType' => [ 'base' => NULL, 'refs' => [ 'TransformInput$SplitType' => '

The method to use to split the transform job\'s data files into smaller batches. Splitting is necessary when the total size of each object is too large to fit in a single request. You can also use data splitting to improve performance by processing multiple concurrent mini-batches. The default value for SplitType is None, which indicates that input data files are not split, and request payloads contain the entire contents of an input object. Set the value of this parameter to Line to split records on a newline character boundary. SplitType also supports a number of record-oriented binary data formats.

When splitting is enabled, the size of a mini-batch depends on the values of the BatchStrategy and MaxPayloadInMB parameters. When the value of BatchStrategy is MultiRecord, Amazon SageMaker sends the maximum number of records in each request, up to the MaxPayloadInMB limit. If the value of BatchStrategy is SingleRecord, Amazon SageMaker sends individual records in each request.

Some data formats represent a record as a binary payload wrapped with extra padding bytes. When splitting is applied to a binary data format, padding is removed if the value of BatchStrategy is set to SingleRecord. Padding is not removed if the value of BatchStrategy is set to MultiRecord.

For more information about RecordIO, see Data Format in the MXNet documentation. For more information about TFRecord, see Consuming TFRecord data in the TensorFlow documentation.

', ], ], 'StartNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'StatusMessage' => [ 'base' => NULL, 'refs' => [ 'SecondaryStatusTransition$StatusMessage' => '

A detailed description of the progress within a secondary status.

Amazon SageMaker provides secondary statuses and status messages that apply to each of them:

Starting
  • Starting the training job.

  • Launching requested ML instances.

  • Insufficient capacity error from EC2 while launching instances, retrying!

  • Launched instance was unhealthy, replacing it!

  • Preparing the instances for training.

Training
  • Downloading the training image.

  • Training image download completed. Training in progress.

Status messages are subject to change. Therefore, we recommend not including them in code that programmatically initiates actions. For example, don\'t use status messages in if statements.

To have an overview of your training job\'s progress, view TrainingJobStatus and SecondaryStatus in DescribeTrainingJobResponse, and StatusMessage together. For example, at the start of a training job, you might see the following:

  • TrainingJobStatus - InProgress

  • SecondaryStatus - Training

  • StatusMessage - Downloading the training image

', ], ], 'StopCompilationJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopHyperParameterTuningJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopLabelingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'StopTrainingJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StopTransformJobRequest' => [ 'base' => NULL, 'refs' => [], ], 'StoppingCondition' => [ 'base' => '

Specifies how long model training can run. When model training reaches the limit, Amazon SageMaker ends the training job. Use this API to cap model training cost.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts, so that the results of training are not lost.

Training algorithms provided by Amazon SageMaker automatically save the intermediate results of a model training job on a best-effort basis (the model might not be ready to save at some stages, for example, when training has just started). This intermediate data is a valid model artifact. You can use it to create a model (CreateModel).
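
As a rough fragment of a CreateTrainingJob request, the cap described above is expressed through the MaxRuntimeInSeconds member; the one-hour value is hypothetical:

// Fragment of a CreateTrainingJob request: end the job (SIGTERM, then a
// 120-second grace window) once it has run for one hour.
$request = [
    // ... other CreateTrainingJob parameters elided ...
    'StoppingCondition' => [
        'MaxRuntimeInSeconds' => 3600, // hypothetical cap
    ],
];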

', 'refs' => [ 'CreateCompilationJobRequest$StoppingCondition' => '

The duration allowed for model compilation.

', 'CreateTrainingJobRequest$StoppingCondition' => '

Sets a duration for training. Use this parameter to cap model training costs. To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts.

When Amazon SageMaker terminates a job because the stopping condition has been met, training algorithms provided by Amazon SageMaker save the intermediate results of the job. This intermediate data is a valid model artifact. You can use it to create a model using the CreateModel API.

', 'DescribeCompilationJobResponse$StoppingCondition' => '

The duration allowed for model compilation.

', 'DescribeTrainingJobResponse$StoppingCondition' => '

The condition under which to stop the training job.

', 'HyperParameterTrainingJobDefinition$StoppingCondition' => '

Sets a maximum duration for the training jobs that the tuning job launches. Use this parameter to limit model training costs.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal. This delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts.

When Amazon SageMaker terminates a job because the stopping condition has been met, training algorithms provided by Amazon SageMaker save the intermediate results of the job.

', 'TrainingJob$StoppingCondition' => '

The condition under which to stop the training job.

', 'TrainingJobDefinition$StoppingCondition' => '

Sets a duration for training. Use this parameter to cap model training costs.

To stop a job, Amazon SageMaker sends the algorithm the SIGTERM signal, which delays job termination for 120 seconds. Algorithms might use this 120-second window to save the model artifacts.

', ], ], 'String' => [ 'base' => NULL, 'refs' => [ 'AlgorithmStatusItem$FailureReason' => '

if the overall status is Failed, the reason for the failure.

', 'ModelPackageStatusItem$FailureReason' => '

if the overall status is Failed, the reason for the failure.

', 'ProductListings$member' => NULL, 'RenderUiTemplateResponse$RenderedContent' => '

A Liquid template that renders the HTML for the worker UI.

', 'RenderingError$Code' => '

A unique identifier for a specific class of errors.

', 'RenderingError$Message' => '

A human-readable message describing the error.

', 'SubscribedWorkteam$SellerName' => '

The name of the vendor in the Amazon Marketplace.

', 'SubscribedWorkteam$ListingId' => '

', 'Workteam$SubDomain' => '

The URI of the labeling job\'s user interface. Workers open this URI to start labeling your data objects.

', ], ], 'String200' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamRequest$Description' => '

A description of the work team.

', 'SubscribedWorkteam$MarketplaceTitle' => '

The title of the service provided by the vendor in the Amazon Marketplace.

', 'SubscribedWorkteam$MarketplaceDescription' => '

The description of the vendor from the Amazon Marketplace.

', 'UpdateWorkteamRequest$Description' => '

An updated description for the work team.

', 'Workteam$Description' => '

A description of the work team.

', ], ], 'SubnetId' => [ 'base' => NULL, 'refs' => [ 'CreateNotebookInstanceInput$SubnetId' => '

The ID of the subnet in a VPC to which you would like to have connectivity from your ML compute instance.

', 'DescribeNotebookInstanceOutput$SubnetId' => '

The ID of the VPC subnet.

', 'Subnets$member' => NULL, ], ], 'Subnets' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$Subnets' => '

The IDs of the subnets in the VPC to which you want to connect your training job or model.

', ], ], 'SubscribedWorkteam' => [ 'base' => '

Describes a work team of a vendor that does the labeling job.

', 'refs' => [ 'DescribeSubscribedWorkteamResponse$SubscribedWorkteam' => '

A Workteam instance that contains information about the work team.

', 'SubscribedWorkteams$member' => NULL, ], ], 'SubscribedWorkteams' => [ 'base' => NULL, 'refs' => [ 'ListSubscribedWorkteamsResponse$SubscribedWorkteams' => '

An array of Workteam objects, each describing a work team.

', ], ], 'Success' => [ 'base' => NULL, 'refs' => [ 'DeleteWorkteamResponse$Success' => '

Returns true if the work team was successfully deleted; otherwise, returns false.

', ], ], 'SuggestionQuery' => [ 'base' => '

Limits the property names that are included in the response.

', 'refs' => [ 'GetSearchSuggestionsRequest$SuggestionQuery' => '

Limits the property names that are included in the response.

', ], ], 'Tag' => [ 'base' => '

Describes a tag.

', 'refs' => [ 'TagList$member' => NULL, ], ], 'TagKey' => [ 'base' => NULL, 'refs' => [ 'Tag$Key' => '

The tag key.

', 'TagKeyList$member' => NULL, ], ], 'TagKeyList' => [ 'base' => NULL, 'refs' => [ 'DeleteTagsInput$TagKeys' => '

An array of one or more tag keys to delete.

', ], ], 'TagList' => [ 'base' => NULL, 'refs' => [ 'AddTagsInput$Tags' => '

An array of Tag objects. Each tag is a key-value pair. Only the key parameter is required. If you don\'t specify a value, Amazon SageMaker sets the value to an empty string.
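
A small sketch of tagging a resource with the AWS SDK for PHP; the ARN and tag pairs are hypothetical:

$client->addTags([
    'ResourceArn' => 'arn:aws:sagemaker:us-east-1:123456789012:training-job/my-job', // hypothetical
    'Tags' => [
        ['Key' => 'project', 'Value' => 'demo'], // hypothetical pair
        ['Key' => 'owner',   'Value' => ''],     // an empty value is allowed
    ],
]);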

', 'AddTagsOutput$Tags' => '

A list of tags associated with the Amazon SageMaker resource.

', 'CreateEndpointConfigInput$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateEndpointInput$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateHyperParameterTuningJobRequest$Tags' => '

An array of key-value pairs. You can use tags to categorize your AWS resources in different ways, for example, by purpose, owner, or environment. For more information, see AWS Tagging Strategies.

Tags that you specify for the tuning job are also added to all training jobs that the tuning job launches.

', 'CreateLabelingJobRequest$Tags' => '

An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateModelInput$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateNotebookInstanceInput$Tags' => '

A list of tags to associate with the notebook instance. You can add tags later by using the CreateTags API.

', 'CreateTrainingJobRequest$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateTransformJobRequest$Tags' => '

(Optional) An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'CreateWorkteamRequest$Tags' => '

', 'DescribeLabelingJobResponse$Tags' => '

An array of key/value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', 'ListTagsOutput$Tags' => '

An array of Tag objects, each with a tag key and a value.

', 'TrainingJob$Tags' => '

An array of key-value pairs. For more information, see Using Cost Allocation Tags in the AWS Billing and Cost Management User Guide.

', ], ], 'TagValue' => [ 'base' => NULL, 'refs' => [ 'Tag$Value' => '

The tag value.

', ], ], 'TargetDevice' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationTargetDevice' => '

The type of device that the model will run on after compilation has completed.

', 'OutputConfig$TargetDevice' => '

Identifies the device that you want to run your model on after it has been compiled. For example: ml_c5.

', ], ], 'TaskAvailabilityLifetimeInSeconds' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskAvailabilityLifetimeInSeconds' => '

The length of time that a task remains available for labeling by human workers.

', ], ], 'TaskCount' => [ 'base' => NULL, 'refs' => [ 'DesiredWeightAndCapacity$DesiredInstanceCount' => '

The variant\'s capacity.

', 'ProductionVariant$InitialInstanceCount' => '

Number of instances to launch initially.

', 'ProductionVariantSummary$CurrentInstanceCount' => '

The number of instances associated with the variant.

', 'ProductionVariantSummary$DesiredInstanceCount' => '

The number of instances requested in the UpdateEndpointWeightsAndCapacities request.

', ], ], 'TaskDescription' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskDescription' => '

A description of the task for your human workers.

', ], ], 'TaskInput' => [ 'base' => NULL, 'refs' => [ 'RenderableTask$Input' => '

A JSON object that contains values for the variables defined in the template. It is made available to the template under the substitution variable task.input. For example, if you define a variable task.input.text in your template, you can supply the variable in the JSON object as "text": "sample text".

', ], ], 'TaskKeyword' => [ 'base' => NULL, 'refs' => [ 'TaskKeywords$member' => NULL, ], ], 'TaskKeywords' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskKeywords' => '

Keywords used to describe the task so that workers on Amazon Mechanical Turk can discover the task.

', ], ], 'TaskTimeLimitInSeconds' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskTimeLimitInSeconds' => '

The amount of time that a worker has to complete a task.

', ], ], 'TaskTitle' => [ 'base' => NULL, 'refs' => [ 'HumanTaskConfig$TaskTitle' => '

A title for the task for your human workers.

', ], ], 'TemplateContent' => [ 'base' => NULL, 'refs' => [ 'UiTemplate$Content' => '

The content of the Liquid template for the worker user interface.

', ], ], 'TenthFractionsOfACent' => [ 'base' => NULL, 'refs' => [ 'USD$TenthFractionsOfACent' => '

Fractions of a cent, in tenths.

', ], ], 'Timestamp' => [ 'base' => NULL, 'refs' => [ 'CompilationJobSummary$CompilationStartTime' => '

The time when the model compilation job started.

', 'CompilationJobSummary$CompilationEndTime' => '

The time when the model compilation job completed.

', 'DeployedImage$ResolutionTime' => '

The date and time when the image path for the model resolved to the ResolvedImage.

', 'DescribeCompilationJobResponse$CompilationStartTime' => '

The time when the model compilation job started the CompilationJob instances.

You are billed for the time between this timestamp and the timestamp in the DescribeCompilationJobResponse$CompilationEndTime field. In Amazon CloudWatch Logs, the start time might be later than this time. That\'s because it takes time to download the compilation job, which depends on the size of the compilation job container.

', 'DescribeCompilationJobResponse$CompilationEndTime' => '

The time when the model compilation job on a compilation job instance ended. For a successful or stopped job, this is when the job\'s model artifacts have finished uploading. For a failed job, this is when Amazon SageMaker detected that the job failed.

', 'DescribeEndpointConfigOutput$CreationTime' => '

A timestamp that shows when the endpoint configuration was created.

', 'DescribeEndpointOutput$CreationTime' => '

A timestamp that shows when the endpoint was created.

', 'DescribeEndpointOutput$LastModifiedTime' => '

A timestamp that shows when the endpoint was last modified.

', 'DescribeHyperParameterTuningJobResponse$CreationTime' => '

The date and time that the tuning job started.

', 'DescribeHyperParameterTuningJobResponse$HyperParameterTuningEndTime' => '

The date and time that the tuning job ended.

', 'DescribeHyperParameterTuningJobResponse$LastModifiedTime' => '

The date and time that the status of the tuning job was modified.

', 'DescribeLabelingJobResponse$CreationTime' => '

The date and time that the labeling job was created.

', 'DescribeLabelingJobResponse$LastModifiedTime' => '

The date and time that the labeling job was last updated.

', 'DescribeModelOutput$CreationTime' => '

A timestamp that shows when the model was created.

', 'DescribeTrainingJobResponse$CreationTime' => '

A timestamp that indicates when the training job was created.

', 'DescribeTrainingJobResponse$TrainingStartTime' => '

Indicates the time when the training job starts on training instances. You are billed for the time interval between this time and the value of TrainingEndTime. The start time in CloudWatch Logs might be later than this time. The difference is due to the time it takes to download the training data and to the size of the training container.

', 'DescribeTrainingJobResponse$TrainingEndTime' => '

Indicates the time when the training job ends on training instances. You are billed for the time interval between the value of TrainingStartTime and this time. For successful jobs and stopped jobs, this is the time after model artifacts are uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

', 'DescribeTrainingJobResponse$LastModifiedTime' => '

A timestamp that indicates when the status of the training job was last modified.

', 'DescribeTransformJobResponse$CreationTime' => '

A timestamp that shows when the transform Job was created.

', 'DescribeTransformJobResponse$TransformStartTime' => '

Indicates when the transform job starts on ML instances. You are billed for the time interval between this time and the value of TransformEndTime.

', 'DescribeTransformJobResponse$TransformEndTime' => '

Indicates when the transform job has been completed, or has stopped or failed. You are billed for the time interval between this time and the value of TransformStartTime.

', 'EndpointConfigSummary$CreationTime' => '

A timestamp that shows when the endpoint configuration was created.

', 'EndpointSummary$CreationTime' => '

A timestamp that shows when the endpoint was created.

', 'EndpointSummary$LastModifiedTime' => '

A timestamp that shows when the endpoint was last modified.

', 'HyperParameterTrainingJobSummary$CreationTime' => '

The date and time that the training job was created.

', 'HyperParameterTrainingJobSummary$TrainingStartTime' => '

The date and time that the training job started.

', 'HyperParameterTrainingJobSummary$TrainingEndTime' => '

Specifies the time when the training job ends on training instances. You are billed for the time interval between the value of TrainingStartTime and this time. For successful jobs and stopped jobs, this is the time after model artifacts are uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.

', 'HyperParameterTuningJobSummary$CreationTime' => '

The date and time that the tuning job was created.

', 'HyperParameterTuningJobSummary$HyperParameterTuningEndTime' => '

The date and time that the tuning job ended.

', 'HyperParameterTuningJobSummary$LastModifiedTime' => '

The date and time that the tuning job was modified.

', 'LabelingJobForWorkteamSummary$CreationTime' => '

The date and time that the labeling job was created.

', 'LabelingJobSummary$CreationTime' => '

The date and time that the job was created (timestamp).

', 'LabelingJobSummary$LastModifiedTime' => '

The date and time that the job was last modified (timestamp).

', 'ListCodeRepositoriesInput$LastModifiedTimeAfter' => '

A filter that returns only Git repositories that were last modified after the specified time.

', 'ListCodeRepositoriesInput$LastModifiedTimeBefore' => '

A filter that returns only Git repositories that were last modified before the specified time.

', 'ListEndpointConfigsInput$CreationTimeBefore' => '

A filter that returns only endpoint configurations created before the specified time (timestamp).

', 'ListEndpointConfigsInput$CreationTimeAfter' => '

A filter that returns only endpoint configurations created after the specified time (timestamp).

', 'ListEndpointsInput$CreationTimeBefore' => '

A filter that returns only endpoints that were created before the specified time (timestamp).

', 'ListEndpointsInput$CreationTimeAfter' => '

A filter that returns only endpoints that were created after the specified time (timestamp).

', 'ListEndpointsInput$LastModifiedTimeBefore' => '

A filter that returns only endpoints that were modified before the specified timestamp.

', 'ListEndpointsInput$LastModifiedTimeAfter' => '

A filter that returns only endpoints that were modified after the specified timestamp.

', 'ListHyperParameterTuningJobsRequest$CreationTimeAfter' => '

A filter that returns only tuning jobs that were created after the specified time.

', 'ListHyperParameterTuningJobsRequest$CreationTimeBefore' => '

A filter that returns only tuning jobs that were created before the specified time.

', 'ListHyperParameterTuningJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only tuning jobs that were modified after the specified time.

', 'ListHyperParameterTuningJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only tuning jobs that were modified before the specified time.

', 'ListLabelingJobsForWorkteamRequest$CreationTimeAfter' => '

A filter that returns only labeling jobs created after the specified time (timestamp).

', 'ListLabelingJobsForWorkteamRequest$CreationTimeBefore' => '

A filter that returns only labeling jobs created before the specified time (timestamp).

', 'ListLabelingJobsRequest$CreationTimeAfter' => '

A filter that returns only labeling jobs created after the specified time (timestamp).

', 'ListLabelingJobsRequest$CreationTimeBefore' => '

A filter that returns only labeling jobs created before the specified time (timestamp).

', 'ListLabelingJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only labeling jobs modified after the specified time (timestamp).

', 'ListLabelingJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only labeling jobs modified before the specified time (timestamp).

', 'ListModelsInput$CreationTimeBefore' => '

A filter that returns only models created before the specified time (timestamp).

', 'ListModelsInput$CreationTimeAfter' => '

A filter that returns only models created after the specified time (timestamp).

', 'ListTrainingJobsRequest$CreationTimeAfter' => '

A filter that returns only training jobs created after the specified time (timestamp).

', 'ListTrainingJobsRequest$CreationTimeBefore' => '

A filter that returns only training jobs created before the specified time (timestamp).

', 'ListTrainingJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only training jobs modified after the specified time (timestamp).

', 'ListTrainingJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only training jobs modified before the specified time (timestamp).

', 'ListTransformJobsRequest$CreationTimeAfter' => '

A filter that returns only transform jobs created after the specified time.

', 'ListTransformJobsRequest$CreationTimeBefore' => '

A filter that returns only transform jobs created before the specified time.

', 'ListTransformJobsRequest$LastModifiedTimeAfter' => '

A filter that returns only transform jobs modified after the specified time.

', 'ListTransformJobsRequest$LastModifiedTimeBefore' => '

A filter that returns only transform jobs modified before the specified time.

', 'MetricData$Timestamp' => '

The date and time that the algorithm emitted the metric.

', 'ModelSummary$CreationTime' => '

A timestamp that indicates when the model was created.

', 'SecondaryStatusTransition$StartTime' => '

A timestamp that shows when the training job transitioned to the current secondary status state.

', 'SecondaryStatusTransition$EndTime' => '

A timestamp that shows when the training job transitioned out of this secondary status state into another secondary status state or when the training job has ended.

', 'TrainingJob$CreationTime' => '

A timestamp that indicates when the training job was created.

', 'TrainingJob$TrainingStartTime' => '

Indicates the time when the training job starts on training instances. You are billed for the time interval between this time and the value of TrainingEndTime. The start time in CloudWatch Logs might be later than this time. The difference is due to the time it takes to download the training data and to the size of the training container.

', 'TrainingJob$TrainingEndTime' => '

Indicates the time when the training job ends on training instances. You are billed for the time interval between the value of TrainingStartTime and this time. For successful jobs and stopped jobs, this is the time after model artifacts are uploaded. For failed jobs, this is the time when Amazon SageMaker detects a job failure.
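Because billing runs from TrainingStartTime to TrainingEndTime, the billed interval can be computed directly from a DescribeTrainingJob response; a minimal sketch (the job name is a placeholder):

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$job = $sm->describeTrainingJob(['TrainingJobName' => 'my-training-job']);

// Both fields deserialize to \DateTime-compatible objects, so the billed
// interval is a simple difference of Unix timestamps.
if (isset($job['TrainingStartTime'], $job['TrainingEndTime'])) {
    $billedSeconds = $job['TrainingEndTime']->getTimestamp()
        - $job['TrainingStartTime']->getTimestamp();
    echo "Billed for ~{$billedSeconds}s of training time\n";
}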

', 'TrainingJob$LastModifiedTime' => '

A timestamp that indicates when the status of the training job was last modified.

', 'TrainingJobSummary$CreationTime' => '

A timestamp that shows when the training job was created.

', 'TrainingJobSummary$TrainingEndTime' => '

A timestamp that shows when the training job ended. This field is set only if the training job has one of the terminal statuses (Completed, Failed, or Stopped).

', 'TrainingJobSummary$LastModifiedTime' => '

Timestamp when the training job was last modified.

', 'TransformJobSummary$CreationTime' => '

A timestamp that shows when the transform job was created.

', 'TransformJobSummary$TransformEndTime' => '

Indicates when the transform job ends on compute instances. For successful jobs and stopped jobs, this is the exact time recorded after the results are uploaded. For failed jobs, this is when Amazon SageMaker detected that the job failed.

', 'TransformJobSummary$LastModifiedTime' => '

Indicates when the transform job was last modified.

', 'Workteam$CreateDate' => '

The date and time that the work team was created (timestamp).

', 'Workteam$LastUpdatedDate' => '

The date and time that the work team was last updated (timestamp).

', ], ], 'TrainingInputMode' => [ 'base' => NULL, 'refs' => [ 'AlgorithmSpecification$TrainingInputMode' => '

The input mode that the algorithm supports. For the input modes that Amazon SageMaker algorithms support, see Algorithms. If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume and mounts the directory to the Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

In File mode, make sure that you provision an ML storage volume with sufficient capacity to accommodate the data downloaded from S3. In addition to the training data, the ML storage volume also stores the output model. The algorithm container also uses the ML storage volume to store intermediate information, if any.

For distributed algorithms that use File mode, training data is distributed uniformly, and your training duration is predictable if the input data objects are approximately the same size. Amazon SageMaker does not split the files any further for model training. If the object sizes are skewed, training won\'t be optimal because the data distribution is also skewed: one host in the training cluster is overloaded and becomes a bottleneck in training.

', 'Channel$InputMode' => '

(Optional) The input mode to use for the data channel in a training job. If you don\'t set a value for InputMode, Amazon SageMaker uses the value set for TrainingInputMode. Use this parameter to override the TrainingInputMode setting in an AlgorithmSpecification request when you have a channel that needs a different input mode from the training job\'s general setting. To download the data from Amazon Simple Storage Service (Amazon S3) to the provisioned ML storage volume and mount the directory to a Docker volume, use File input mode. To stream data directly from Amazon S3 to the container, choose Pipe input mode.

To use a model for incremental training, choose File input mode.
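A sketch of how the channel-level override relates to the job-level setting in a CreateTrainingJob request; only the relevant fragment is shown, and the image URI, channel names, and S3 paths are placeholders:

// Fragment of CreateTrainingJob parameters illustrating the override:
$params = [
    'AlgorithmSpecification' => [
        'TrainingImage'     => '123456789012.dkr.ecr.us-west-2.amazonaws.com/my-algo:latest', // placeholder
        'TrainingInputMode' => 'Pipe',   // job-level default: stream from S3
    ],
    'InputDataConfig' => [
        [
            'ChannelName' => 'train',    // inherits Pipe from the job level
            'DataSource'  => ['S3DataSource' => [
                'S3DataType' => 'S3Prefix',
                'S3Uri'      => 's3://my-bucket/train/', // placeholder
            ]],
        ],
        [
            'ChannelName' => 'model',
            'InputMode'   => 'File',     // per-channel override, e.g. for incremental training
            'DataSource'  => ['S3DataSource' => [
                'S3DataType' => 'S3Prefix',
                'S3Uri'      => 's3://my-bucket/model/', // placeholder
            ]],
        ],
    ],
];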

', 'HyperParameterAlgorithmSpecification$TrainingInputMode' => '

The input mode that the algorithm supports: File or Pipe. In File input mode, Amazon SageMaker downloads the training data from Amazon S3 to the storage volume that is attached to the training instance and mounts the directory to the Docker volume for the training container. In Pipe input mode, Amazon SageMaker streams data directly from Amazon S3 to the container.

If you specify File mode, make sure that you provision the storage volume that is attached to the training instance with enough capacity to accommodate the training data downloaded from Amazon S3, the model artifacts, and intermediate information.

For more information about input modes, see Algorithms.

', 'InputModes$member' => NULL, 'TrainingJobDefinition$TrainingInputMode' => '

The input mode used by the algorithm for the training job. For the input modes that Amazon SageMaker algorithms support, see Algorithms.

If an algorithm supports the File input mode, Amazon SageMaker downloads the training data from S3 to the provisioned ML storage volume and mounts the directory to the Docker volume for the training container. If an algorithm supports the Pipe input mode, Amazon SageMaker streams data directly from S3 to the container.

', ], ], 'TrainingInstanceCount' => [ 'base' => NULL, 'refs' => [ 'ResourceConfig$InstanceCount' => '

The number of ML compute instances to use. For distributed training, provide a value greater than 1.

', ], ], 'TrainingInstanceType' => [ 'base' => NULL, 'refs' => [ 'ResourceConfig$InstanceType' => '

The ML compute instance type.

', 'TrainingInstanceTypes$member' => NULL, ], ], 'TrainingInstanceTypes' => [ 'base' => NULL, 'refs' => [ 'TrainingSpecification$SupportedTrainingInstanceTypes' => '

A list of the instance types that this algorithm can use for training.

', ], ], 'TrainingJob' => [ 'base' => '

Contains information about a training job.

', 'refs' => [ 'SearchRecord$TrainingJob' => '

A TrainingJob object that is returned as part of a Search request.

', ], ], 'TrainingJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobResponse$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'DescribeTrainingJobResponse$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'HyperParameterTrainingJobSummary$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'TrainingJob$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', 'TrainingJobSummary$TrainingJobArn' => '

The Amazon Resource Name (ARN) of the training job.

', ], ], 'TrainingJobDefinition' => [ 'base' => '

Defines the input needed to run a training job using the algorithm.

', 'refs' => [ 'AlgorithmValidationProfile$TrainingJobDefinition' => '

The TrainingJobDefinition object that describes the training job that Amazon SageMaker runs to validate your algorithm.

', ], ], 'TrainingJobEarlyStoppingType' => [ 'base' => NULL, 'refs' => [ 'HyperParameterTuningJobConfig$TrainingJobEarlyStoppingType' => '

Specifies whether to use early stopping for training jobs launched by the hyperparameter tuning job. This can be one of the following values (the default value is OFF):

OFF

Training jobs launched by the hyperparameter tuning job do not use early stopping.

AUTO

Amazon SageMaker stops training jobs launched by the hyperparameter tuning job when they are unlikely to perform better than previously completed training jobs. For more information, see Stop Training Jobs Early.
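A sketch of where this setting sits in a CreateHyperParameterTuningJob request; only the tuning-config fragment is shown, and the strategy, metric, and limits are assumptions for illustration:

// Fragment of HyperParameterTuningJobConfig enabling early stopping:
$tuningConfig = [
    'Strategy' => 'Bayesian',
    'HyperParameterTuningJobObjective' => [
        'Type'       => 'Minimize',
        'MetricName' => 'validation:error',   // placeholder metric
    ],
    'ResourceLimits' => [
        'MaxNumberOfTrainingJobs' => 20,
        'MaxParallelTrainingJobs' => 2,
    ],
    // AUTO lets SageMaker stop under-performing training jobs early;
    // the default is OFF.
    'TrainingJobEarlyStoppingType' => 'AUTO',
];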

', ], ], 'TrainingJobName' => [ 'base' => NULL, 'refs' => [ 'CreateTrainingJobRequest$TrainingJobName' => '

The name of the training job. The name must be unique within an AWS Region in an AWS account.

', 'DescribeTrainingJobRequest$TrainingJobName' => '

The name of the training job.

', 'DescribeTrainingJobResponse$TrainingJobName' => '

Name of the model training job.

', 'HyperParameterTrainingJobSummary$TrainingJobName' => '

The name of the training job.

', 'StopTrainingJobRequest$TrainingJobName' => '

The name of the training job to stop.

', 'TrainingJob$TrainingJobName' => '

The name of the training job.

', 'TrainingJobSummary$TrainingJobName' => '

The name of the training job that you want a summary for.

', ], ], 'TrainingJobSortByOptions' => [ 'base' => NULL, 'refs' => [ 'ListTrainingJobsForHyperParameterTuningJobRequest$SortBy' => '

The field to sort results by. The default is Name.

If the value of this field is FinalObjectiveMetricValue, any training jobs that did not return an objective metric are not listed.

', ], ], 'TrainingJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeTrainingJobResponse$TrainingJobStatus' => '

The status of the training job.

Amazon SageMaker provides the following training job statuses:

  • InProgress - The training is in progress.

  • Completed - The training job has completed.

  • Failed - The training job has failed. To see the reason for the failure, see the FailureReason field in the response to a DescribeTrainingJobResponse call.

  • Stopping - The training job is stopping.

  • Stopped - The training job has stopped.

For more detailed information, see SecondaryStatus.
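A sketch of checking these statuses after a DescribeTrainingJob call (the job name is a placeholder):

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$job = $sm->describeTrainingJob(['TrainingJobName' => 'my-training-job']);

switch ($job['TrainingJobStatus']) {
    case 'Completed':
        echo "Model artifacts: {$job['ModelArtifacts']['S3ModelArtifacts']}\n";
        break;
    case 'Failed':
        // The reason for a failure is surfaced in FailureReason.
        echo "Failed: {$job['FailureReason']}\n";
        break;
    default: // InProgress, Stopping, Stopped
        echo "Status: {$job['TrainingJobStatus']}\n";
}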

', 'HyperParameterTrainingJobSummary$TrainingJobStatus' => '

The status of the training job.

', 'ListTrainingJobsForHyperParameterTuningJobRequest$StatusEquals' => '

A filter that returns only training jobs with the specified status.

', 'ListTrainingJobsRequest$StatusEquals' => '

A filter that retrieves only training jobs with a specific status.

', 'TrainingJob$TrainingJobStatus' => '

The status of the training job.

Training job statuses are:

  • InProgress - The training is in progress.

  • Completed - The training job has completed.

  • Failed - The training job has failed. To see the reason for the failure, see the FailureReason field in the response to a DescribeTrainingJobResponse call.

  • Stopping - The training job is stopping.

  • Stopped - The training job has stopped.

For more detailed information, see SecondaryStatus.

', 'TrainingJobSummary$TrainingJobStatus' => '

The status of the training job.

', ], ], 'TrainingJobStatusCounter' => [ 'base' => NULL, 'refs' => [ 'TrainingJobStatusCounters$Completed' => '

The number of completed training jobs launched by the hyperparameter tuning job.

', 'TrainingJobStatusCounters$InProgress' => '

The number of in-progress training jobs launched by a hyperparameter tuning job.

', 'TrainingJobStatusCounters$RetryableError' => '

The number of training jobs that failed, but can be retried. A failed training job can be retried only if it failed because an internal service error occurred.

', 'TrainingJobStatusCounters$NonRetryableError' => '

The number of training jobs that failed and can\'t be retried. A failed training job can\'t be retried if it failed because a client error occurred.

', 'TrainingJobStatusCounters$Stopped' => '

The number of training jobs launched by a hyperparameter tuning job that were manually stopped.

', ], ], 'TrainingJobStatusCounters' => [ 'base' => '

The numbers of training jobs launched by a hyperparameter tuning job, categorized by status.

', 'refs' => [ 'DescribeHyperParameterTuningJobResponse$TrainingJobStatusCounters' => '

The TrainingJobStatusCounters object that specifies the number of training jobs, categorized by status, that this tuning job launched.

', 'HyperParameterTuningJobSummary$TrainingJobStatusCounters' => '

The TrainingJobStatusCounters object that specifies the numbers of training jobs, categorized by status, that this tuning job launched.
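A sketch of reading these counters from a DescribeHyperParameterTuningJob response (the tuning job name is a placeholder):

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$tuning = $sm->describeHyperParameterTuningJob([
    'HyperParameterTuningJobName' => 'my-tuning-job',
]);

$counters = $tuning['TrainingJobStatusCounters'];
printf(
    "completed=%d in-progress=%d stopped=%d retryable=%d non-retryable=%d\n",
    $counters['Completed'],
    $counters['InProgress'],
    $counters['Stopped'],
    $counters['RetryableError'],
    $counters['NonRetryableError']
);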

', ], ], 'TrainingJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListTrainingJobsResponse$TrainingJobSummaries' => '

An array of TrainingJobSummary objects, each listing a training job.

', ], ], 'TrainingJobSummary' => [ 'base' => '

Provides summary information about a training job.

', 'refs' => [ 'TrainingJobSummaries$member' => NULL, ], ], 'TrainingSpecification' => [ 'base' => '

Defines how the algorithm is used for a training job.

', 'refs' => [ 'CreateAlgorithmInput$TrainingSpecification' => '

Specifies details about training jobs run by this algorithm, including the following:

  • The Amazon ECR path of the container and the version digest of the algorithm.

  • The hyperparameters that the algorithm supports.

  • The instance types that the algorithm supports for training.

  • Whether the algorithm supports distributed training.

  • The metrics that the algorithm emits to Amazon CloudWatch.

  • Which metrics that the algorithm emits can be used as the objective metric for hyperparameter tuning jobs.

  • The input channels that the algorithm supports for training data. For example, an algorithm might support train, validation, and test channels.

', 'DescribeAlgorithmOutput$TrainingSpecification' => '

Details about training jobs run by this algorithm.

', ], ], 'TransformDataSource' => [ 'base' => '

Describes the location of the channel data.

', 'refs' => [ 'TransformInput$DataSource' => '

Describes the location of the channel data, which is the S3 location of the input data that the model can consume.

', ], ], 'TransformEnvironmentKey' => [ 'base' => NULL, 'refs' => [ 'TransformEnvironmentMap$key' => NULL, ], ], 'TransformEnvironmentMap' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$Environment' => '

The environment variables to set in the Docker container. We support up to 16 key and value entries in the map.

', 'DescribeTransformJobResponse$Environment' => '

The environment variables to set in the Docker container. We support up to 16 key and value entries in the map.

', 'TransformJobDefinition$Environment' => '

The environment variables to set in the Docker container. We support up to 16 key and value entries in the map.

', ], ], 'TransformEnvironmentValue' => [ 'base' => NULL, 'refs' => [ 'TransformEnvironmentMap$value' => NULL, ], ], 'TransformInput' => [ 'base' => '

Describes the input source of a transform job and the way the transform job consumes it.

', 'refs' => [ 'CreateTransformJobRequest$TransformInput' => '

Describes the input source and the way the transform job consumes it.

', 'DescribeTransformJobResponse$TransformInput' => '

Describes the dataset to be transformed and the Amazon S3 location where it is stored.

', 'TransformJobDefinition$TransformInput' => '

A description of the input source and the way the transform job consumes it.

', ], ], 'TransformInstanceCount' => [ 'base' => NULL, 'refs' => [ 'TransformResources$InstanceCount' => '

The number of ML compute instances to use in the transform job. For distributed transform, provide a value greater than 1. The default value is 1.

', ], ], 'TransformInstanceType' => [ 'base' => NULL, 'refs' => [ 'TransformInstanceTypes$member' => NULL, 'TransformResources$InstanceType' => '

The ML compute instance type for the transform job. If you use built-in algorithms to transform moderately sized datasets, ml.m4.xlarge or ml.m5.large should suffice. There is no default value for InstanceType.

', ], ], 'TransformInstanceTypes' => [ 'base' => NULL, 'refs' => [ 'InferenceSpecification$SupportedTransformInstanceTypes' => '

A list of the instance types on which a transformation job can be run or on which an endpoint can be deployed.

', ], ], 'TransformJobArn' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobResponse$TransformJobArn' => '

The Amazon Resource Name (ARN) of the transform job.

', 'DescribeTransformJobResponse$TransformJobArn' => '

The Amazon Resource Name (ARN) of the transform job.

', 'TransformJobSummary$TransformJobArn' => '

The Amazon Resource Name (ARN) of the transform job.

', ], ], 'TransformJobDefinition' => [ 'base' => '

Defines the input needed to run a transform job using the inference specification specified in the algorithm.

', 'refs' => [ 'AlgorithmValidationProfile$TransformJobDefinition' => '

The TransformJobDefinition object that describes the transform job that Amazon SageMaker runs to validate your algorithm.

', 'ModelPackageValidationProfile$TransformJobDefinition' => '

The TransformJobDefinition object that describes the transform job used for the validation of the model package.

', ], ], 'TransformJobName' => [ 'base' => NULL, 'refs' => [ 'CreateTransformJobRequest$TransformJobName' => '

The name of the transform job. The name must be unique within an AWS Region in an AWS account.

', 'DescribeTransformJobRequest$TransformJobName' => '

The name of the transform job that you want to view details of.

', 'DescribeTransformJobResponse$TransformJobName' => '

The name of the transform job.

', 'StopTransformJobRequest$TransformJobName' => '

The name of the transform job to stop.

', 'TransformJobSummary$TransformJobName' => '

The name of the transform job.

', ], ], 'TransformJobStatus' => [ 'base' => NULL, 'refs' => [ 'DescribeTransformJobResponse$TransformJobStatus' => '

The status of the transform job. If the transform job failed, the reason is returned in the FailureReason field.

', 'ListTransformJobsRequest$StatusEquals' => '

A filter that retrieves only transform jobs with a specific status.

', 'TransformJobSummary$TransformJobStatus' => '

The status of the transform job.

', ], ], 'TransformJobSummaries' => [ 'base' => NULL, 'refs' => [ 'ListTransformJobsResponse$TransformJobSummaries' => '

An array of TransformJobSummary objects.

', ], ], 'TransformJobSummary' => [ 'base' => '

Provides a summary of a transform job. Multiple TransformJobSummary objects are returned as a list in response to a ListTransformJobs call.

', 'refs' => [ 'TransformJobSummaries$member' => NULL, ], ], 'TransformOutput' => [ 'base' => '

Describes the results of a transform job.

', 'refs' => [ 'CreateTransformJobRequest$TransformOutput' => '

Describes the results of the transform job.

', 'DescribeTransformJobResponse$TransformOutput' => '

Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the transform job.

', 'TransformJobDefinition$TransformOutput' => '

Identifies the Amazon S3 location where you want Amazon SageMaker to save the results from the transform job.

', ], ], 'TransformResources' => [ 'base' => '

Describes the resources, including ML instance types and ML instance count, to use for a transform job.

', 'refs' => [ 'CreateTransformJobRequest$TransformResources' => '

Describes the resources, including ML instance types and ML instance count, to use for the transform job.

', 'DescribeTransformJobResponse$TransformResources' => '

Describes the resources, including ML instance types and ML instance count, to use for the transform job.

', 'TransformJobDefinition$TransformResources' => '

Identifies the ML compute instances for the transform job.

', ], ], 'TransformS3DataSource' => [ 'base' => '

Describes the S3 data source.

', 'refs' => [ 'TransformDataSource$S3DataSource' => '

The S3 location of the data source that is associated with a channel.
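Taken together, these shapes form the core of a CreateTransformJob request; a minimal sketch (the job name, model name, bucket paths, instance choice, and environment variable are placeholders):

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$sm->createTransformJob([
    'TransformJobName' => 'my-transform-job',      // unique per Region/account
    'ModelName'        => 'my-model',              // placeholder
    'TransformInput'   => [
        'DataSource' => ['S3DataSource' => [
            'S3DataType' => 'S3Prefix',
            'S3Uri'      => 's3://my-bucket/input/',
        ]],
    ],
    'TransformOutput'  => ['S3OutputPath' => 's3://my-bucket/output/'],
    'TransformResources' => [
        'InstanceType'  => 'ml.m4.xlarge',
        'InstanceCount' => 1,
    ],
    // Up to 16 key/value entries are supported here.
    'Environment' => ['MY_SETTING' => 'value'],
]);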

', ], ], 'USD' => [ 'base' => '

Represents an amount of money in United States dollars.

', 'refs' => [ 'PublicWorkforceTaskPrice$AmountInUsd' => '

Defines the amount of money paid to a worker in United States dollars.

', ], ], 'UiConfig' => [ 'base' => '

Provides configuration information for the worker UI for a labeling job.

', 'refs' => [ 'HumanTaskConfig$UiConfig' => '

Information about the user interface that workers use to complete the labeling task.

', ], ], 'UiTemplate' => [ 'base' => '

The Liquid template for the worker user interface.

', 'refs' => [ 'RenderUiTemplateRequest$UiTemplate' => '

A Template object containing the worker UI template to render.
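A sketch of rendering a Liquid worker template with RenderUiTemplate; the role ARN, task input, and template content here are assumptions for illustration:

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$result = $sm->renderUiTemplate([
    'RoleArn'    => 'arn:aws:iam::123456789012:role/SageMakerRole', // placeholder
    'Task'       => ['Input' => json_encode(['text' => 'Label me'])],
    'UiTemplate' => ['Content' => '<p>{{ task.input.text }}</p>'],   // Liquid template
]);

echo $result['RenderedContent'];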

', ], ], 'UpdateCodeRepositoryInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateCodeRepositoryOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointWeightsAndCapacitiesInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateEndpointWeightsAndCapacitiesOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceLifecycleConfigInput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceLifecycleConfigOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateNotebookInstanceOutput' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWorkteamRequest' => [ 'base' => NULL, 'refs' => [], ], 'UpdateWorkteamResponse' => [ 'base' => NULL, 'refs' => [], ], 'Url' => [ 'base' => NULL, 'refs' => [ 'ContainerDefinition$ModelDataUrl' => '

The S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

If you provide a value for this parameter, Amazon SageMaker uses AWS Security Token Service to download model artifacts from the S3 path you provide. AWS STS is activated in your IAM user account by default. If you previously deactivated AWS STS for a region, you need to reactivate AWS STS for that region. For more information, see Activating and Deactivating AWS STS in an AWS Region in the AWS Identity and Access Management User Guide.
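A sketch of supplying ModelDataUrl in a CreateModel call; the model name, image URI, role ARN, and S3 path are placeholders, and the path must end in .tar.gz:

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$sm->createModel([
    'ModelName'        => 'my-model',
    'ExecutionRoleArn' => 'arn:aws:iam::123456789012:role/SageMakerRole', // placeholder
    'PrimaryContainer' => [
        'Image'        => '123456789012.dkr.ecr.us-west-2.amazonaws.com/my-image:latest',
        // Must point to a single gzip-compressed tar archive.
        'ModelDataUrl' => 's3://my-bucket/artifacts/model.tar.gz',
    ],
]);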

', 'ModelPackageContainerDefinition$ModelDataUrl' => '

The Amazon S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

', 'SourceAlgorithm$ModelDataUrl' => '

The Amazon S3 path where the model artifacts, which result from model training, are stored. This path must point to a single gzip compressed tar archive (.tar.gz suffix).

', ], ], 'VariantName' => [ 'base' => NULL, 'refs' => [ 'DesiredWeightAndCapacity$VariantName' => '

The name of the variant to update.

', 'ProductionVariant$VariantName' => '

The name of the production variant.

', 'ProductionVariantSummary$VariantName' => '

The name of the variant.

', ], ], 'VariantWeight' => [ 'base' => NULL, 'refs' => [ 'DesiredWeightAndCapacity$DesiredWeight' => '

The variant\'s weight.

', 'ProductionVariant$InitialVariantWeight' => '

Determines initial traffic distribution among all of the models that you specify in the endpoint configuration. The traffic to a production variant is determined by the ratio of the VariantWeight to the sum of all VariantWeight values across all ProductionVariants. If unspecified, it defaults to 1.0.

', 'ProductionVariantSummary$CurrentWeight' => '

The weight associated with the variant.

', 'ProductionVariantSummary$DesiredWeight' => '

The requested weight, as specified in the UpdateEndpointWeightsAndCapacities request.
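The ratio is easiest to see with two variants: weights of 1.0 and 3.0 route roughly 25% and 75% of traffic, respectively. A sketch of adjusting weights (the endpoint and variant names are placeholders):

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

// After this call, variant-a receives 1.0 / (1.0 + 3.0) = 25% of traffic.
$sm->updateEndpointWeightsAndCapacities([
    'EndpointName' => 'my-endpoint',
    'DesiredWeightsAndCapacities' => [
        ['VariantName' => 'variant-a', 'DesiredWeight' => 1.0],
        ['VariantName' => 'variant-b', 'DesiredWeight' => 3.0],
    ],
]);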

', ], ], 'VolumeSizeInGB' => [ 'base' => NULL, 'refs' => [ 'ResourceConfig$VolumeSizeInGB' => '

The size of the ML storage volume that you want to provision.

ML storage volumes store model artifacts and incremental states. Training algorithms might also use the ML storage volume for scratch space. If you want to store the training data in the ML storage volume, choose File as the TrainingInputMode in the algorithm specification.

You must specify sufficient ML storage for your scenario.

Amazon SageMaker supports only the General Purpose SSD (gp2) ML storage volume type.
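A sketch of a ResourceConfig as it would appear in a CreateTrainingJob request; the instance type and volume size are assumptions for illustration:

// Fragment of CreateTrainingJob parameters:
$resourceConfig = [
    'InstanceType'   => 'ml.p3.2xlarge',  // placeholder
    'InstanceCount'  => 1,                // > 1 for distributed training
    // gp2 is the only supported volume type; size it for the training
    // data (File mode), the model artifacts, and any scratch space.
    'VolumeSizeInGB' => 50,
];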

', ], ], 'VpcConfig' => [ 'base' => '

Specifies a VPC that your training jobs and hosted models have access to. Control access to and from your training and model containers by configuring the VPC. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'refs' => [ 'CreateModelInput$VpcConfig' => '

A VpcConfig object that specifies the VPC that you want your model to connect to. Control access to and from your model container by configuring the VPC. VpcConfig is used in hosting services and in batch transform. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud and Protect Data in Batch Transform Jobs by Using an Amazon Virtual Private Cloud.

', 'CreateTrainingJobRequest$VpcConfig' => '

A VpcConfig object that specifies the VPC that you want your training job to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'DescribeModelOutput$VpcConfig' => '

A VpcConfig object that specifies the VPC that this model has access to. For more information, see Protect Endpoints by Using an Amazon Virtual Private Cloud.

', 'DescribeTrainingJobResponse$VpcConfig' => '

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'HyperParameterTrainingJobDefinition$VpcConfig' => '

The VpcConfig object that specifies the VPC that you want the training jobs that this hyperparameter tuning job launches to connect to. Control access to and from your training container by configuring the VPC. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', 'TrainingJob$VpcConfig' => '

A VpcConfig object that specifies the VPC that this training job has access to. For more information, see Protect Training Jobs by Using an Amazon Virtual Private Cloud.

', ], ], 'VpcSecurityGroupIds' => [ 'base' => NULL, 'refs' => [ 'VpcConfig$SecurityGroupIds' => '

The VPC security group IDs, in the form sg-xxxxxxxx. Specify the security groups for the VPC that is specified in the Subnets field.
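A sketch of the VpcConfig shape shared by these request types (all IDs are placeholders):

// VpcConfig as used by CreateModel, CreateTrainingJob, and
// HyperParameterTrainingJobDefinition:
$vpcConfig = [
    'SecurityGroupIds' => ['sg-0123456789abcdef0'],     // placeholder
    'Subnets'          => [
        'subnet-0123456789abcdef0',                     // placeholders
        'subnet-0fedcba9876543210',
    ],
];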

', ], ], 'Workteam' => [ 'base' => '

Provides details about a labeling work team.

', 'refs' => [ 'DescribeWorkteamResponse$Workteam' => '

A Workteam instance that contains information about the work team.

', 'UpdateWorkteamResponse$Workteam' => '

A Workteam object that describes the updated work team.

', 'Workteams$member' => NULL, ], ], 'WorkteamArn' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamResponse$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team. You can use this ARN to identify the work team.

', 'DescribeSubscribedWorkteamRequest$WorkteamArn' => '

The Amazon Resource Name (ARN) of the subscribed work team to describe.

', 'HumanTaskConfig$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team assigned to complete the tasks.

', 'LabelingJobSummary$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team assigned to the job.

', 'ListLabelingJobsForWorkteamRequest$WorkteamArn' => '

The Amazon Resource Name (ARN) of the work team for which you want to see labeling jobs.

', 'SubscribedWorkteam$WorkteamArn' => '

The Amazon Resource Name (ARN) of the vendor that you have subscribed to.

', 'Workteam$WorkteamArn' => '

The Amazon Resource Name (ARN) that identifies the work team.

', ], ], 'WorkteamName' => [ 'base' => NULL, 'refs' => [ 'CreateWorkteamRequest$WorkteamName' => '

The name of the work team. Use this name to identify the work team.

', 'DeleteWorkteamRequest$WorkteamName' => '

The name of the work team to delete.

', 'DescribeWorkteamRequest$WorkteamName' => '

The name of the work team to return a description of.

', 'ListSubscribedWorkteamsRequest$NameContains' => '

A string in the work team name. This filter returns only work teams whose name contains the specified string.

', 'ListWorkteamsRequest$NameContains' => '

A string in the work team\'s name. This filter returns only work teams whose name contains the specified string.
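A sketch of filtering work teams by a name substring and then listing labeling jobs for each match (the substring is a placeholder):

use Aws\SageMaker\SageMakerClient;

$sm = new SageMakerClient(['region' => 'us-west-2', 'version' => '2017-07-24']);

$teams = $sm->listWorkteams(['NameContains' => 'qa']); // placeholder substring

foreach ($teams['Workteams'] as $team) {
    $jobs = $sm->listLabelingJobsForWorkteam([
        'WorkteamArn' => $team['WorkteamArn'],
    ]);
    printf("%s: %d labeling job(s)\n",
        $team['WorkteamName'],
        count($jobs['LabelingJobSummaryList'])
    );
}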

', 'UpdateWorkteamRequest$WorkteamName' => '

The name of the work team to update.

', 'Workteam$WorkteamName' => '

The name of the work team.

', ], ], 'Workteams' => [ 'base' => NULL, 'refs' => [ 'ListWorkteamsResponse$Workteams' => '

An array of Workteam objects, each describing a work team.

', ], ], ],];