From 7f8f9a3d36b6a6ec4b02c6e89193288f2427c154 Mon Sep 17 00:00:00 2001 From: aws-sdk-go-automation <43143561+aws-sdk-go-automation@users.noreply.github.com> Date: Fri, 20 May 2022 11:23:33 -0700 Subject: [PATCH] Release v1.44.19 (2022-05-20) (#4409) Release v1.44.19 (2022-05-20) === ### Service Client Updates * `service/comprehend`: Updates service API and documentation * `service/logs`: Updates service documentation * Doc-only update to publish the new valid values for log retention --- CHANGELOG.md | 8 ++ aws/version.go | 2 +- models/apis/comprehend/2017-11-27/api-2.json | 17 ++- models/apis/comprehend/2017-11-27/docs-2.json | 36 ++--- models/apis/logs/2014-03-28/docs-2.json | 10 +- service/cloudwatchlogs/api.go | 54 ++++---- service/cloudwatchlogs/errors.go | 2 +- service/comprehend/api.go | 129 ++++++++++++++---- 8 files changed, 185 insertions(+), 73 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 639f02a76b1..548fa6acaff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,11 @@ +Release v1.44.19 (2022-05-20) +=== + +### Service Client Updates +* `service/comprehend`: Updates service API and documentation +* `service/logs`: Updates service documentation + * Doc-only update to publish the new valid values for log retention + Release v1.44.18 (2022-05-19) === diff --git a/aws/version.go b/aws/version.go index d7f4047bbeb..3284a981265 100644 --- a/aws/version.go +++ b/aws/version.go @@ -5,4 +5,4 @@ package aws const SDKName = "aws-sdk-go" // SDKVersion is the version of this SDK -const SDKVersion = "1.44.18" +const SDKVersion = "1.44.19" diff --git a/models/apis/comprehend/2017-11-27/api-2.json b/models/apis/comprehend/2017-11-27/api-2.json index 1fb99f5c7f8..a38a7c619e1 100644 --- a/models/apis/comprehend/2017-11-27/api-2.json +++ b/models/apis/comprehend/2017-11-27/api-2.json @@ -1503,6 +1503,7 @@ "CustomerInputStringList":{ "type":"list", "member":{"shape":"CustomerInputString"}, + "min":1, "sensitive":true }, 
"DeleteDocumentClassifierRequest":{ @@ -2935,7 +2936,21 @@ "AWS_SECRET_KEY", "IP_ADDRESS", "MAC_ADDRESS", - "ALL" + "ALL", + "LICENSE_PLATE", + "VEHICLE_IDENTIFICATION_NUMBER", + "UK_NATIONAL_INSURANCE_NUMBER", + "CA_SOCIAL_INSURANCE_NUMBER", + "US_INDIVIDUAL_TAX_IDENTIFICATION_NUMBER", + "UK_UNIQUE_TAXPAYER_REFERENCE_NUMBER", + "IN_PERMANENT_ACCOUNT_NUMBER", + "IN_NREGA", + "INTERNATIONAL_BANK_ACCOUNT_NUMBER", + "SWIFT_CODE", + "UK_NATIONAL_HEALTH_SERVICE_NUMBER", + "CA_HEALTH_NUMBER", + "IN_AADHAAR", + "IN_VOTER_NUMBER" ] }, "PiiOutputDataConfig":{ diff --git a/models/apis/comprehend/2017-11-27/docs-2.json b/models/apis/comprehend/2017-11-27/docs-2.json index 9670fa60021..1b097ada297 100644 --- a/models/apis/comprehend/2017-11-27/docs-2.json +++ b/models/apis/comprehend/2017-11-27/docs-2.json @@ -10,16 +10,16 @@ "ClassifyDocument": "
Creates a new document classification request to analyze a single document in real-time, using a previously created and trained custom model and an endpoint.
", "ContainsPiiEntities": "Analyzes input text for the presence of personally identifiable information (PII) and returns the labels of identified PII entity types such as name, address, bank account number, or phone number.
", "CreateDocumentClassifier": "Creates a new document classifier that you can use to categorize documents. To create a classifier, you provide a set of training documents that labeled with the categories that you want to use. After the classifier is trained you can use it to categorize a set of labeled documents into the categories. For more information, see how-document-classification.
", - "CreateEndpoint": "Creates a model-specific endpoint for synchronous inference for a previously trained custom model
", + "CreateEndpoint": "Creates a model-specific endpoint for synchronous inference for a previously trained custom model For information about endpoints, see Managing endpoints.
", "CreateEntityRecognizer": "Creates an entity recognizer using submitted files. After your CreateEntityRecognizer
request is submitted, you can check job status using the API.
Deletes a previously created document classifier
Only those classifiers that are in terminated states (IN_ERROR, TRAINED) will be deleted. If an active inference job is using the model, a ResourceInUseException
will be returned.
This is an asynchronous action that puts the classifier into a DELETING state, and it is then removed by a background job. Once removed, the classifier disappears from your account and is no longer available for use.
", - "DeleteEndpoint": "Deletes a model-specific endpoint for a previously-trained custom model. All endpoints must be deleted in order for the model to be deleted.
", + "DeleteEndpoint": "Deletes a model-specific endpoint for a previously-trained custom model. All endpoints must be deleted in order for the model to be deleted. For information about endpoints, see Managing endpoints.
", "DeleteEntityRecognizer": "Deletes an entity recognizer.
Only those recognizers that are in terminated states (IN_ERROR, TRAINED) will be deleted. If an active inference job is using the model, a ResourceInUseException
will be returned.
This is an asynchronous action that puts the recognizer into a DELETING state, and it is then removed by a background job. Once removed, the recognizer disappears from your account and is no longer available for use.
", "DeleteResourcePolicy": "Deletes a resource-based policy that is attached to a custom model.
", "DescribeDocumentClassificationJob": "Gets the properties associated with a document classification job. Use this operation to get the status of a classification job.
", "DescribeDocumentClassifier": "Gets the properties associated with a document classifier.
", "DescribeDominantLanguageDetectionJob": "Gets the properties associated with a dominant language detection job. Use this operation to get the status of a detection job.
", - "DescribeEndpoint": "Gets the properties associated with a specific endpoint. Use this operation to get the status of an endpoint.
", + "DescribeEndpoint": "Gets the properties associated with a specific endpoint. Use this operation to get the status of an endpoint. For information about endpoints, see Managing endpoints.
", "DescribeEntitiesDetectionJob": "Gets the properties associated with an entities detection job. Use this operation to get the status of a detection job.
", "DescribeEntityRecognizer": "Provides details about an entity recognizer including status, S3 buckets containing training data, recognizer metadata, metrics, and so on.
", "DescribeEventsDetectionJob": "Gets the status and details of an events detection job.
", @@ -40,7 +40,7 @@ "ListDocumentClassifierSummaries": "Gets a list of summaries of the document classifiers that you have created
", "ListDocumentClassifiers": "Gets a list of the document classifiers that you have created.
", "ListDominantLanguageDetectionJobs": "Gets a list of the dominant language detection jobs that you have submitted.
", - "ListEndpoints": "Gets a list of all existing endpoints that you've created.
", + "ListEndpoints": "Gets a list of all existing endpoints that you've created. For information about endpoints, see Managing endpoints.
", "ListEntitiesDetectionJobs": "Gets a list of the entity detection jobs that you have submitted.
", "ListEntityRecognizerSummaries": "Gets a list of summaries for the entity recognizers that you have created.
", "ListEntityRecognizers": "Gets a list of the properties of all entity recognizers that you created, including recognizers currently in training. Allows you to filter the list of recognizers based on criteria such as status and submission time. This call returns up to 500 entity recognizers in the list, with a default number of 100 recognizers in the list.
The results of this list are not in any particular order. Please get the list and sort locally if needed.
", @@ -66,13 +66,13 @@ "StopEventsDetectionJob": "Stops an events detection job in progress.
", "StopKeyPhrasesDetectionJob": "Stops a key phrases detection job in progress.
If the job state is IN_PROGRESS
the job is marked for termination and put into the STOP_REQUESTED
state. If the job completes before it can be stopped, it is put into the COMPLETED
state; otherwise the job is stopped and put into the STOPPED
state.
If the job is in the COMPLETED
or FAILED
state when you call the StopDominantLanguageDetectionJob
operation, the operation returns a 400 Internal Request Exception.
When a job is stopped, any documents already processed are written to the output location.
", "StopPiiEntitiesDetectionJob": "Stops a PII entities detection job in progress.
", - "StopSentimentDetectionJob": "Stops a sentiment detection job in progress.
If the job state is IN_PROGRESS
the job is marked for termination and put into the STOP_REQUESTED
state. If the job completes before it can be stopped, it is put into the COMPLETED
state; otherwise the job is be stopped and put into the STOPPED
state.
If the job is in the COMPLETED
or FAILED
state when you call the StopDominantLanguageDetectionJob
operation, the operation returns a 400 Internal Request Exception.
When a job is stopped, any documents already processed are written to the output location.
", - "StopTargetedSentimentDetectionJob": "Stops a targeted sentiment detection job in progress.
If the job state is IN_PROGRESS
the job is marked for termination and put into the STOP_REQUESTED
state. If the job completes before it can be stopped, it is put into the COMPLETED
state; otherwise the job is be stopped and put into the STOPPED
state.
If the job is in the COMPLETED
or FAILED
state when you call the StopDominantLanguageDetectionJob
operation, the operation returns a 400 Internal Request Exception.
When a job is stopped, any documents already processed are written to the output location.
", + "StopSentimentDetectionJob": "Stops a sentiment detection job in progress.
If the job state is IN_PROGRESS
, the job is marked for termination and put into the STOP_REQUESTED
state. If the job completes before it can be stopped, it is put into the COMPLETED
state; otherwise the job is stopped and put into the STOPPED
state.
If the job is in the COMPLETED
or FAILED
state when you call the StopDominantLanguageDetectionJob
operation, the operation returns a 400 Internal Request Exception.
When a job is stopped, any documents already processed are written to the output location.
", + "StopTargetedSentimentDetectionJob": "Stops a targeted sentiment detection job in progress.
If the job state is IN_PROGRESS
, the job is marked for termination and put into the STOP_REQUESTED
state. If the job completes before it can be stopped, it is put into the COMPLETED
state; otherwise the job is stopped and put into the STOPPED
state.
If the job is in the COMPLETED
or FAILED
state when you call the StopDominantLanguageDetectionJob
operation, the operation returns a 400 Internal Request Exception.
When a job is stopped, any documents already processed are written to the output location.
", "StopTrainingDocumentClassifier": "Stops a document classifier training job while in progress.
If the training job state is TRAINING
, the job is marked for termination and put into the STOP_REQUESTED
state. If the training job completes before it can be stopped, it is put into the TRAINED
; otherwise the training job is stopped and put into the STOPPED
state and the service sends back an HTTP 200 response with an empty HTTP body.
Stops an entity recognizer training job while in progress.
If the training job state is TRAINING
, the job is marked for termination and put into the STOP_REQUESTED
state. If the training job completes before it can be stopped, it is put into the TRAINED
; otherwise the training job is stopped and putted into the STOPPED
state and the service sends back an HTTP 200 response with an empty HTTP body.
Associates a specific tag with an Amazon Comprehend resource. A tag is a key-value pair that adds as a metadata to a resource used by Amazon Comprehend. For example, a tag with \"Sales\" as the key might be added to a resource to indicate its use by the sales department.
", "UntagResource": "Removes a specific tag associated with an Amazon Comprehend resource.
", - "UpdateEndpoint": "Updates information about the specified endpoint.
" + "UpdateEndpoint": "Updates information about the specified endpoint. For information about endpoints, see Managing endpoints.
" }, "shapes": { "AnyLengthString": { @@ -390,7 +390,7 @@ "refs": { "BatchDetectDominantLanguageRequest$TextList": "A list containing the text of the input documents. The list can contain a maximum of 25 documents. Each document should contain at least 20 characters and must contain fewer than 5,000 bytes of UTF-8 encoded characters.
", "BatchDetectEntitiesRequest$TextList": "A list containing the text of the input documents. The list can contain a maximum of 25 documents. Each document must contain fewer than 5,000 bytes of UTF-8 encoded characters.
", - "BatchDetectKeyPhrasesRequest$TextList": "A list containing the text of the input documents. The list can contain a maximum of 25 documents. Each document must contain fewer that 5,000 bytes of UTF-8 encoded characters.
", + "BatchDetectKeyPhrasesRequest$TextList": "A list containing the text of the input documents. The list can contain a maximum of 25 documents. Each document must contain fewer than 5,000 bytes of UTF-8 encoded characters.
", "BatchDetectSentimentRequest$TextList": "A list containing the text of the input documents. The list can contain a maximum of 25 documents. Each document must contain fewer that 5,000 bytes of UTF-8 encoded characters.
", "BatchDetectSyntaxRequest$TextList": "A list containing the text of the input documents. The list can contain a maximum of 25 documents. Each document must contain fewer that 5,000 bytes of UTF-8 encoded characters.
" } @@ -678,7 +678,7 @@ "DocumentClassifierEndpointArn": { "base": null, "refs": { - "ClassifyDocumentRequest$EndpointArn": "The Amazon Resource Number (ARN) of the endpoint.
" + "ClassifyDocumentRequest$EndpointArn": "The Amazon Resource Number (ARN) of the endpoint. For information about endpoints, see Managing endpoints.
" } }, "DocumentClassifierFilter": { @@ -688,7 +688,7 @@ } }, "DocumentClassifierInputDataConfig": { - "base": "The input properties for training a document classifier.
For more information on how the input file is formatted, see how-document-classification-training-data.
", + "base": "The input properties for training a document classifier.
For more information on how the input file is formatted, see prep-classifier-data.
", "refs": { "CreateDocumentClassifierRequest$InputDataConfig": "Specifies the format and location of the input data for the job.
", "DocumentClassifierProperties$InputDataConfig": "The input data configuration that you supplied when you created the document classifier for training.
" @@ -814,7 +814,7 @@ } }, "EndpointProperties": { - "base": "Specifies information about the specified endpoint.
", + "base": "Specifies information about the specified endpoint. For information about endpoints, see Managing endpoints.
", "refs": { "DescribeEndpointResponse$EndpointProperties": "Describes information associated with the specific endpoint.
", "EndpointPropertiesList$member": null @@ -904,7 +904,7 @@ "EntityRecognizerEndpointArn": { "base": null, "refs": { - "DetectEntitiesRequest$EndpointArn": "The Amazon Resource Name of an endpoint that is associated with a custom entity recognition model. Provide an endpoint if you want to detect entities by using your own custom model instead of the default model that is used by Amazon Comprehend.
If you specify an endpoint, Amazon Comprehend uses the language of your custom model, and it ignores any language code that you provide in your request.
" + "DetectEntitiesRequest$EndpointArn": "The Amazon Resource Name of an endpoint that is associated with a custom entity recognition model. Provide an endpoint if you want to detect entities by using your own custom model instead of the default model that is used by Amazon Comprehend.
If you specify an endpoint, Amazon Comprehend uses the language of your custom model, and it ignores any language code that you provide in your request.
For information about endpoints, see Managing endpoints.
" } }, "EntityRecognizerEntityList": { @@ -1367,12 +1367,12 @@ "BatchDetectEntitiesRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", "BatchDetectKeyPhrasesRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", "BatchDetectSentimentRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", - "ContainsPiiEntitiesRequest$LanguageCode": "The language of the input documents.
", + "ContainsPiiEntitiesRequest$LanguageCode": "The language of the input documents. Currently, English is the only valid language.
", "CreateDocumentClassifierRequest$LanguageCode": "The language of the input documents. You can specify any of the following languages supported by Amazon Comprehend: German (\"de\"), English (\"en\"), Spanish (\"es\"), French (\"fr\"), Italian (\"it\"), or Portuguese (\"pt\"). All documents must be in the same language.
", "CreateEntityRecognizerRequest$LanguageCode": "You can specify any of the following languages supported by Amazon Comprehend: English (\"en\"), Spanish (\"es\"), French (\"fr\"), Italian (\"it\"), German (\"de\"), or Portuguese (\"pt\"). All documents must be in the same language.
", "DetectEntitiesRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
If your request includes the endpoint for a custom entity recognition model, Amazon Comprehend uses the language of your custom model, and it ignores any language code that you specify here.
", "DetectKeyPhrasesRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", - "DetectPiiEntitiesRequest$LanguageCode": "The language of the input documents.
", + "DetectPiiEntitiesRequest$LanguageCode": "The language of the input documents. Currently, English is the only valid language.
", "DetectSentimentRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", "DocumentClassifierProperties$LanguageCode": "The language code for the language of the documents that the classifier was trained on.
", "EntitiesDetectionJobProperties$LanguageCode": "The language code of the input documents.
", @@ -1384,9 +1384,9 @@ "StartEntitiesDetectionJobRequest$LanguageCode": "The language of the input documents. All documents must be in the same language. You can specify any of the languages supported by Amazon Comprehend. If custom entities recognition is used, this parameter is ignored and the language used for training the model is used instead.
", "StartEventsDetectionJobRequest$LanguageCode": "The language code of the input documents.
", "StartKeyPhrasesDetectionJobRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", - "StartPiiEntitiesDetectionJobRequest$LanguageCode": "The language of the input documents.
", + "StartPiiEntitiesDetectionJobRequest$LanguageCode": "The language of the input documents. Currently, English is the only valid language.
", "StartSentimentDetectionJobRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", - "StartTargetedSentimentDetectionJobRequest$LanguageCode": "The language of the input documents. You can specify any of the primary languages supported by Amazon Comprehend. All documents must be in the same language.
", + "StartTargetedSentimentDetectionJobRequest$LanguageCode": "The language of the input documents. Currently, English is the only valid language.
", "TargetedSentimentDetectionJobProperties$LanguageCode": "The language code of the input documents.
" } }, @@ -2374,8 +2374,8 @@ "refs": { "CreateDocumentClassifierRequest$VpcConfig": "Configuration parameters for an optional private Virtual Private Cloud (VPC) containing the resources you are using for your custom classifier. For more information, see Amazon VPC.
", "CreateEntityRecognizerRequest$VpcConfig": "Configuration parameters for an optional private Virtual Private Cloud (VPC) containing the resources you are using for your custom entity recognizer. For more information, see Amazon VPC.
", - "DocumentClassificationJobProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your document classification job. For more information, see Amazon VPC.
", - "DocumentClassifierProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your custom classifier. For more information, see Amazon VPC.
", + "DocumentClassificationJobProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your document classification job. For more information, see Amazon VPC.
", + "DocumentClassifierProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your custom classifier. For more information, see Amazon VPC.
", "DominantLanguageDetectionJobProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your dominant language detection job. For more information, see Amazon VPC.
", "EntitiesDetectionJobProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your entity detection job. For more information, see Amazon VPC.
", "EntityRecognizerProperties$VpcConfig": "Configuration parameters for a private Virtual Private Cloud (VPC) containing the resources you are using for your custom entity recognizer. For more information, see Amazon VPC.
", diff --git a/models/apis/logs/2014-03-28/docs-2.json b/models/apis/logs/2014-03-28/docs-2.json index 70fb53d7684..6890a26b427 100644 --- a/models/apis/logs/2014-03-28/docs-2.json +++ b/models/apis/logs/2014-03-28/docs-2.json @@ -4,7 +4,7 @@ "operations": { "AssociateKmsKey": "Associates the specified Key Management Service customer master key (CMK) with the specified log group.
Associating an KMS CMK with a log group overrides any existing associations between the log group and a CMK. After a CMK is associated with a log group, all newly ingested data for the log group is encrypted using the CMK. This association is stored as long as the data encrypted with the CMK is still within CloudWatch Logs. This enables CloudWatch Logs to decrypt this data whenever it is requested.
CloudWatch Logs supports only symmetric CMKs. Do not use an associate an asymmetric CMK with your log group. For more information, see Using Symmetric and Asymmetric Keys.
It can take up to 5 minutes for this operation to take effect.
If you attempt to associate a CMK with a log group but the CMK does not exist or the CMK is disabled, you receive an InvalidParameterException
error.
Cancels the specified export task.
The task must be in the PENDING
or RUNNING
state.
Creates an export task, which allows you to efficiently export data from a log group to an Amazon S3 bucket. When you perform a CreateExportTask
operation, you must use credentials that have permission to write to the S3 bucket that you specify as the destination.
This is an asynchronous call. If all the required information is provided, this operation initiates an export task and responds with the ID of the task. After the task has started, you can use DescribeExportTasks to get the status of the export task. Each account can only have one active (RUNNING
or PENDING
) export task at a time. To cancel an export task, use CancelExportTask.
You can export logs from multiple log groups or multiple time ranges to the same S3 bucket. To separate out log data for each export task, you can specify a prefix to be used as the Amazon S3 key prefix for all exported objects.
Exporting to S3 buckets that are encrypted with AES-256 is supported. Exporting to S3 buckets encrypted with SSE-KMS is not supported.
", + "CreateExportTask": "Creates an export task, which allows you to efficiently export data from a log group to an Amazon S3 bucket. When you perform a CreateExportTask
operation, you must use credentials that have permission to write to the S3 bucket that you specify as the destination.
Exporting log data to Amazon S3 buckets that are encrypted by KMS is not supported. Exporting log data to Amazon S3 buckets that have S3 Object Lock enabled with a retention period is not supported.
Exporting to S3 buckets that are encrypted with AES-256 is supported.
This is an asynchronous call. If all the required information is provided, this operation initiates an export task and responds with the ID of the task. After the task has started, you can use DescribeExportTasks to get the status of the export task. Each account can only have one active (RUNNING
or PENDING
) export task at a time. To cancel an export task, use CancelExportTask.
You can export logs from multiple log groups or multiple time ranges to the same S3 bucket. To separate out log data for each export task, you can specify a prefix to be used as the Amazon S3 key prefix for all exported objects.
Time-based sorting on chunks of log data inside an exported file is not guaranteed. You can sort the exported log fild data by using Linux utilities.
Creates a log group with the specified name. You can create up to 20,000 log groups per account.
You must use the following guidelines when naming a log group:
Log group names must be unique within a region for an Amazon Web Services account.
Log group names can be between 1 and 512 characters long.
Log group names consist of the following characters: a-z, A-Z, 0-9, '_' (underscore), '-' (hyphen), '/' (forward slash), '.' (period), and '#' (number sign)
When you create a log group, by default the log events in the log group never expire. To set a retention policy so that events expire and are deleted after a specified time, use PutRetentionPolicy.
If you associate a Key Management Service customer master key (CMK) with the log group, ingested data is encrypted using the CMK. This association is stored as long as the data encrypted with the CMK is still within CloudWatch Logs. This enables CloudWatch Logs to decrypt this data whenever it is requested.
If you attempt to associate a CMK with the log group but the CMK does not exist or the CMK is disabled, you receive an InvalidParameterException
error.
CloudWatch Logs supports only symmetric CMKs. Do not associate an asymmetric CMK with your log group. For more information, see Using Symmetric and Asymmetric Keys.
Creates a log stream for the specified log group. A log stream is a sequence of log events that originate from a single source, such as an application instance or a resource that is being monitored.
There is no limit on the number of log streams that you can create for a log group. There is a limit of 50 TPS on CreateLogStream
operations, after which transactions are throttled.
You must use the following guidelines when naming a log stream:
Log stream names must be unique within the log group.
Log stream names can be between 1 and 512 characters long.
The ':' (colon) and '*' (asterisk) characters are not allowed.
Deletes the specified destination, and eventually disables all the subscription filters that publish to it. This operation does not delete the physical resource encapsulated by the destination.
", @@ -97,7 +97,7 @@ } }, "Days": { - "base": "The number of days to retain the log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, and 3653.
To set a log group to never have log events expire, use DeleteRetentionPolicy.
", + "base": "The number of days to retain the log events in the specified log group. Possible values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, 2192, 2557, 2922, 3288, and 3653.
To set a log group to never have log events expire, use DeleteRetentionPolicy.
", "refs": { "LogGroup$retentionInDays": null, "PutRetentionPolicyRequest$retentionInDays": null @@ -673,7 +673,7 @@ "LogGroups": { "base": null, "refs": { - "DescribeLogGroupsResponse$logGroups": "The log groups.
If the retentionInDays
value if not included for a log group, then that log group is set to have its events never expire.
The log groups.
If the retentionInDays
value is not included for a log group, then that log group is set to have its events never expire.
Multiple requests to update the same resource were in conflict.
", + "base": "Multiple concurrent requests to update the same resource were in conflict.
", "refs": { } }, @@ -1228,7 +1228,7 @@ "base": null, "refs": { "CreateExportTaskRequest$from": "The start time of the range for the request, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp earlier than this time are not exported.
", - "CreateExportTaskRequest$to": "The end time of the range for the request, expreswatchlogsdocused as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp later than this time are not exported.
", + "CreateExportTaskRequest$to": "The end time of the range for the request, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp later than this time are not exported.
", "Destination$creationTime": "The creation time of the destination, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC.
", "ExportTask$from": "The start time, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp before this time are not exported.
", "ExportTask$to": "The end time, expressed as the number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp later than this time are not exported.
", diff --git a/service/cloudwatchlogs/api.go b/service/cloudwatchlogs/api.go index 53df7713084..8487b2f4fce 100644 --- a/service/cloudwatchlogs/api.go +++ b/service/cloudwatchlogs/api.go @@ -91,7 +91,7 @@ func (c *CloudWatchLogs) AssociateKmsKeyRequest(input *AssociateKmsKeyInput) (re // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -258,6 +258,12 @@ func (c *CloudWatchLogs) CreateExportTaskRequest(input *CreateExportTaskInput) ( // you must use credentials that have permission to write to the S3 bucket that // you specify as the destination. // +// Exporting log data to Amazon S3 buckets that are encrypted by KMS is not +// supported. Exporting log data to Amazon S3 buckets that have S3 Object Lock +// enabled with a retention period is not supported. +// +// Exporting to S3 buckets that are encrypted with AES-256 is supported. +// // This is an asynchronous call. If all the required information is provided, // this operation initiates an export task and responds with the ID of the task. // After the task has started, you can use DescribeExportTasks (https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_DescribeExportTasks.html) @@ -269,8 +275,8 @@ func (c *CloudWatchLogs) CreateExportTaskRequest(input *CreateExportTaskInput) ( // same S3 bucket. To separate out log data for each export task, you can specify // a prefix to be used as the Amazon S3 key prefix for all exported objects. // -// Exporting to S3 buckets that are encrypted with AES-256 is supported. Exporting -// to S3 buckets encrypted with SSE-KMS is not supported. +// Time-based sorting on chunks of log data inside an exported file is not guaranteed. +// You can sort the exported log fild data by using Linux utilities. 
// // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -287,7 +293,7 @@ func (c *CloudWatchLogs) CreateExportTaskRequest(input *CreateExportTaskInput) ( // You have reached the maximum number of resources that can be created. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -413,7 +419,7 @@ func (c *CloudWatchLogs) CreateLogGroupRequest(input *CreateLogGroupInput) (req // You have reached the maximum number of resources that can be created. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -607,7 +613,7 @@ func (c *CloudWatchLogs) DeleteDestinationRequest(input *DeleteDestinationInput) // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -697,7 +703,7 @@ func (c *CloudWatchLogs) DeleteLogGroupRequest(input *DeleteLogGroupInput) (req // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -787,7 +793,7 @@ func (c *CloudWatchLogs) DeleteLogStreamRequest(input *DeleteLogStreamInput) (re // The specified resource does not exist. 
// // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -876,7 +882,7 @@ func (c *CloudWatchLogs) DeleteMetricFilterRequest(input *DeleteMetricFilterInpu // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -1146,7 +1152,7 @@ func (c *CloudWatchLogs) DeleteRetentionPolicyRequest(input *DeleteRetentionPoli // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -1235,7 +1241,7 @@ func (c *CloudWatchLogs) DeleteSubscriptionFilterRequest(input *DeleteSubscripti // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -2403,7 +2409,7 @@ func (c *CloudWatchLogs) DisassociateKmsKeyRequest(input *DisassociateKmsKeyInpu // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. 
@@ -3181,7 +3187,7 @@ func (c *CloudWatchLogs) PutDestinationRequest(input *PutDestinationInput) (req // A parameter is specified incorrectly. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -3275,7 +3281,7 @@ func (c *CloudWatchLogs) PutDestinationPolicyRequest(input *PutDestinationPolicy // A parameter is specified incorrectly. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -3517,7 +3523,7 @@ func (c *CloudWatchLogs) PutMetricFilterRequest(input *PutMetricFilterInput) (re // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * LimitExceededException // You have reached the maximum number of resources that can be created. @@ -3798,7 +3804,7 @@ func (c *CloudWatchLogs) PutRetentionPolicyRequest(input *PutRetentionPolicyInpu // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. // // * ServiceUnavailableException // The service cannot complete the request. @@ -3913,7 +3919,7 @@ func (c *CloudWatchLogs) PutSubscriptionFilterRequest(input *PutSubscriptionFilt // The specified resource does not exist. // // * OperationAbortedException -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. 
// // * LimitExceededException // You have reached the maximum number of resources that can be created. @@ -4589,9 +4595,9 @@ type CreateExportTaskInput struct { // The name of the export task. TaskName *string `locationName:"taskName" min:"1" type:"string"` - // The end time of the range for the request, expreswatchlogsdocused as the - // number of milliseconds after Jan 1, 1970 00:00:00 UTC. Events with a timestamp - // later than this time are not exported. + // The end time of the range for the request, expressed as the number of milliseconds + // after Jan 1, 1970 00:00:00 UTC. Events with a timestamp later than this time + // are not exported. // // To is a required field To *int64 `locationName:"to" type:"long" required:"true"` @@ -5893,7 +5899,7 @@ type DescribeLogGroupsOutput struct { // The log groups. // - // If the retentionInDays value if not included for a log group, then that log + // If the retentionInDays value is not included for a log group, then that log // group is set to have its events never expire. LogGroups []*LogGroup `locationName:"logGroups" type:"list"` @@ -8215,7 +8221,7 @@ type LogGroup struct { // The number of days to retain the log events in the specified log group. Possible // values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, - // 1827, and 3653. + // 1827, 2192, 2557, 2922, 3288, and 3653. // // To set a log group to never have log events expire, use DeleteRetentionPolicy // (https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_DeleteRetentionPolicy.html). @@ -8752,7 +8758,7 @@ func (s *MetricTransformation) SetUnit(v string) *MetricTransformation { return s } -// Multiple requests to update the same resource were in conflict. +// Multiple concurrent requests to update the same resource were in conflict. 
type OperationAbortedException struct { _ struct{} `type:"structure"` RespMetadata protocol.ResponseMetadata `json:"-" xml:"-"` @@ -9612,7 +9618,7 @@ type PutRetentionPolicyInput struct { // The number of days to retain the log events in the specified log group. Possible // values are: 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, - // 1827, and 3653. + // 1827, 2192, 2557, 2922, 3288, and 3653. // // To set a log group to never have log events expire, use DeleteRetentionPolicy // (https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_DeleteRetentionPolicy.html). diff --git a/service/cloudwatchlogs/errors.go b/service/cloudwatchlogs/errors.go index 52e58e3c68d..9da085e3286 100644 --- a/service/cloudwatchlogs/errors.go +++ b/service/cloudwatchlogs/errors.go @@ -52,7 +52,7 @@ const ( // ErrCodeOperationAbortedException for service response error code // "OperationAbortedException". // - // Multiple requests to update the same resource were in conflict. + // Multiple concurrent requests to update the same resource were in conflict. ErrCodeOperationAbortedException = "OperationAbortedException" // ErrCodeResourceAlreadyExistsException for service response error code diff --git a/service/comprehend/api.go b/service/comprehend/api.go index 2163cfb62e7..b305f84a7ee 100644 --- a/service/comprehend/api.go +++ b/service/comprehend/api.go @@ -823,7 +823,8 @@ func (c *Comprehend) CreateEndpointRequest(input *CreateEndpointInput) (req *req // CreateEndpoint API operation for Amazon Comprehend. // // Creates a model-specific endpoint for synchronous inference for a previously -// trained custom model +// trained custom model For information about endpoints, see Managing endpoints +// (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). // // Returns awserr.Error for service API and SDK errors. 
Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -1145,7 +1146,8 @@ func (c *Comprehend) DeleteEndpointRequest(input *DeleteEndpointInput) (req *req // DeleteEndpoint API operation for Amazon Comprehend. // // Deletes a model-specific endpoint for a previously-trained custom model. -// All endpoints must be deleted in order for the model to be deleted. +// All endpoints must be deleted in order for the model to be deleted. For information +// about endpoints, see Managing endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -1699,7 +1701,8 @@ func (c *Comprehend) DescribeEndpointRequest(input *DescribeEndpointInput) (req // DescribeEndpoint API operation for Amazon Comprehend. // // Gets the properties associated with a specific endpoint. Use this operation -// to get the status of an endpoint. +// to get the status of an endpoint. For information about endpoints, see Managing +// endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -3830,7 +3833,8 @@ func (c *Comprehend) ListEndpointsRequest(input *ListEndpointsInput) (req *reque // ListEndpoints API operation for Amazon Comprehend. // -// Gets a list of all existing endpoints that you've created. +// Gets a list of all existing endpoints that you've created. For information +// about endpoints, see Managing endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). // // Returns awserr.Error for service API and SDK errors. 
Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -6683,7 +6687,7 @@ func (c *Comprehend) StopSentimentDetectionJobRequest(input *StopSentimentDetect // // Stops a sentiment detection job in progress. // -// If the job state is IN_PROGRESS the job is marked for termination and put +// If the job state is IN_PROGRESS, the job is marked for termination and put // into the STOP_REQUESTED state. If the job completes before it can be stopped, // it is put into the COMPLETED state; otherwise the job is be stopped and put // into the STOPPED state. @@ -6779,7 +6783,7 @@ func (c *Comprehend) StopTargetedSentimentDetectionJobRequest(input *StopTargete // // Stops a targeted sentiment detection job in progress. // -// If the job state is IN_PROGRESS the job is marked for termination and put +// If the job state is IN_PROGRESS, the job is marked for termination and put // into the STOP_REQUESTED state. If the job completes before it can be stopped, // it is put into the COMPLETED state; otherwise the job is be stopped and put // into the STOPPED state. @@ -7260,7 +7264,8 @@ func (c *Comprehend) UpdateEndpointRequest(input *UpdateEndpointInput) (req *req // UpdateEndpoint API operation for Amazon Comprehend. // -// Updates information about the specified endpoint. +// Updates information about the specified endpoint. For information about endpoints, +// see Managing endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). // // Returns awserr.Error for service API and SDK errors. Use runtime type assertions // with awserr.Error's Code and Message methods to get detailed information about @@ -7455,7 +7460,7 @@ type BatchDetectDominantLanguageInput struct { // String and GoString methods. 
// // TextList is a required field - TextList []*string `type:"list" required:"true" sensitive:"true"` + TextList []*string `min:"1" type:"list" required:"true" sensitive:"true"` } // String returns the string representation. @@ -7482,6 +7487,9 @@ func (s *BatchDetectDominantLanguageInput) Validate() error { if s.TextList == nil { invalidParams.Add(request.NewErrParamRequired("TextList")) } + if s.TextList != nil && len(s.TextList) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TextList", 1)) + } if invalidParams.Len() > 0 { return invalidParams @@ -7606,7 +7614,7 @@ type BatchDetectEntitiesInput struct { // String and GoString methods. // // TextList is a required field - TextList []*string `type:"list" required:"true" sensitive:"true"` + TextList []*string `min:"1" type:"list" required:"true" sensitive:"true"` } // String returns the string representation. @@ -7636,6 +7644,9 @@ func (s *BatchDetectEntitiesInput) Validate() error { if s.TextList == nil { invalidParams.Add(request.NewErrParamRequired("TextList")) } + if s.TextList != nil && len(s.TextList) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TextList", 1)) + } if invalidParams.Len() > 0 { return invalidParams @@ -7757,7 +7768,7 @@ type BatchDetectKeyPhrasesInput struct { LanguageCode *string `type:"string" required:"true" enum:"LanguageCode"` // A list containing the text of the input documents. The list can contain a - // maximum of 25 documents. Each document must contain fewer that 5,000 bytes + // maximum of 25 documents. Each document must contain fewer than 5,000 bytes // of UTF-8 encoded characters. // // TextList is a sensitive parameter and its value will be @@ -7765,7 +7776,7 @@ type BatchDetectKeyPhrasesInput struct { // String and GoString methods. // // TextList is a required field - TextList []*string `type:"list" required:"true" sensitive:"true"` + TextList []*string `min:"1" type:"list" required:"true" sensitive:"true"` } // String returns the string representation. 
@@ -7795,6 +7806,9 @@ func (s *BatchDetectKeyPhrasesInput) Validate() error { if s.TextList == nil { invalidParams.Add(request.NewErrParamRequired("TextList")) } + if s.TextList != nil && len(s.TextList) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TextList", 1)) + } if invalidParams.Len() > 0 { return invalidParams @@ -7924,7 +7938,7 @@ type BatchDetectSentimentInput struct { // String and GoString methods. // // TextList is a required field - TextList []*string `type:"list" required:"true" sensitive:"true"` + TextList []*string `min:"1" type:"list" required:"true" sensitive:"true"` } // String returns the string representation. @@ -7954,6 +7968,9 @@ func (s *BatchDetectSentimentInput) Validate() error { if s.TextList == nil { invalidParams.Add(request.NewErrParamRequired("TextList")) } + if s.TextList != nil && len(s.TextList) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TextList", 1)) + } if invalidParams.Len() > 0 { return invalidParams @@ -8095,7 +8112,7 @@ type BatchDetectSyntaxInput struct { // String and GoString methods. // // TextList is a required field - TextList []*string `type:"list" required:"true" sensitive:"true"` + TextList []*string `min:"1" type:"list" required:"true" sensitive:"true"` } // String returns the string representation. @@ -8125,6 +8142,9 @@ func (s *BatchDetectSyntaxInput) Validate() error { if s.TextList == nil { invalidParams.Add(request.NewErrParamRequired("TextList")) } + if s.TextList != nil && len(s.TextList) < 1 { + invalidParams.Add(request.NewErrParamMinLen("TextList", 1)) + } if invalidParams.Len() > 0 { return invalidParams @@ -8536,7 +8556,8 @@ func (s *ClassifierMetadata) SetNumberOfTrainedDocuments(v int64) *ClassifierMet type ClassifyDocumentInput struct { _ struct{} `type:"structure"` - // The Amazon Resource Number (ARN) of the endpoint. + // The Amazon Resource Number (ARN) of the endpoint. 
For information about endpoints, + // see Managing endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). // // EndpointArn is a required field EndpointArn *string `type:"string" required:"true"` } @@ -8715,7 +8736,8 @@ func (s *ConcurrentModificationException) RequestID() string { type ContainsPiiEntitiesInput struct { _ struct{} `type:"structure"` - // The language of the input documents. + // The language of the input documents. Currently, English is the only valid + // language. // // LanguageCode is a required field LanguageCode *string `type:"string" required:"true" enum:"LanguageCode"` @@ -10950,6 +10972,8 @@ type DetectEntitiesInput struct { // // If you specify an endpoint, Amazon Comprehend uses the language of your custom // model, and it ignores any language code that you provide in your request. + // + // For information about endpoints, see Managing endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). EndpointArn *string `type:"string"` // The language of the input documents. You can specify any of the primary languages @@ -11167,7 +11191,8 @@ func (s *DetectKeyPhrasesOutput) SetKeyPhrases(v []*KeyPhrase) *DetectKeyPhrases type DetectPiiEntitiesInput struct { _ struct{} `type:"structure"` - // The language of the input documents. + // The language of the input documents. Currently, English is the only valid + // language. // // LanguageCode is a required field LanguageCode *string `type:"string" required:"true" enum:"LanguageCode"` @@ -11656,7 +11681,7 @@ type DocumentClassificationJobProperties struct { // Configuration parameters for a private Virtual Private Cloud (VPC) containing // the resources you are using for your document classification job. For more - // information, see Amazon VPC (https://docs.aws.amazon.com/vpc/latest/userguide/what-is-amazon-vpc.html). + // information, see Amazon VPC (https://docs.aws.amazon.com/vpc/latest/userguide/what-is-amazon-vpc.html). 
VpcConfig *VpcConfig `type:"structure"` } @@ -11823,7 +11848,7 @@ func (s *DocumentClassifierFilter) SetSubmitTimeBefore(v time.Time) *DocumentCla // The input properties for training a document classifier. // -// For more information on how the input file is formatted, see how-document-classification-training-data. +// For more information on how the input file is formatted, see prep-classifier-data. type DocumentClassifierInputDataConfig struct { _ struct{} `type:"structure"` @@ -12096,7 +12121,7 @@ type DocumentClassifierProperties struct { // Configuration parameters for a private Virtual Private Cloud (VPC) containing // the resources you are using for your custom classifier. For more information, - // see Amazon VPC (https://docs.aws.amazon.com/vpc/latest/userguide/what-is-amazon-vpc.html). + // see Amazon VPC (https://docs.aws.amazon.com/vpc/latest/userguide/what-is-amazon-vpc.html). VpcConfig *VpcConfig `type:"structure"` } @@ -12752,7 +12777,8 @@ func (s *EndpointFilter) SetStatus(v string) *EndpointFilter { return s } -// Specifies information about the specified endpoint. +// Specifies information about the specified endpoint. For information about +// endpoints, see Managing endpoints (https://docs.aws.amazon.com/comprehend/latest/dg/manage-endpoints.html). type EndpointProperties struct { _ struct{} `type:"structure"` @@ -19362,7 +19388,8 @@ type StartPiiEntitiesDetectionJobInput struct { // The identifier of the job. JobName *string `min:"1" type:"string"` - // The language of the input documents. + // The language of the input documents. Currently, English is the only valid + // language. // // LanguageCode is a required field LanguageCode *string `type:"string" required:"true" enum:"LanguageCode"` @@ -19853,8 +19880,8 @@ type StartTargetedSentimentDetectionJobInput struct { // The identifier of the job. JobName *string `min:"1" type:"string"` - // The language of the input documents. 
You can specify any of the primary languages - // supported by Amazon Comprehend. All documents must be in the same language. + // The language of the input documents. Currently, English is the only valid + // language. // // LanguageCode is a required field LanguageCode *string `type:"string" required:"true" enum:"LanguageCode"` @@ -22912,6 +22939,48 @@ const ( // PiiEntityTypeAll is a PiiEntityType enum value PiiEntityTypeAll = "ALL" + + // PiiEntityTypeLicensePlate is a PiiEntityType enum value + PiiEntityTypeLicensePlate = "LICENSE_PLATE" + + // PiiEntityTypeVehicleIdentificationNumber is a PiiEntityType enum value + PiiEntityTypeVehicleIdentificationNumber = "VEHICLE_IDENTIFICATION_NUMBER" + + // PiiEntityTypeUkNationalInsuranceNumber is a PiiEntityType enum value + PiiEntityTypeUkNationalInsuranceNumber = "UK_NATIONAL_INSURANCE_NUMBER" + + // PiiEntityTypeCaSocialInsuranceNumber is a PiiEntityType enum value + PiiEntityTypeCaSocialInsuranceNumber = "CA_SOCIAL_INSURANCE_NUMBER" + + // PiiEntityTypeUsIndividualTaxIdentificationNumber is a PiiEntityType enum value + PiiEntityTypeUsIndividualTaxIdentificationNumber = "US_INDIVIDUAL_TAX_IDENTIFICATION_NUMBER" + + // PiiEntityTypeUkUniqueTaxpayerReferenceNumber is a PiiEntityType enum value + PiiEntityTypeUkUniqueTaxpayerReferenceNumber = "UK_UNIQUE_TAXPAYER_REFERENCE_NUMBER" + + // PiiEntityTypeInPermanentAccountNumber is a PiiEntityType enum value + PiiEntityTypeInPermanentAccountNumber = "IN_PERMANENT_ACCOUNT_NUMBER" + + // PiiEntityTypeInNrega is a PiiEntityType enum value + PiiEntityTypeInNrega = "IN_NREGA" + + // PiiEntityTypeInternationalBankAccountNumber is a PiiEntityType enum value + PiiEntityTypeInternationalBankAccountNumber = "INTERNATIONAL_BANK_ACCOUNT_NUMBER" + + // PiiEntityTypeSwiftCode is a PiiEntityType enum value + PiiEntityTypeSwiftCode = "SWIFT_CODE" + + // PiiEntityTypeUkNationalHealthServiceNumber is a PiiEntityType enum value + PiiEntityTypeUkNationalHealthServiceNumber = 
"UK_NATIONAL_HEALTH_SERVICE_NUMBER" + + // PiiEntityTypeCaHealthNumber is a PiiEntityType enum value + PiiEntityTypeCaHealthNumber = "CA_HEALTH_NUMBER" + + // PiiEntityTypeInAadhaar is a PiiEntityType enum value + PiiEntityTypeInAadhaar = "IN_AADHAAR" + + // PiiEntityTypeInVoterNumber is a PiiEntityType enum value + PiiEntityTypeInVoterNumber = "IN_VOTER_NUMBER" ) // PiiEntityType_Values returns all elements of the PiiEntityType enum @@ -22940,6 +23009,20 @@ func PiiEntityType_Values() []string { PiiEntityTypeIpAddress, PiiEntityTypeMacAddress, PiiEntityTypeAll, + PiiEntityTypeLicensePlate, + PiiEntityTypeVehicleIdentificationNumber, + PiiEntityTypeUkNationalInsuranceNumber, + PiiEntityTypeCaSocialInsuranceNumber, + PiiEntityTypeUsIndividualTaxIdentificationNumber, + PiiEntityTypeUkUniqueTaxpayerReferenceNumber, + PiiEntityTypeInPermanentAccountNumber, + PiiEntityTypeInNrega, + PiiEntityTypeInternationalBankAccountNumber, + PiiEntityTypeSwiftCode, + PiiEntityTypeUkNationalHealthServiceNumber, + PiiEntityTypeCaHealthNumber, + PiiEntityTypeInAadhaar, + PiiEntityTypeInVoterNumber, } }