Create DataIntegrationFlow to map one or more different sources to one target using the SQL transformation query.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, CreateDataIntegrationFlowCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, CreateDataIntegrationFlowCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // CreateDataIntegrationFlowRequest + * instanceId: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * sources: [ // DataIntegrationFlowSourceList // required + * { // DataIntegrationFlowSource + * sourceType: "S3" || "DATASET", // required + * sourceName: "STRING_VALUE", // required + * s3Source: { // DataIntegrationFlowS3SourceConfiguration + * bucketName: "STRING_VALUE", // required + * prefix: "STRING_VALUE", // required + * options: { // DataIntegrationFlowS3Options + * fileType: "CSV" || "PARQUET" || "JSON", + * }, + * }, + * datasetSource: { // DataIntegrationFlowDatasetSourceConfiguration + * datasetIdentifier: "STRING_VALUE", // required + * options: { // DataIntegrationFlowDatasetOptions + * loadType: "INCREMENTAL" || "REPLACE", + * dedupeRecords: true || false, + * }, + * }, + * }, + * ], + * transformation: { // DataIntegrationFlowTransformation + * transformationType: "SQL" || "NONE", // required + * sqlTransformation: { // DataIntegrationFlowSQLTransformationConfiguration + * query: "STRING_VALUE", // required + * }, + * }, + * target: { // DataIntegrationFlowTarget + * targetType: "S3" || "DATASET", // required + * s3Target: { // DataIntegrationFlowS3TargetConfiguration + * bucketName: "STRING_VALUE", // required + * prefix: "STRING_VALUE", // required + * options: { + * fileType: "CSV" || "PARQUET" || "JSON", + * }, + * }, + * datasetTarget: { // DataIntegrationFlowDatasetTargetConfiguration + * datasetIdentifier: "STRING_VALUE", // required + * options: { + * loadType: "INCREMENTAL" || "REPLACE", + * 
dedupeRecords: true || false, + * }, + * }, + * }, + * tags: { // TagMap + * "You do not have the required privileges to perform this action.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class CreateDataIntegrationFlowCommand extends $Command + .classBuilder< + CreateDataIntegrationFlowCommandInput, + CreateDataIntegrationFlowCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "CreateDataIntegrationFlow", {}) + .n("SupplyChainClient", "CreateDataIntegrationFlowCommand") + .f(void 0, void 0) + .ser(se_CreateDataIntegrationFlowCommand) + .de(de_CreateDataIntegrationFlowCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: CreateDataIntegrationFlowRequest; + output: CreateDataIntegrationFlowResponse; + }; + sdk: { + input: CreateDataIntegrationFlowCommandInput; + output: CreateDataIntegrationFlowCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/CreateDataLakeDatasetCommand.ts b/clients/client-supplychain/src/commands/CreateDataLakeDatasetCommand.ts new file mode 100644 index 000000000000..1a097f14a861 --- /dev/null +++ b/clients/client-supplychain/src/commands/CreateDataLakeDatasetCommand.ts @@ -0,0 +1,147 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { CreateDataLakeDatasetRequest, CreateDataLakeDatasetResponse } from "../models/models_0"; +import { de_CreateDataLakeDatasetCommand, se_CreateDataLakeDatasetCommand } from 
"../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link CreateDataLakeDatasetCommand}. + */ +export interface CreateDataLakeDatasetCommandInput extends CreateDataLakeDatasetRequest {} +/** + * @public + * + * The output of {@link CreateDataLakeDatasetCommand}. + */ +export interface CreateDataLakeDatasetCommandOutput extends CreateDataLakeDatasetResponse, __MetadataBearer {} + +/** + *Create a data lake dataset.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, CreateDataLakeDatasetCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, CreateDataLakeDatasetCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // CreateDataLakeDatasetRequest + * instanceId: "STRING_VALUE", // required + * namespace: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * schema: { // DataLakeDatasetSchema + * name: "STRING_VALUE", // required + * fields: [ // DataLakeDatasetSchemaFieldList // required + * { // DataLakeDatasetSchemaField + * name: "STRING_VALUE", // required + * type: "INT" || "DOUBLE" || "STRING" || "TIMESTAMP", // required + * isRequired: true || false, // required + * }, + * ], + * }, + * description: "STRING_VALUE", + * tags: { // TagMap + * "You do not have the required privileges to perform this action.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class CreateDataLakeDatasetCommand extends $Command + .classBuilder< + CreateDataLakeDatasetCommandInput, + CreateDataLakeDatasetCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "CreateDataLakeDataset", {}) + .n("SupplyChainClient", "CreateDataLakeDatasetCommand") + .f(void 0, void 0) + .ser(se_CreateDataLakeDatasetCommand) + .de(de_CreateDataLakeDatasetCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: CreateDataLakeDatasetRequest; + output: CreateDataLakeDatasetResponse; + }; + sdk: { + input: CreateDataLakeDatasetCommandInput; + output: CreateDataLakeDatasetCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/DeleteDataIntegrationFlowCommand.ts b/clients/client-supplychain/src/commands/DeleteDataIntegrationFlowCommand.ts new file mode 100644 index 000000000000..dc2223b4c4a9 --- /dev/null +++ b/clients/client-supplychain/src/commands/DeleteDataIntegrationFlowCommand.ts @@ -0,0 +1,115 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { DeleteDataIntegrationFlowRequest, DeleteDataIntegrationFlowResponse } from "../models/models_0"; +import { de_DeleteDataIntegrationFlowCommand, se_DeleteDataIntegrationFlowCommand } from "../protocols/Aws_restJson1"; +import { 
ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteDataIntegrationFlowCommand}. + */ +export interface DeleteDataIntegrationFlowCommandInput extends DeleteDataIntegrationFlowRequest {} +/** + * @public + * + * The output of {@link DeleteDataIntegrationFlowCommand}. + */ +export interface DeleteDataIntegrationFlowCommandOutput extends DeleteDataIntegrationFlowResponse, __MetadataBearer {} + +/** + *Delete the DataIntegrationFlow.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, DeleteDataIntegrationFlowCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, DeleteDataIntegrationFlowCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // DeleteDataIntegrationFlowRequest + * instanceId: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * }; + * const command = new DeleteDataIntegrationFlowCommand(input); + * const response = await client.send(command); + * // { // DeleteDataIntegrationFlowResponse + * // instanceId: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // }; + * + * ``` + * + * @param DeleteDataIntegrationFlowCommandInput - {@link DeleteDataIntegrationFlowCommandInput} + * @returns {@link DeleteDataIntegrationFlowCommandOutput} + * @see {@link DeleteDataIntegrationFlowCommandInput} for command's `input` shape. + * @see {@link DeleteDataIntegrationFlowCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class DeleteDataIntegrationFlowCommand extends $Command + .classBuilder< + DeleteDataIntegrationFlowCommandInput, + DeleteDataIntegrationFlowCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "DeleteDataIntegrationFlow", {}) + .n("SupplyChainClient", "DeleteDataIntegrationFlowCommand") + .f(void 0, void 0) + .ser(se_DeleteDataIntegrationFlowCommand) + .de(de_DeleteDataIntegrationFlowCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: DeleteDataIntegrationFlowRequest; + output: DeleteDataIntegrationFlowResponse; + }; + sdk: { + input: DeleteDataIntegrationFlowCommandInput; + output: DeleteDataIntegrationFlowCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/DeleteDataLakeDatasetCommand.ts b/clients/client-supplychain/src/commands/DeleteDataLakeDatasetCommand.ts new file mode 100644 index 000000000000..2a7654f75d2a --- /dev/null +++ b/clients/client-supplychain/src/commands/DeleteDataLakeDatasetCommand.ts @@ -0,0 +1,117 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { DeleteDataLakeDatasetRequest, DeleteDataLakeDatasetResponse } from "../models/models_0"; +import { de_DeleteDataLakeDatasetCommand, se_DeleteDataLakeDatasetCommand } from 
"../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link DeleteDataLakeDatasetCommand}. + */ +export interface DeleteDataLakeDatasetCommandInput extends DeleteDataLakeDatasetRequest {} +/** + * @public + * + * The output of {@link DeleteDataLakeDatasetCommand}. + */ +export interface DeleteDataLakeDatasetCommandOutput extends DeleteDataLakeDatasetResponse, __MetadataBearer {} + +/** + *Delete a data lake dataset.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, DeleteDataLakeDatasetCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, DeleteDataLakeDatasetCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // DeleteDataLakeDatasetRequest + * instanceId: "STRING_VALUE", // required + * namespace: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * }; + * const command = new DeleteDataLakeDatasetCommand(input); + * const response = await client.send(command); + * // { // DeleteDataLakeDatasetResponse + * // instanceId: "STRING_VALUE", // required + * // namespace: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // }; + * + * ``` + * + * @param DeleteDataLakeDatasetCommandInput - {@link DeleteDataLakeDatasetCommandInput} + * @returns {@link DeleteDataLakeDatasetCommandOutput} + * @see {@link DeleteDataLakeDatasetCommandInput} for command's `input` shape. + * @see {@link DeleteDataLakeDatasetCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class DeleteDataLakeDatasetCommand extends $Command + .classBuilder< + DeleteDataLakeDatasetCommandInput, + DeleteDataLakeDatasetCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "DeleteDataLakeDataset", {}) + .n("SupplyChainClient", "DeleteDataLakeDatasetCommand") + .f(void 0, void 0) + .ser(se_DeleteDataLakeDatasetCommand) + .de(de_DeleteDataLakeDatasetCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: DeleteDataLakeDatasetRequest; + output: DeleteDataLakeDatasetResponse; + }; + sdk: { + input: DeleteDataLakeDatasetCommandInput; + output: DeleteDataLakeDatasetCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/GetDataIntegrationFlowCommand.ts b/clients/client-supplychain/src/commands/GetDataIntegrationFlowCommand.ts new file mode 100644 index 000000000000..9b5384847fde --- /dev/null +++ b/clients/client-supplychain/src/commands/GetDataIntegrationFlowCommand.ts @@ -0,0 +1,162 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetDataIntegrationFlowRequest, GetDataIntegrationFlowResponse } from "../models/models_0"; +import { de_GetDataIntegrationFlowCommand, se_GetDataIntegrationFlowCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, 
ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetDataIntegrationFlowCommand}. + */ +export interface GetDataIntegrationFlowCommandInput extends GetDataIntegrationFlowRequest {} +/** + * @public + * + * The output of {@link GetDataIntegrationFlowCommand}. + */ +export interface GetDataIntegrationFlowCommandOutput extends GetDataIntegrationFlowResponse, __MetadataBearer {} + +/** + *View the DataIntegrationFlow details.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, GetDataIntegrationFlowCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, GetDataIntegrationFlowCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // GetDataIntegrationFlowRequest + * instanceId: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * }; + * const command = new GetDataIntegrationFlowCommand(input); + * const response = await client.send(command); + * // { // GetDataIntegrationFlowResponse + * // flow: { // DataIntegrationFlow + * // instanceId: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // sources: [ // DataIntegrationFlowSourceList // required + * // { // DataIntegrationFlowSource + * // sourceType: "S3" || "DATASET", // required + * // sourceName: "STRING_VALUE", // required + * // s3Source: { // DataIntegrationFlowS3SourceConfiguration + * // bucketName: "STRING_VALUE", // required + * // prefix: "STRING_VALUE", // required + * // options: { // DataIntegrationFlowS3Options + * // fileType: "CSV" || "PARQUET" || "JSON", + * // }, + * // }, + * // datasetSource: { // DataIntegrationFlowDatasetSourceConfiguration + * // datasetIdentifier: "STRING_VALUE", // required + * // options: { // DataIntegrationFlowDatasetOptions + * // loadType: "INCREMENTAL" || "REPLACE", + * // dedupeRecords: true || false, + * // }, + * // }, + * // }, + * // ], + * // transformation: { // DataIntegrationFlowTransformation + * // transformationType: "SQL" || "NONE", // required + * // sqlTransformation: { // DataIntegrationFlowSQLTransformationConfiguration + * // query: "STRING_VALUE", // required + * // }, + * // }, + * // target: { // DataIntegrationFlowTarget + * // targetType: "S3" || "DATASET", // required + * // s3Target: { // 
DataIntegrationFlowS3TargetConfiguration + * // bucketName: "STRING_VALUE", // required + * // prefix: "STRING_VALUE", // required + * // options: { + * // fileType: "CSV" || "PARQUET" || "JSON", + * // }, + * // }, + * // datasetTarget: { // DataIntegrationFlowDatasetTargetConfiguration + * // datasetIdentifier: "STRING_VALUE", // required + * // options: { + * // loadType: "INCREMENTAL" || "REPLACE", + * // dedupeRecords: true || false, + * // }, + * // }, + * // }, + * // createdTime: new Date("TIMESTAMP"), // required + * // lastModifiedTime: new Date("TIMESTAMP"), // required + * // }, + * // }; + * + * ``` + * + * @param GetDataIntegrationFlowCommandInput - {@link GetDataIntegrationFlowCommandInput} + * @returns {@link GetDataIntegrationFlowCommandOutput} + * @see {@link GetDataIntegrationFlowCommandInput} for command's `input` shape. + * @see {@link GetDataIntegrationFlowCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class GetDataIntegrationFlowCommand extends $Command + .classBuilder< + GetDataIntegrationFlowCommandInput, + GetDataIntegrationFlowCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "GetDataIntegrationFlow", {}) + .n("SupplyChainClient", "GetDataIntegrationFlowCommand") + .f(void 0, void 0) + .ser(se_GetDataIntegrationFlowCommand) + .de(de_GetDataIntegrationFlowCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: GetDataIntegrationFlowRequest; + output: GetDataIntegrationFlowResponse; + }; + sdk: { + input: GetDataIntegrationFlowCommandInput; + output: GetDataIntegrationFlowCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/GetDataLakeDatasetCommand.ts b/clients/client-supplychain/src/commands/GetDataLakeDatasetCommand.ts new file mode 100644 index 000000000000..61c7507ceb7f --- /dev/null +++ b/clients/client-supplychain/src/commands/GetDataLakeDatasetCommand.ts @@ -0,0 +1,133 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { GetDataLakeDatasetRequest, GetDataLakeDatasetResponse } from "../models/models_0"; +import { de_GetDataLakeDatasetCommand, se_GetDataLakeDatasetCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, 
SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link GetDataLakeDatasetCommand}. + */ +export interface GetDataLakeDatasetCommandInput extends GetDataLakeDatasetRequest {} +/** + * @public + * + * The output of {@link GetDataLakeDatasetCommand}. + */ +export interface GetDataLakeDatasetCommandOutput extends GetDataLakeDatasetResponse, __MetadataBearer {} + +/** + *Get a data lake dataset.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, GetDataLakeDatasetCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, GetDataLakeDatasetCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // GetDataLakeDatasetRequest + * instanceId: "STRING_VALUE", // required + * namespace: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * }; + * const command = new GetDataLakeDatasetCommand(input); + * const response = await client.send(command); + * // { // GetDataLakeDatasetResponse + * // dataset: { // DataLakeDataset + * // instanceId: "STRING_VALUE", // required + * // namespace: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // arn: "STRING_VALUE", // required + * // schema: { // DataLakeDatasetSchema + * // name: "STRING_VALUE", // required + * // fields: [ // DataLakeDatasetSchemaFieldList // required + * // { // DataLakeDatasetSchemaField + * // name: "STRING_VALUE", // required + * // type: "INT" || "DOUBLE" || "STRING" || "TIMESTAMP", // required + * // isRequired: true || false, // required + * // }, + * // ], + * // }, + * // description: "STRING_VALUE", + * // createdTime: new Date("TIMESTAMP"), // required + * // lastModifiedTime: new Date("TIMESTAMP"), // required + * // }, + * // }; + * + * ``` + * + * @param GetDataLakeDatasetCommandInput - {@link GetDataLakeDatasetCommandInput} + * @returns {@link GetDataLakeDatasetCommandOutput} + * @see {@link GetDataLakeDatasetCommandInput} for command's `input` shape. + * @see {@link GetDataLakeDatasetCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. 
+ * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class GetDataLakeDatasetCommand extends $Command + .classBuilder< + GetDataLakeDatasetCommandInput, + GetDataLakeDatasetCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "GetDataLakeDataset", {}) + .n("SupplyChainClient", "GetDataLakeDatasetCommand") + .f(void 0, void 0) + .ser(se_GetDataLakeDatasetCommand) + .de(de_GetDataLakeDatasetCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: GetDataLakeDatasetRequest; + output: GetDataLakeDatasetResponse; + }; + sdk: { + input: GetDataLakeDatasetCommandInput; + output: GetDataLakeDatasetCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/ListDataIntegrationFlowsCommand.ts b/clients/client-supplychain/src/commands/ListDataIntegrationFlowsCommand.ts new file mode 100644 index 000000000000..fe7fd8ec2e26 --- /dev/null +++ b/clients/client-supplychain/src/commands/ListDataIntegrationFlowsCommand.ts @@ -0,0 +1,166 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListDataIntegrationFlowsRequest, ListDataIntegrationFlowsResponse } from "../models/models_0"; +import { de_ListDataIntegrationFlowsCommand, se_ListDataIntegrationFlowsCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, 
SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListDataIntegrationFlowsCommand}. + */ +export interface ListDataIntegrationFlowsCommandInput extends ListDataIntegrationFlowsRequest {} +/** + * @public + * + * The output of {@link ListDataIntegrationFlowsCommand}. + */ +export interface ListDataIntegrationFlowsCommandOutput extends ListDataIntegrationFlowsResponse, __MetadataBearer {} + +/** + *Lists all the DataIntegrationFlows in a paginated way.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, ListDataIntegrationFlowsCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, ListDataIntegrationFlowsCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // ListDataIntegrationFlowsRequest + * instanceId: "STRING_VALUE", // required + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * }; + * const command = new ListDataIntegrationFlowsCommand(input); + * const response = await client.send(command); + * // { // ListDataIntegrationFlowsResponse + * // flows: [ // DataIntegrationFlowList // required + * // { // DataIntegrationFlow + * // instanceId: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // sources: [ // DataIntegrationFlowSourceList // required + * // { // DataIntegrationFlowSource + * // sourceType: "S3" || "DATASET", // required + * // sourceName: "STRING_VALUE", // required + * // s3Source: { // DataIntegrationFlowS3SourceConfiguration + * // bucketName: "STRING_VALUE", // required + * // prefix: "STRING_VALUE", // required + * // options: { // DataIntegrationFlowS3Options + * // fileType: "CSV" || "PARQUET" || "JSON", + * // }, + * // }, + * // datasetSource: { // DataIntegrationFlowDatasetSourceConfiguration + * // datasetIdentifier: "STRING_VALUE", // required + * // options: { // DataIntegrationFlowDatasetOptions + * // loadType: "INCREMENTAL" || "REPLACE", + * // dedupeRecords: true || false, + * // }, + * // }, + * // }, + * // ], + * // transformation: { // DataIntegrationFlowTransformation + * // transformationType: "SQL" || "NONE", // required + * // sqlTransformation: { // DataIntegrationFlowSQLTransformationConfiguration + * // query: "STRING_VALUE", // required + * // }, + * // }, + * // target: { // DataIntegrationFlowTarget + * // 
targetType: "S3" || "DATASET", // required + * // s3Target: { // DataIntegrationFlowS3TargetConfiguration + * // bucketName: "STRING_VALUE", // required + * // prefix: "STRING_VALUE", // required + * // options: { + * // fileType: "CSV" || "PARQUET" || "JSON", + * // }, + * // }, + * // datasetTarget: { // DataIntegrationFlowDatasetTargetConfiguration + * // datasetIdentifier: "STRING_VALUE", // required + * // options: { + * // loadType: "INCREMENTAL" || "REPLACE", + * // dedupeRecords: true || false, + * // }, + * // }, + * // }, + * // createdTime: new Date("TIMESTAMP"), // required + * // lastModifiedTime: new Date("TIMESTAMP"), // required + * // }, + * // ], + * // nextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListDataIntegrationFlowsCommandInput - {@link ListDataIntegrationFlowsCommandInput} + * @returns {@link ListDataIntegrationFlowsCommandOutput} + * @see {@link ListDataIntegrationFlowsCommandInput} for command's `input` shape. + * @see {@link ListDataIntegrationFlowsCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class ListDataIntegrationFlowsCommand extends $Command + .classBuilder< + ListDataIntegrationFlowsCommandInput, + ListDataIntegrationFlowsCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "ListDataIntegrationFlows", {}) + .n("SupplyChainClient", "ListDataIntegrationFlowsCommand") + .f(void 0, void 0) + .ser(se_ListDataIntegrationFlowsCommand) + .de(de_ListDataIntegrationFlowsCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: ListDataIntegrationFlowsRequest; + output: ListDataIntegrationFlowsResponse; + }; + sdk: { + input: ListDataIntegrationFlowsCommandInput; + output: ListDataIntegrationFlowsCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/ListDataLakeDatasetsCommand.ts b/clients/client-supplychain/src/commands/ListDataLakeDatasetsCommand.ts new file mode 100644 index 000000000000..8c55a1e65f2f --- /dev/null +++ b/clients/client-supplychain/src/commands/ListDataLakeDatasetsCommand.ts @@ -0,0 +1,137 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListDataLakeDatasetsRequest, ListDataLakeDatasetsResponse } from "../models/models_0"; +import { de_ListDataLakeDatasetsCommand, se_ListDataLakeDatasetsCommand } from "../protocols/Aws_restJson1"; +import { 
ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListDataLakeDatasetsCommand}. + */ +export interface ListDataLakeDatasetsCommandInput extends ListDataLakeDatasetsRequest {} +/** + * @public + * + * The output of {@link ListDataLakeDatasetsCommand}. + */ +export interface ListDataLakeDatasetsCommandOutput extends ListDataLakeDatasetsResponse, __MetadataBearer {} + +/** + *List the data lake datasets for a specific instance and name space.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, ListDataLakeDatasetsCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, ListDataLakeDatasetsCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // ListDataLakeDatasetsRequest + * instanceId: "STRING_VALUE", // required + * namespace: "STRING_VALUE", // required + * nextToken: "STRING_VALUE", + * maxResults: Number("int"), + * }; + * const command = new ListDataLakeDatasetsCommand(input); + * const response = await client.send(command); + * // { // ListDataLakeDatasetsResponse + * // datasets: [ // DataLakeDatasetList // required + * // { // DataLakeDataset + * // instanceId: "STRING_VALUE", // required + * // namespace: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // arn: "STRING_VALUE", // required + * // schema: { // DataLakeDatasetSchema + * // name: "STRING_VALUE", // required + * // fields: [ // DataLakeDatasetSchemaFieldList // required + * // { // DataLakeDatasetSchemaField + * // name: "STRING_VALUE", // required + * // type: "INT" || "DOUBLE" || "STRING" || "TIMESTAMP", // required + * // isRequired: true || false, // required + * // }, + * // ], + * // }, + * // description: "STRING_VALUE", + * // createdTime: new Date("TIMESTAMP"), // required + * // lastModifiedTime: new Date("TIMESTAMP"), // required + * // }, + * // ], + * // nextToken: "STRING_VALUE", + * // }; + * + * ``` + * + * @param ListDataLakeDatasetsCommandInput - {@link ListDataLakeDatasetsCommandInput} + * @returns {@link ListDataLakeDatasetsCommandOutput} + * @see {@link ListDataLakeDatasetsCommandInput} for command's `input` shape. + * @see {@link ListDataLakeDatasetsCommandOutput} for command's `response` shape. 
+ * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class ListDataLakeDatasetsCommand extends $Command + .classBuilder< + ListDataLakeDatasetsCommandInput, + ListDataLakeDatasetsCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "ListDataLakeDatasets", {}) + .n("SupplyChainClient", "ListDataLakeDatasetsCommand") + .f(void 0, void 0) + .ser(se_ListDataLakeDatasetsCommand) + .de(de_ListDataLakeDatasetsCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: ListDataLakeDatasetsRequest; + output: ListDataLakeDatasetsResponse; + }; + sdk: { + input: ListDataLakeDatasetsCommandInput; + output: ListDataLakeDatasetsCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/ListTagsForResourceCommand.ts b/clients/client-supplychain/src/commands/ListTagsForResourceCommand.ts new file mode 100644 index 000000000000..a1f413cf03b3 --- /dev/null +++ b/clients/client-supplychain/src/commands/ListTagsForResourceCommand.ts @@ -0,0 +1,115 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { ListTagsForResourceRequest, ListTagsForResourceResponse } from "../models/models_0"; +import { de_ListTagsForResourceCommand, se_ListTagsForResourceCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, 
SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link ListTagsForResourceCommand}. + */ +export interface ListTagsForResourceCommandInput extends ListTagsForResourceRequest {} +/** + * @public + * + * The output of {@link ListTagsForResourceCommand}. + */ +export interface ListTagsForResourceCommandOutput extends ListTagsForResourceResponse, __MetadataBearer {} + +/** + *List all the tags for an Amazon Web ServicesSupply Chain resource.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, ListTagsForResourceCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, ListTagsForResourceCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // ListTagsForResourceRequest + * resourceArn: "STRING_VALUE", // required + * }; + * const command = new ListTagsForResourceCommand(input); + * const response = await client.send(command); + * // { // ListTagsForResourceResponse + * // tags: { // TagMap // required + * // "You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class ListTagsForResourceCommand extends $Command + .classBuilder< + ListTagsForResourceCommandInput, + ListTagsForResourceCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "ListTagsForResource", {}) + .n("SupplyChainClient", "ListTagsForResourceCommand") + .f(void 0, void 0) + .ser(se_ListTagsForResourceCommand) + .de(de_ListTagsForResourceCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: ListTagsForResourceRequest; + output: ListTagsForResourceResponse; + }; + sdk: { + input: ListTagsForResourceCommandInput; + output: ListTagsForResourceCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/TagResourceCommand.ts b/clients/client-supplychain/src/commands/TagResourceCommand.ts new file mode 100644 index 000000000000..a6247565e867 --- /dev/null +++ b/clients/client-supplychain/src/commands/TagResourceCommand.ts @@ -0,0 +1,114 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { TagResourceRequest, TagResourceResponse } from "../models/models_0"; +import { de_TagResourceCommand, se_TagResourceCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + 
*/ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link TagResourceCommand}. + */ +export interface TagResourceCommandInput extends TagResourceRequest {} +/** + * @public + * + * The output of {@link TagResourceCommand}. + */ +export interface TagResourceCommandOutput extends TagResourceResponse, __MetadataBearer {} + +/** + *Create tags for an Amazon Web Services Supply chain resource.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, TagResourceCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, TagResourceCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // TagResourceRequest + * resourceArn: "STRING_VALUE", // required + * tags: { // TagMap // required + * "You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class TagResourceCommand extends $Command + .classBuilder< + TagResourceCommandInput, + TagResourceCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "TagResource", {}) + .n("SupplyChainClient", "TagResourceCommand") + .f(void 0, void 0) + .ser(se_TagResourceCommand) + .de(de_TagResourceCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: TagResourceRequest; + output: {}; + }; + sdk: { + input: TagResourceCommandInput; + output: TagResourceCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/UntagResourceCommand.ts b/clients/client-supplychain/src/commands/UntagResourceCommand.ts new file mode 100644 index 000000000000..ce5b09aeb16e --- /dev/null +++ b/clients/client-supplychain/src/commands/UntagResourceCommand.ts @@ -0,0 +1,114 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { UntagResourceRequest, UntagResourceResponse } from "../models/models_0"; +import { de_UntagResourceCommand, se_UntagResourceCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The 
input for {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandInput extends UntagResourceRequest {} +/** + * @public + * + * The output of {@link UntagResourceCommand}. + */ +export interface UntagResourceCommandOutput extends UntagResourceResponse, __MetadataBearer {} + +/** + *Delete tags for an Amazon Web Services Supply chain resource.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, UntagResourceCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, UntagResourceCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // UntagResourceRequest + * resourceArn: "STRING_VALUE", // required + * tagKeys: [ // TagKeyList // required + * "STRING_VALUE", + * ], + * }; + * const command = new UntagResourceCommand(input); + * const response = await client.send(command); + * // {}; + * + * ``` + * + * @param UntagResourceCommandInput - {@link UntagResourceCommandInput} + * @returns {@link UntagResourceCommandOutput} + * @see {@link UntagResourceCommandInput} for command's `input` shape. + * @see {@link UntagResourceCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class UntagResourceCommand extends $Command + .classBuilder< + UntagResourceCommandInput, + UntagResourceCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "UntagResource", {}) + .n("SupplyChainClient", "UntagResourceCommand") + .f(void 0, void 0) + .ser(se_UntagResourceCommand) + .de(de_UntagResourceCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: UntagResourceRequest; + output: {}; + }; + sdk: { + input: UntagResourceCommandInput; + output: UntagResourceCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/UpdateDataIntegrationFlowCommand.ts b/clients/client-supplychain/src/commands/UpdateDataIntegrationFlowCommand.ts new file mode 100644 index 000000000000..971c537b9983 --- /dev/null +++ b/clients/client-supplychain/src/commands/UpdateDataIntegrationFlowCommand.ts @@ -0,0 +1,205 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { UpdateDataIntegrationFlowRequest, UpdateDataIntegrationFlowResponse } from "../models/models_0"; +import { de_UpdateDataIntegrationFlowCommand, se_UpdateDataIntegrationFlowCommand } from "../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + 
* @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateDataIntegrationFlowCommand}. + */ +export interface UpdateDataIntegrationFlowCommandInput extends UpdateDataIntegrationFlowRequest {} +/** + * @public + * + * The output of {@link UpdateDataIntegrationFlowCommand}. + */ +export interface UpdateDataIntegrationFlowCommandOutput extends UpdateDataIntegrationFlowResponse, __MetadataBearer {} + +/** + *Update the DataIntegrationFlow.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, UpdateDataIntegrationFlowCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, UpdateDataIntegrationFlowCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // UpdateDataIntegrationFlowRequest + * instanceId: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * sources: [ // DataIntegrationFlowSourceList + * { // DataIntegrationFlowSource + * sourceType: "S3" || "DATASET", // required + * sourceName: "STRING_VALUE", // required + * s3Source: { // DataIntegrationFlowS3SourceConfiguration + * bucketName: "STRING_VALUE", // required + * prefix: "STRING_VALUE", // required + * options: { // DataIntegrationFlowS3Options + * fileType: "CSV" || "PARQUET" || "JSON", + * }, + * }, + * datasetSource: { // DataIntegrationFlowDatasetSourceConfiguration + * datasetIdentifier: "STRING_VALUE", // required + * options: { // DataIntegrationFlowDatasetOptions + * loadType: "INCREMENTAL" || "REPLACE", + * dedupeRecords: true || false, + * }, + * }, + * }, + * ], + * transformation: { // DataIntegrationFlowTransformation + * transformationType: "SQL" || "NONE", // required + * sqlTransformation: { // DataIntegrationFlowSQLTransformationConfiguration + * query: "STRING_VALUE", // required + * }, + * }, + * target: { // DataIntegrationFlowTarget + * targetType: "S3" || "DATASET", // required + * s3Target: { // DataIntegrationFlowS3TargetConfiguration + * bucketName: "STRING_VALUE", // required + * prefix: "STRING_VALUE", // required + * options: { + * fileType: "CSV" || "PARQUET" || "JSON", + * }, + * }, + * datasetTarget: { // DataIntegrationFlowDatasetTargetConfiguration + * datasetIdentifier: "STRING_VALUE", // required + * options: { + * loadType: "INCREMENTAL" || "REPLACE", + * dedupeRecords: true 
|| false, + * }, + * }, + * }, + * }; + * const command = new UpdateDataIntegrationFlowCommand(input); + * const response = await client.send(command); + * // { // UpdateDataIntegrationFlowResponse + * // flow: { // DataIntegrationFlow + * // instanceId: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // sources: [ // DataIntegrationFlowSourceList // required + * // { // DataIntegrationFlowSource + * // sourceType: "S3" || "DATASET", // required + * // sourceName: "STRING_VALUE", // required + * // s3Source: { // DataIntegrationFlowS3SourceConfiguration + * // bucketName: "STRING_VALUE", // required + * // prefix: "STRING_VALUE", // required + * // options: { // DataIntegrationFlowS3Options + * // fileType: "CSV" || "PARQUET" || "JSON", + * // }, + * // }, + * // datasetSource: { // DataIntegrationFlowDatasetSourceConfiguration + * // datasetIdentifier: "STRING_VALUE", // required + * // options: { // DataIntegrationFlowDatasetOptions + * // loadType: "INCREMENTAL" || "REPLACE", + * // dedupeRecords: true || false, + * // }, + * // }, + * // }, + * // ], + * // transformation: { // DataIntegrationFlowTransformation + * // transformationType: "SQL" || "NONE", // required + * // sqlTransformation: { // DataIntegrationFlowSQLTransformationConfiguration + * // query: "STRING_VALUE", // required + * // }, + * // }, + * // target: { // DataIntegrationFlowTarget + * // targetType: "S3" || "DATASET", // required + * // s3Target: { // DataIntegrationFlowS3TargetConfiguration + * // bucketName: "STRING_VALUE", // required + * // prefix: "STRING_VALUE", // required + * // options: { + * // fileType: "CSV" || "PARQUET" || "JSON", + * // }, + * // }, + * // datasetTarget: { // DataIntegrationFlowDatasetTargetConfiguration + * // datasetIdentifier: "STRING_VALUE", // required + * // options: { + * // loadType: "INCREMENTAL" || "REPLACE", + * // dedupeRecords: true || false, + * // }, + * // }, + * // }, + * // createdTime: new Date("TIMESTAMP"), // 
required + * // lastModifiedTime: new Date("TIMESTAMP"), // required + * // }, + * // }; + * + * ``` + * + * @param UpdateDataIntegrationFlowCommandInput - {@link UpdateDataIntegrationFlowCommandInput} + * @returns {@link UpdateDataIntegrationFlowCommandOutput} + * @see {@link UpdateDataIntegrationFlowCommandInput} for command's `input` shape. + * @see {@link UpdateDataIntegrationFlowCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. + * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class UpdateDataIntegrationFlowCommand extends $Command + .classBuilder< + UpdateDataIntegrationFlowCommandInput, + UpdateDataIntegrationFlowCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "UpdateDataIntegrationFlow", {}) + .n("SupplyChainClient", "UpdateDataIntegrationFlowCommand") + .f(void 0, void 0) + .ser(se_UpdateDataIntegrationFlowCommand) + .de(de_UpdateDataIntegrationFlowCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: UpdateDataIntegrationFlowRequest; + output: UpdateDataIntegrationFlowResponse; + }; + sdk: { + input: UpdateDataIntegrationFlowCommandInput; + output: UpdateDataIntegrationFlowCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/UpdateDataLakeDatasetCommand.ts b/clients/client-supplychain/src/commands/UpdateDataLakeDatasetCommand.ts new file mode 100644 index 000000000000..051d31c5d1e6 --- /dev/null +++ b/clients/client-supplychain/src/commands/UpdateDataLakeDatasetCommand.ts @@ -0,0 +1,134 @@ +// smithy-typescript generated code +import { getEndpointPlugin } from "@smithy/middleware-endpoint"; +import { getSerdePlugin } from "@smithy/middleware-serde"; +import { Command as $Command } from "@smithy/smithy-client"; +import { MetadataBearer as __MetadataBearer } from "@smithy/types"; + +import { commonParams } from "../endpoint/EndpointParameters"; +import { UpdateDataLakeDatasetRequest, UpdateDataLakeDatasetResponse } from "../models/models_0"; +import { de_UpdateDataLakeDatasetCommand, se_UpdateDataLakeDatasetCommand } from 
"../protocols/Aws_restJson1"; +import { ServiceInputTypes, ServiceOutputTypes, SupplyChainClientResolvedConfig } from "../SupplyChainClient"; + +/** + * @public + */ +export type { __MetadataBearer }; +export { $Command }; +/** + * @public + * + * The input for {@link UpdateDataLakeDatasetCommand}. + */ +export interface UpdateDataLakeDatasetCommandInput extends UpdateDataLakeDatasetRequest {} +/** + * @public + * + * The output of {@link UpdateDataLakeDatasetCommand}. + */ +export interface UpdateDataLakeDatasetCommandOutput extends UpdateDataLakeDatasetResponse, __MetadataBearer {} + +/** + *Update a data lake dataset.
+ * @example + * Use a bare-bones client and the command you need to make an API call. + * ```javascript + * import { SupplyChainClient, UpdateDataLakeDatasetCommand } from "@aws-sdk/client-supplychain"; // ES Modules import + * // const { SupplyChainClient, UpdateDataLakeDatasetCommand } = require("@aws-sdk/client-supplychain"); // CommonJS import + * const client = new SupplyChainClient(config); + * const input = { // UpdateDataLakeDatasetRequest + * instanceId: "STRING_VALUE", // required + * namespace: "STRING_VALUE", // required + * name: "STRING_VALUE", // required + * description: "STRING_VALUE", + * }; + * const command = new UpdateDataLakeDatasetCommand(input); + * const response = await client.send(command); + * // { // UpdateDataLakeDatasetResponse + * // dataset: { // DataLakeDataset + * // instanceId: "STRING_VALUE", // required + * // namespace: "STRING_VALUE", // required + * // name: "STRING_VALUE", // required + * // arn: "STRING_VALUE", // required + * // schema: { // DataLakeDatasetSchema + * // name: "STRING_VALUE", // required + * // fields: [ // DataLakeDatasetSchemaFieldList // required + * // { // DataLakeDatasetSchemaField + * // name: "STRING_VALUE", // required + * // type: "INT" || "DOUBLE" || "STRING" || "TIMESTAMP", // required + * // isRequired: true || false, // required + * // }, + * // ], + * // }, + * // description: "STRING_VALUE", + * // createdTime: new Date("TIMESTAMP"), // required + * // lastModifiedTime: new Date("TIMESTAMP"), // required + * // }, + * // }; + * + * ``` + * + * @param UpdateDataLakeDatasetCommandInput - {@link UpdateDataLakeDatasetCommandInput} + * @returns {@link UpdateDataLakeDatasetCommandOutput} + * @see {@link UpdateDataLakeDatasetCommandInput} for command's `input` shape. + * @see {@link UpdateDataLakeDatasetCommandOutput} for command's `response` shape. + * @see {@link SupplyChainClientResolvedConfig | config} for SupplyChainClient's `config` shape. 
+ * + * @throws {@link AccessDeniedException} (client fault) + *You do not have the required privileges to perform this action.
+ * + * @throws {@link InternalServerException} (server fault) + *Unexpected error during processing of request.
+ * + * @throws {@link ResourceNotFoundException} (client fault) + *Request references a resource which does not exist.
+ * + * @throws {@link ThrottlingException} (client fault) + *Request was denied due to request throttling.
+ * + * @throws {@link ValidationException} (client fault) + *The input does not satisfy the constraints specified by an AWS service.
+ * + * @throws {@link ConflictException} (client fault) + *Updating or deleting a resource can cause an inconsistent state.
+ * + * @throws {@link ServiceQuotaExceededException} (client fault) + *Request would cause a service quota to be exceeded.
+ * + * @throws {@link SupplyChainServiceException} + *Base exception class for all service exceptions from SupplyChain service.
+ * + * @public + */ +export class UpdateDataLakeDatasetCommand extends $Command + .classBuilder< + UpdateDataLakeDatasetCommandInput, + UpdateDataLakeDatasetCommandOutput, + SupplyChainClientResolvedConfig, + ServiceInputTypes, + ServiceOutputTypes + >() + .ep(commonParams) + .m(function (this: any, Command: any, cs: any, config: SupplyChainClientResolvedConfig, o: any) { + return [ + getSerdePlugin(config, this.serialize, this.deserialize), + getEndpointPlugin(config, Command.getEndpointParameterInstructions()), + ]; + }) + .s("GalaxyPublicAPIGateway", "UpdateDataLakeDataset", {}) + .n("SupplyChainClient", "UpdateDataLakeDatasetCommand") + .f(void 0, void 0) + .ser(se_UpdateDataLakeDatasetCommand) + .de(de_UpdateDataLakeDatasetCommand) + .build() { + /** @internal type navigation helper, not in runtime. */ + protected declare static __types: { + api: { + input: UpdateDataLakeDatasetRequest; + output: UpdateDataLakeDatasetResponse; + }; + sdk: { + input: UpdateDataLakeDatasetCommandInput; + output: UpdateDataLakeDatasetCommandOutput; + }; + }; +} diff --git a/clients/client-supplychain/src/commands/index.ts b/clients/client-supplychain/src/commands/index.ts index b1e83f7c278b..15a96488081d 100644 --- a/clients/client-supplychain/src/commands/index.ts +++ b/clients/client-supplychain/src/commands/index.ts @@ -1,4 +1,17 @@ // smithy-typescript generated code export * from "./CreateBillOfMaterialsImportJobCommand"; +export * from "./CreateDataIntegrationFlowCommand"; +export * from "./CreateDataLakeDatasetCommand"; +export * from "./DeleteDataIntegrationFlowCommand"; +export * from "./DeleteDataLakeDatasetCommand"; export * from "./GetBillOfMaterialsImportJobCommand"; +export * from "./GetDataIntegrationFlowCommand"; +export * from "./GetDataLakeDatasetCommand"; +export * from "./ListDataIntegrationFlowsCommand"; +export * from "./ListDataLakeDatasetsCommand"; +export * from "./ListTagsForResourceCommand"; export * from "./SendDataIntegrationEventCommand"; +export * 
from "./TagResourceCommand"; +export * from "./UntagResourceCommand"; +export * from "./UpdateDataIntegrationFlowCommand"; +export * from "./UpdateDataLakeDatasetCommand"; diff --git a/clients/client-supplychain/src/index.ts b/clients/client-supplychain/src/index.ts index db689d7f627e..f5e3a3312079 100644 --- a/clients/client-supplychain/src/index.ts +++ b/clients/client-supplychain/src/index.ts @@ -17,6 +17,7 @@ export { ClientInputEndpointParameters } from "./endpoint/EndpointParameters"; export type { RuntimeExtension } from "./runtimeExtensions"; export type { SupplyChainExtensionConfiguration } from "./extensionConfiguration"; export * from "./commands"; +export * from "./pagination"; export * from "./models"; export { SupplyChainServiceException } from "./models/SupplyChainServiceException"; diff --git a/clients/client-supplychain/src/models/models_0.ts b/clients/client-supplychain/src/models/models_0.ts index a62bf7ed356d..6092014319a9 100644 --- a/clients/client-supplychain/src/models/models_0.ts +++ b/clients/client-supplychain/src/models/models_0.ts @@ -114,7 +114,7 @@ export interface CreateBillOfMaterialsImportJobRequest { s3uri: string | undefined; /** - *An idempotency token.
+ *An idempotency token ensures the API request is only completed no more than once. This way, retrying the request will not trigger the operation multiple times. A client token is a unique, case-sensitive string of 33 to 128 ASCII characters. To make an idempotent API request, specify a client token in the request. You should not reuse the same client token for other requests. If you retry a successful request with the same client token, the request will succeed with no further actions being taken, and you will receive the same API response as the original successful request.
* @public */ clientToken?: string; @@ -266,6 +266,527 @@ export interface GetBillOfMaterialsImportJobResponse { job: BillOfMaterialsImportJob | undefined; } +/** + * @public + * @enum + */ +export const DataIntegrationFlowLoadType = { + INCREMENTAL: "INCREMENTAL", + REPLACE: "REPLACE", +} as const; + +/** + * @public + */ +export type DataIntegrationFlowLoadType = + (typeof DataIntegrationFlowLoadType)[keyof typeof DataIntegrationFlowLoadType]; + +/** + *The dataset options used in dataset source and target configurations.
+ * @public + */ +export interface DataIntegrationFlowDatasetOptions { + /** + *The dataset data load type in dataset options.
+ * @public + */ + loadType?: DataIntegrationFlowLoadType; + + /** + *The dataset load option to remove duplicates.
+ * @public + */ + dedupeRecords?: boolean; +} + +/** + *The dataset DataIntegrationFlow source configuration parameters.
+ * @public + */ +export interface DataIntegrationFlowDatasetSourceConfiguration { + /** + *The ARN of the dataset.
+ * @public + */ + datasetIdentifier: string | undefined; + + /** + *The dataset DataIntegrationFlow source options.
+ * @public + */ + options?: DataIntegrationFlowDatasetOptions; +} + +/** + * @public + * @enum + */ +export const DataIntegrationFlowFileType = { + CSV: "CSV", + JSON: "JSON", + PARQUET: "PARQUET", +} as const; + +/** + * @public + */ +export type DataIntegrationFlowFileType = + (typeof DataIntegrationFlowFileType)[keyof typeof DataIntegrationFlowFileType]; + +/** + *The Amazon S3 options used in S3 source and target configurations.
+ * @public + */ +export interface DataIntegrationFlowS3Options { + /** + *The Amazon S3 file type in S3 options.
+ * @public + */ + fileType?: DataIntegrationFlowFileType; +} + +/** + *The S3 DataIntegrationFlow source configuration parameters.
+ * @public + */ +export interface DataIntegrationFlowS3SourceConfiguration { + /** + *The bucketName of the S3 source objects.
+ * @public + */ + bucketName: string | undefined; + + /** + *The prefix of the S3 source objects.
+ * @public + */ + prefix: string | undefined; + + /** + *The other options of the S3 DataIntegrationFlow source.
+ * @public + */ + options?: DataIntegrationFlowS3Options; +} + +/** + * @public + * @enum + */ +export const DataIntegrationFlowSourceType = { + DATASET: "DATASET", + S3: "S3", +} as const; + +/** + * @public + */ +export type DataIntegrationFlowSourceType = + (typeof DataIntegrationFlowSourceType)[keyof typeof DataIntegrationFlowSourceType]; + +/** + *The DataIntegrationFlow source parameters.
+ * @public + */ +export interface DataIntegrationFlowSource { + /** + *The DataIntegrationFlow source type.
+ * @public + */ + sourceType: DataIntegrationFlowSourceType | undefined; + + /** + *The DataIntegrationFlow source name that can be used as table alias in SQL transformation query.
+ * @public + */ + sourceName: string | undefined; + + /** + *The S3 DataIntegrationFlow source.
+ * @public + */ + s3Source?: DataIntegrationFlowS3SourceConfiguration; + + /** + *The dataset DataIntegrationFlow source.
+ * @public + */ + datasetSource?: DataIntegrationFlowDatasetSourceConfiguration; +} + +/** + *The dataset DataIntegrationFlow target configuration parameters.
+ * @public + */ +export interface DataIntegrationFlowDatasetTargetConfiguration { + /** + *The dataset ARN.
+ * @public + */ + datasetIdentifier: string | undefined; + + /** + *The dataset DataIntegrationFlow target options.
+ * @public + */ + options?: DataIntegrationFlowDatasetOptions; +} + +/** + *The S3 DataIntegrationFlow target configuration parameters.
+ * @public + */ +export interface DataIntegrationFlowS3TargetConfiguration { + /** + *The bucketName of the S3 target objects.
+ * @public + */ + bucketName: string | undefined; + + /** + *The prefix of the S3 target objects.
+ * @public + */ + prefix: string | undefined; + + /** + *The S3 DataIntegrationFlow target options.
+ * @public + */ + options?: DataIntegrationFlowS3Options; +} + +/** + * @public + * @enum + */ +export const DataIntegrationFlowTargetType = { + DATASET: "DATASET", + S3: "S3", +} as const; + +/** + * @public + */ +export type DataIntegrationFlowTargetType = + (typeof DataIntegrationFlowTargetType)[keyof typeof DataIntegrationFlowTargetType]; + +/** + *The DataIntegrationFlow target parameters.
+ * @public + */ +export interface DataIntegrationFlowTarget { + /** + *The DataIntegrationFlow target type.
+ * @public + */ + targetType: DataIntegrationFlowTargetType | undefined; + + /** + *The S3 DataIntegrationFlow target.
+ * @public + */ + s3Target?: DataIntegrationFlowS3TargetConfiguration; + + /** + *The dataset DataIntegrationFlow target.
+ * @public + */ + datasetTarget?: DataIntegrationFlowDatasetTargetConfiguration; +} + +/** + *The SQL DataIntegrationFlow transformation configuration parameters.
+ * @public + */ +export interface DataIntegrationFlowSQLTransformationConfiguration { + /** + *The transformation SQL query body based on SparkSQL.
+ * @public + */ + query: string | undefined; +} + +/** + * @public + * @enum + */ +export const DataIntegrationFlowTransformationType = { + NONE: "NONE", + SQL: "SQL", +} as const; + +/** + * @public + */ +export type DataIntegrationFlowTransformationType = + (typeof DataIntegrationFlowTransformationType)[keyof typeof DataIntegrationFlowTransformationType]; + +/** + *The DataIntegrationFlow transformation parameters.
+ * @public + */ +export interface DataIntegrationFlowTransformation { + /** + *The DataIntegrationFlow transformation type.
+ * @public + */ + transformationType: DataIntegrationFlowTransformationType | undefined; + + /** + *The SQL DataIntegrationFlow transformation configuration.
+ * @public + */ + sqlTransformation?: DataIntegrationFlowSQLTransformationConfiguration; +} + +/** + *The request parameters for CreateDataIntegrationFlow.
+ * @public + */ +export interface CreateDataIntegrationFlowRequest { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *Name of the DataIntegrationFlow.
+ * @public + */ + name: string | undefined; + + /** + *The source configurations for DataIntegrationFlow.
+ * @public + */ + sources: DataIntegrationFlowSource[] | undefined; + + /** + *The transformation configurations for DataIntegrationFlow.
+ * @public + */ + transformation: DataIntegrationFlowTransformation | undefined; + + /** + *The target configurations for DataIntegrationFlow.
+ * @public + */ + target: DataIntegrationFlowTarget | undefined; + + /** + *<p>The tags of the DataIntegrationFlow to be created.</p>
+ * @public + */ + tags?: RecordThe response parameters for CreateDataIntegrationFlow.
+ * @public + */ +export interface CreateDataIntegrationFlowResponse { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name of the DataIntegrationFlow created.
+ * @public + */ + name: string | undefined; +} + +/** + * @public + * @enum + */ +export const DataLakeDatasetSchemaFieldType = { + DOUBLE: "DOUBLE", + INT: "INT", + STRING: "STRING", + TIMESTAMP: "TIMESTAMP", +} as const; + +/** + * @public + */ +export type DataLakeDatasetSchemaFieldType = + (typeof DataLakeDatasetSchemaFieldType)[keyof typeof DataLakeDatasetSchemaFieldType]; + +/** + *The dataset field details.
+ * @public + */ +export interface DataLakeDatasetSchemaField { + /** + *The dataset field name.
+ * @public + */ + name: string | undefined; + + /** + *The dataset field type.
+ * @public + */ + type: DataLakeDatasetSchemaFieldType | undefined; + + /** + *<p>Indicates whether the field is required.</p>
+ * @public + */ + isRequired: boolean | undefined; +} + +/** + *The schema details of the dataset.
+ * @public + */ +export interface DataLakeDatasetSchema { + /** + *The name of the dataset schema.
+ * @public + */ + name: string | undefined; + + /** + *The list of field details of the dataset schema.
+ * @public + */ + fields: DataLakeDatasetSchemaField[] | undefined; +} + +/** + *The request parameters for CreateDataLakeDataset.
+ * @public + */ +export interface CreateDataLakeDatasetRequest { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name space of the dataset.
+ *+ * asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html.
+ *+ * default - For datasets with custom user-defined schemas.
+ *The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html.
+ * @public + */ + name: string | undefined; + + /** + *The custom schema of the data lake dataset and is only required when the name space is default.
+ * @public + */ + schema?: DataLakeDatasetSchema; + + /** + *The description of the dataset.
+ * @public + */ + description?: string; + + /** + *The tags of the dataset.
+ * @public + */ + tags?: RecordThe data lake dataset details.
+ * @public + */ +export interface DataLakeDataset { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name space of the dataset. The available values are:
+ *+ * asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html.
+ *+ * default - For datasets with custom user-defined schemas.
+ *The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html.
+ * @public + */ + name: string | undefined; + + /** + *The arn of the dataset.
+ * @public + */ + arn: string | undefined; + + /** + *The schema of the dataset.
+ * @public + */ + schema: DataLakeDatasetSchema | undefined; + + /** + *The description of the dataset.
+ * @public + */ + description?: string; + + /** + *The creation time of the dataset.
+ * @public + */ + createdTime: Date | undefined; + + /** + *The last modified time of the dataset.
+ * @public + */ + lastModifiedTime: Date | undefined; +} + +/** + *The response parameters of CreateDataLakeDataset.
+ * @public + */ +export interface CreateDataLakeDatasetResponse { + /** + *The detail of created dataset.
+ * @public + */ + dataset: DataLakeDataset | undefined; +} + /** * @public * @enum @@ -291,13 +812,272 @@ export const DataIntegrationEventType = { /** * @public */ -export type DataIntegrationEventType = (typeof DataIntegrationEventType)[keyof typeof DataIntegrationEventType]; +export type DataIntegrationEventType = (typeof DataIntegrationEventType)[keyof typeof DataIntegrationEventType]; + +/** + *The request parameters for SendDataIntegrationEvent.
+ * @public + */ +export interface SendDataIntegrationEventRequest { + /** + *The AWS Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The data event type.
+ * @public + */ + eventType: DataIntegrationEventType | undefined; + + /** + *The data payload of the event. For more information on the data schema to use, see Data entities supported in AWS Supply Chain + * .
+ * @public + */ + data: string | undefined; + + /** + *Event identifier (for example, orderId for InboundOrder) used for data sharing or partitioning.
+ * @public + */ + eventGroupId: string | undefined; + + /** + *The event timestamp (in epoch seconds).
+ * @public + */ + eventTimestamp?: Date; + + /** + *The idempotent client token.
+ * @public + */ + clientToken?: string; +} + +/** + *The response parameters for SendDataIntegrationEvent.
+ * @public + */ +export interface SendDataIntegrationEventResponse { + /** + *The unique event identifier.
+ * @public + */ + eventId: string | undefined; +} + +/** + *The DataIntegrationFlow details.
+ * @public + */ +export interface DataIntegrationFlow { + /** + *The DataIntegrationFlow instance ID.
+ * @public + */ + instanceId: string | undefined; + + /** + *The DataIntegrationFlow name.
+ * @public + */ + name: string | undefined; + + /** + *The DataIntegrationFlow source configurations.
+ * @public + */ + sources: DataIntegrationFlowSource[] | undefined; + + /** + *The DataIntegrationFlow transformation configurations.
+ * @public + */ + transformation: DataIntegrationFlowTransformation | undefined; + + /** + *The DataIntegrationFlow target configuration.
+ * @public + */ + target: DataIntegrationFlowTarget | undefined; + + /** + *The DataIntegrationFlow creation timestamp.
+ * @public + */ + createdTime: Date | undefined; + + /** + *The DataIntegrationFlow last modified timestamp.
+ * @public + */ + lastModifiedTime: Date | undefined; +} + +/** + *The request parameters for DeleteDataIntegrationFlow.
+ * @public + */ +export interface DeleteDataIntegrationFlowRequest { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name of the DataIntegrationFlow to be deleted.
+ * @public + */ + name: string | undefined; +} /** - *The request parameters for SendDataIntegrationEvent.
+ *The response parameters for DeleteDataIntegrationFlow.
* @public */ -export interface SendDataIntegrationEventRequest { +export interface DeleteDataIntegrationFlowResponse { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name of the DataIntegrationFlow deleted.
+ * @public + */ + name: string | undefined; +} + +/** + *The request parameters for GetDataIntegrationFlow.
+ * @public + */ +export interface GetDataIntegrationFlowRequest { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name of the DataIntegrationFlow created.
+ * @public + */ + name: string | undefined; +} + +/** + *The response parameters for GetDataIntegrationFlow.
+ * @public + */ +export interface GetDataIntegrationFlowResponse { + /** + *The details of the DataIntegrationFlow returned.
+ * @public + */ + flow: DataIntegrationFlow | undefined; +} + +/** + *The request parameters for ListDataIntegrationFlows.
+ * @public + */ +export interface ListDataIntegrationFlowsRequest { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The pagination token to fetch the next page of the DataIntegrationFlows.
+ * @public + */ + nextToken?: string; + + /** + *Specify the maximum number of DataIntegrationFlows to fetch in one paginated request.
+ * @public + */ + maxResults?: number; +} + +/** + *The response parameters for ListDataIntegrationFlows.
+ * @public + */ +export interface ListDataIntegrationFlowsResponse { + /** + *<p>The list of DataIntegrationFlows returned.</p>
+ * @public + */ + flows: DataIntegrationFlow[] | undefined; + + /** + *The pagination token to fetch the next page of the DataIntegrationFlows.
+ * @public + */ + nextToken?: string; +} + +/** + *The request parameters for UpdateDataIntegrationFlow.
+ * @public + */ +export interface UpdateDataIntegrationFlowRequest { + /** + *The Amazon Web Services Supply Chain instance identifier.
+ * @public + */ + instanceId: string | undefined; + + /** + *The name of the DataIntegrationFlow to be updated.
+ * @public + */ + name: string | undefined; + + /** + *The new source configurations for the DataIntegrationFlow.
+ * @public + */ + sources?: DataIntegrationFlowSource[]; + + /** + *The new transformation configurations for the DataIntegrationFlow.
+ * @public + */ + transformation?: DataIntegrationFlowTransformation; + + /** + *The new target configurations for the DataIntegrationFlow.
+ * @public + */ + target?: DataIntegrationFlowTarget; +} + +/** + *The response parameters for UpdateDataIntegrationFlow.
+ * @public + */ +export interface UpdateDataIntegrationFlowResponse { + /** + *The details of the updated DataIntegrationFlow.
+ * @public + */ + flow: DataIntegrationFlow | undefined; +} + +/** + *The request parameters of DeleteDataLakeDataset.
+ * @public + */ +export interface DeleteDataLakeDatasetRequest { /** *The AWS Supply Chain instance identifier.
* @public @@ -305,49 +1085,281 @@ export interface SendDataIntegrationEventRequest { instanceId: string | undefined; /** - *The data event type.
+ *The namespace of the dataset. The available values are:
+ *asc: for + * AWS Supply Chain supported datasets + * .
+ *default: for datasets with custom user-defined schemas.
+ *The data payload of the event. For more information on the data schema to use, see Data entities supported in AWS Supply Chain
+ * The name of the dataset. If the namespace is asc, the name must be one of the supported data entities
* . The response parameters of DeleteDataLakeDataset. Event identifier (for example, orderId for InboundOrder) used for data sharing or partitioning. The AWS Supply Chain instance identifier. The event timestamp (in epoch seconds). The namespace of deleted dataset. The idempotent client token. The name of deleted dataset. The response parameters for SendDataIntegrationEvent. The request parameters for GetDataLakeDataset. The unique event identifier. The Amazon Web Services Supply Chain instance identifier. The name space of the dataset. The available values are:
+ * asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html.
+ * default - For datasets with custom user-defined schemas. The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. The response parameters for UpdateDataLakeDataset. The fetched dataset details. The request parameters of ListDataLakeDatasets. The Amazon Web Services Supply Chain instance identifier. The namespace of the dataset. The available values are: asc: for
+ * AWS Supply Chain supported datasets
+ * . default: for datasets with custom user-defined schemas. The pagination token to fetch next page of datasets. The max number of datasets to fetch in this paginated request. The response parameters of ListDataLakeDatasets. The list of fetched dataset details. The pagination token to fetch next page of datasets. The request parameters of UpdateDataLakeDataset. The Amazon Web Services Chain instance identifier. The name space of the dataset. The available values are:
+ * asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html.
+ * default - For datasets with custom user-defined schemas. The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. The updated description of the data lake dataset. The response parameters of UpdateDataLakeDataset. The updated dataset details. The request parameters of ListTagsForResource. The Amazon Web Services Supply chain resource ARN that needs tags to be listed. The response parameters of ListTagsForResource. The tags added to an Amazon Web Services Supply Chain resource. The request parameters of TagResource. The Amazon Web Services Supply chain resource ARN that needs to be tagged. The tags of the Amazon Web Services Supply chain resource to be created. The response parameters for TagResource. The request parameters of UntagResource. The Amazon Web Services Supply chain resource ARN that needs to be untagged. The list of tag keys to be deleted for an Amazon Web Services Supply Chain resource. The response parameters of UntagResource. An idempotency token. An idempotency token ensures the API request is only completed no more than once. This way, retrying the request will not trigger the operation multiple times. A client token is a unique, case-sensitive string of 33 to 128 ASCII characters. To make an idempotent API request, specify a client token in the request. You should not reuse the same client token for other requests. If you retry a successful request with the same client token, the request will succeed with no further actions being taken, and you will receive the same API response as the original successful request. Create DataIntegrationFlow to map one or more different sources to one target using the SQL transformation query. The Amazon Web Services Supply Chain instance identifier. Name of the DataIntegrationFlow. The source configurations for DataIntegrationFlow. 
The transformation configurations for DataIntegrationFlow. The target configurations for DataIntegrationFlow. The tags of the DataIntegrationFlow to be created The request parameters for CreateDataIntegrationFlow. The Amazon Web Services Supply Chain instance identifier. The name of the DataIntegrationFlow created. The response parameters for CreateDataIntegrationFlow. \n AWS Supply Chain is a cloud-based application that works with your enterprise resource planning (ERP) and supply chain management systems. Using AWS Supply Chain, you can connect and extract your inventory, supply, and demand related data from existing ERP or supply chain systems into a single data model.\n The AWS Supply Chain API supports configuration data import for Supply Planning. \n All AWS Supply chain API operations are Amazon-authenticated and certificate-signed. They not only require the use of the AWS SDK, but also allow for the exclusive use of AWS Identity and Access Management users and roles to help facilitate access, trust, and permission policies.\n Create a data lake dataset. The Amazon Web Services Supply Chain instance identifier. The name space of the dataset. \n asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. \n default - For datasets with custom user-defined schemas. The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. The custom schema of the data lake dataset and is only required when the name space is default. The description of the dataset. The tags of the dataset. The request parameters for CreateDataLakeDataset. The detail of created dataset. The response parameters of CreateDataLakeDataset. The DataIntegrationFlow instance ID. The DataIntegrationFlow name. The DataIntegrationFlow source configurations. 
The DataIntegrationFlow transformation configurations. The DataIntegrationFlow target configuration. The DataIntegrationFlow creation timestamp. The DataIntegrationFlow last modified timestamp. The DataIntegrationFlow details. The dataset data load type in dataset options. The dataset load option to remove duplicates. The dataset options used in dataset source and target configurations. The ARN of the dataset. The dataset DataIntegrationFlow source options. The dataset DataIntegrationFlow source configuration parameters. The dataset ARN. The dataset DataIntegrationFlow target options. The dataset DataIntegrationFlow target configuration parameters. The Amazon S3 file type in S3 options. The Amazon S3 options used in S3 source and target configurations. The bucketName of the S3 source objects. The prefix of the S3 source objects. The other options of the S3 DataIntegrationFlow source. The S3 DataIntegrationFlow source configuration parameters. The bucketName of the S3 target objects. The prefix of the S3 target objects. The S3 DataIntegrationFlow target options. The S3 DataIntegrationFlow target configuration parameters. The transformation SQL query body based on SparkSQL. The SQL DataIntegrationFlow transformation configuration parameters. The DataIntegrationFlow source type. The DataIntegrationFlow source name that can be used as table alias in SQL transformation query. The S3 DataIntegrationFlow source. The dataset DataIntegrationFlow source. The DataIntegrationFlow source parameters. The DataIntegrationFlow target type. The S3 DataIntegrationFlow target. The dataset DataIntegrationFlow target. The DataIntegrationFlow target parameters. The DataIntegrationFlow transformation type. The SQL DataIntegrationFlow transformation configuration. The DataIntegrationFlow transformation parameters. The Amazon Web Services Supply Chain instance identifier. The name space of the dataset. 
The available values are: \n asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. \n default - For datasets with custom user-defined schemas. The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. The arn of the dataset. The schema of the dataset. The description of the dataset. The creation time of the dataset. The last modified time of the dataset. The data lake dataset details. The name of the dataset schema. The list of field details of the dataset schema. The schema details of the dataset. The dataset field name. The dataset field type. Indicate if the field is required or not. The dataset field details. Delete the DataIntegrationFlow. The Amazon Web Services Supply Chain instance identifier. The name of the DataIntegrationFlow to be deleted. The request parameters for DeleteDataIntegrationFlow. The Amazon Web Services Supply Chain instance identifier. The name of the DataIntegrationFlow deleted. The response parameters for DeleteDataIntegrationFlow. Delete a data lake dataset. The AWS Supply Chain instance identifier. The namespace of the dataset. The available values are: asc: for \n AWS Supply Chain supported datasets\n . default: for datasets with custom user-defined schemas. The name of the dataset. If the namespace is asc, the name must be one of the supported data entities\n . The request parameters of DeleteDataLakeDataset. The AWS Supply Chain instance identifier. The namespace of deleted dataset. The name of deleted dataset. The response parameters of DeleteDataLakeDataset. \n AWS Supply Chain is a cloud-based application that works with your enterprise resource planning (ERP) and supply chain management systems. 
Using AWS Supply Chain, you can connect and extract your inventory, supply, and demand related data from existing ERP or supply chain systems into a single data model.\n The AWS Supply Chain API supports configuration data import for Supply Planning. \n All AWS Supply chain API operations are Amazon-authenticated and certificate-signed. They not only require the use of the AWS SDK, but also allow for the exclusive use of AWS Identity and Access Management users and roles to help facilitate access, trust, and permission policies.\n Get status and details of a BillOfMaterialsImportJob. The AWS Supply Chain instance identifier. The BillOfMaterialsImportJob identifier. The request parameters for GetBillOfMaterialsImportJob. The BillOfMaterialsImportJob. The response parameters for GetBillOfMaterialsImportJob. View the DataIntegrationFlow details. The Amazon Web Services Supply Chain instance identifier. The name of the DataIntegrationFlow created. The request parameters for GetDataIntegrationFlow. The details of the DataIntegrationFlow returned. The response parameters for GetDataIntegrationFlow. Get a data lake dataset. The Amazon Web Services Supply Chain instance identifier. The name space of the dataset. The available values are: \n asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. \n default - For datasets with custom user-defined schemas. The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. The request parameters for GetDataLakeDataset. The fetched dataset details. The response parameters for UpdateDataLakeDataset. Unexpected error during processing of request. Lists all the DataIntegrationFlows in a paginated way. The Amazon Web Services Supply Chain instance identifier. 
The pagination token to fetch the next page of the DataIntegrationFlows. Specify the maximum number of DataIntegrationFlows to fetch in one paginated request. The request parameters for ListDataIntegrationFlows. The response parameters for ListDataIntegrationFlows. The pagination token to fetch the next page of the DataIntegrationFlows. The response parameters for ListDataIntegrationFlows. List the data lake datasets for a specific instance and name space. The Amazon Web Services Supply Chain instance identifier. The namespace of the dataset. The available values are: asc: for \n AWS Supply Chain supported datasets\n . default: for datasets with custom user-defined schemas. The pagination token to fetch next page of datasets. The max number of datasets to fetch in this paginated request. The request parameters of ListDataLakeDatasets. The list of fetched dataset details. The pagination token to fetch next page of datasets. The response parameters of ListDataLakeDatasets. List all the tags for an Amazon Web ServicesSupply Chain resource. The Amazon Web Services Supply chain resource ARN that needs tags to be listed. The request parameters of ListTagsForResource. The tags added to an Amazon Web Services Supply Chain resource. The response parameters of ListTagsForResource. Request references a resource which does not exist. Send the transactional data payload for the event with real-time data for analysis or monitoring. The real-time data events are stored in an Amazon Web Services service before being processed and stored in data lake. \n New data events are synced with data lake at 5 PM GMT everyday. The updated transactional data is available in data lake after ingestion. The AWS Supply Chain instance identifier. The data event type. The data payload of the event. For more information on the data schema to use, see Data entities supported in AWS Supply Chain\n . Event identifier (for example, orderId for InboundOrder) used for data sharing or partitioning. 
The event timestamp (in epoch seconds). The idempotent client token. The request parameters for SendDataIntegrationEvent. The unique event identifier. The response parameters for SendDataIntegrationEvent. Request would cause a service quota to be exceeded. Get status and details of a BillOfMaterialsImportJob. Create tags for an Amazon Web Services Supply chain resource. The AWS Supply Chain instance identifier. The Amazon Web Services Supply chain resource ARN that needs to be tagged. The BillOfMaterialsImportJob identifier. The request parameters for GetBillOfMaterialsImportJob. The BillOfMaterialsImportJob. The tags of the Amazon Web Services Supply chain resource to be created. The response parameters for GetBillOfMaterialsImportJob. The request parameters of TagResource. Unexpected error during processing of request. The response parameters for TagResource. Request references a resource which does not exist. Request was denied due to request throttling. Send the transactional data payload for the event with real-time data for analysis or monitoring. The real-time data events are stored in an Amazon Web Services service before being processed and stored in data lake. \n New data events are synced with data lake at 5 PM GMT everyday. The updated transactional data is available in data lake after ingestion. Delete tags for an Amazon Web Services Supply chain resource. The Amazon Web Services Supply chain resource ARN that needs to be untagged. The list of tag keys to be deleted for an Amazon Web Services Supply Chain resource. The request parameters of UntagResource. The response parameters of UntagResource. Update the DataIntegrationFlow. The AWS Supply Chain instance identifier. The Amazon Web Services Supply Chain instance identifier. The data event type. The data payload of the event. For more information on the data schema to use, see Data entities supported in AWS Supply Chain\n . The name of the DataIntegrationFlow to be updated. 
Event identifier (for example, orderId for InboundOrder) used for data sharing or partitioning. The new source configurations for the DataIntegrationFlow. The event timestamp (in epoch seconds). The new transformation configurations for the DataIntegrationFlow. The idempotent client token. The new target configurations for the DataIntegrationFlow. The request parameters for SendDataIntegrationEvent. The request parameters for UpdateDataIntegrationFlow. The unique event identifier. The details of the updated DataIntegrationFlow. The response parameters for SendDataIntegrationEvent. The response parameters for UpdateDataIntegrationFlow. Request would cause a service quota to be exceeded. Update a data lake dataset. The Amazon Web Services Chain instance identifier. The name space of the dataset. The available values are: \n asc - For information on the Amazon Web Services Supply Chain supported datasets see https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. \n default - For datasets with custom user-defined schemas. The name of the dataset. For asc name space, the name must be one of the supported data entities under https://docs.aws.amazon.com/aws-supply-chain/latest/userguide/data-model-asc.html. The updated description of the data lake dataset. Request was denied due to request throttling. The request parameters of UpdateDataLakeDataset. The updated dataset details. The response parameters of UpdateDataLakeDataset.
+ *
+ * @public
+ */
+ namespace: string | undefined;
+
+ /**
+ *
+ *
+ * @public
+ */
+ namespace: string | undefined;
+
+ /**
+ *
+ *
+ * @public
+ */
+ namespace: string | undefined;
+
+ /**
+ * \n
",
+ "smithy.api#httpLabel": {},
+ "smithy.api#required": {}
+ }
+ },
+ "name": {
+ "target": "com.amazonaws.supplychain#DataLakeDatasetName",
+ "traits": {
+ "smithy.api#documentation": "\n
",
+ "smithy.api#required": {},
+ "smithy.api#resourceIdentifier": "namespace"
+ }
+ },
+ "name": {
+ "target": "com.amazonaws.supplychain#DataLakeDatasetName",
+ "traits": {
+ "smithy.api#documentation": "\n
",
+ "smithy.api#httpLabel": {},
+ "smithy.api#required": {}
+ }
+ },
+ "name": {
+ "target": "com.amazonaws.supplychain#DataLakeDatasetName",
+ "traits": {
+ "smithy.api#documentation": "\n
",
+ "smithy.api#httpLabel": {},
+ "smithy.api#required": {}
+ }
+ },
+ "name": {
+ "target": "com.amazonaws.supplychain#DataLakeDatasetName",
+ "traits": {
+ "smithy.api#documentation": "\n
",
+ "smithy.api#httpLabel": {},
+ "smithy.api#required": {}
+ }
+ },
+ "nextToken": {
+ "target": "com.amazonaws.supplychain#DataLakeDatasetNextToken",
+ "traits": {
+ "smithy.api#documentation": "\n
",
+ "smithy.api#httpLabel": {},
+ "smithy.api#required": {}
+ }
+ },
+ "name": {
+ "target": "com.amazonaws.supplychain#DataLakeDatasetName",
+ "traits": {
+ "smithy.api#documentation": "