Skip to content

Commit

Permalink
feat(client-omics): Minor model changes to accommodate batch imports f…
Browse files Browse the repository at this point in the history
…eature
  • Loading branch information
awstools committed Feb 28, 2023
1 parent 95b351a commit 386065a
Show file tree
Hide file tree
Showing 8 changed files with 397 additions and 301 deletions.
5 changes: 3 additions & 2 deletions clients/client-omics/src/Omics.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1970,7 +1970,8 @@ export class Omics extends OmicsClient {
}

/**
* <p>Starts a read set activation job.</p>
* <p>Activates an archived read set. To reduce storage charges, Amazon Omics archives unused read
* sets after 30 days.</p>
*/
public startReadSetActivationJob(
args: StartReadSetActivationJobCommandInput,
Expand Down Expand Up @@ -2002,7 +2003,7 @@ export class Omics extends OmicsClient {
}

/**
* <p>Starts a read set export job.</p>
* <p>Exports a read set to Amazon S3.</p>
*/
public startReadSetExportJob(
args: StartReadSetExportJobCommandInput,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,8 @@ export interface StartReadSetActivationJobCommandInput extends StartReadSetActiv
export interface StartReadSetActivationJobCommandOutput extends StartReadSetActivationJobResponse, __MetadataBearer {}

/**
* <p>Starts a read set activation job.</p>
* <p>Activates an archived read set. To reduce storage charges, Amazon Omics archives unused read
* sets after 30 days.</p>
* @example
* Use a bare-bones client and the command you need to make an API call.
* ```javascript
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ export interface StartReadSetExportJobCommandInput extends StartReadSetExportJob
export interface StartReadSetExportJobCommandOutput extends StartReadSetExportJobResponse, __MetadataBearer {}

/**
* <p>Starts a read set export job.</p>
* <p>Exports a read set to Amazon S3.</p>
* @example
* Use a bare-bones client and the command you need to make an API call.
* ```javascript
Expand Down
2 changes: 1 addition & 1 deletion clients/client-omics/src/endpoint/EndpointParameters.ts
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ export const resolveClientEndpointParameters = <T>(
};

export interface EndpointParameters extends __EndpointParameters {
Region: string;
Region?: string;
UseDualStack?: boolean;
UseFIPS?: boolean;
Endpoint?: string;
Expand Down
38 changes: 19 additions & 19 deletions clients/client-omics/src/endpoint/ruleset.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,25 +6,25 @@ import { RuleSetObject } from "@aws-sdk/util-endpoints";
or see "smithy.rules#endpointRuleSet"
in codegen/sdk-codegen/aws-models/omics.json */

const r="fn",
const q="required",
r="fn",
s="argv",
t="ref";
const a=true,
b=false,
c="String",
d="PartitionResult",
e="tree",
f="error",
g="endpoint",
h={"required":true,"default":false,"type":"Boolean"},
i={[t]:"Endpoint"},
j={[r]:"booleanEquals",[s]:[{[t]:"UseFIPS"},true]},
k={[r]:"booleanEquals",[s]:[{[t]:"UseDualStack"},true]},
l={},
m={[r]:"booleanEquals",[s]:[true,{[r]:"getAttr",[s]:[{[t]:d},"supportsFIPS"]}]},
n={[r]:"booleanEquals",[s]:[true,{[r]:"getAttr",[s]:[{[t]:d},"supportsDualStack"]}]},
o=[i],
p=[j],
q=[k];
const _data={version:"1.0",parameters:{Region:{required:a,type:c},UseDualStack:h,UseFIPS:h,Endpoint:{required:b,type:c}},rules:[{conditions:[{[r]:"aws.partition",[s]:[{[t]:"Region"}],assign:d}],type:e,rules:[{conditions:[{[r]:"isSet",[s]:o},{[r]:"parseURL",[s]:o,assign:"url"}],type:e,rules:[{conditions:p,error:"Invalid Configuration: FIPS and custom endpoint are not supported",type:f},{type:e,rules:[{conditions:q,error:"Invalid Configuration: Dualstack and custom endpoint are not supported",type:f},{endpoint:{url:i,properties:l,headers:l},type:g}]}]},{conditions:[j,k],type:e,rules:[{conditions:[m,n],type:e,rules:[{endpoint:{url:"https://omics-fips.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:l,headers:l},type:g}]},{error:"FIPS and DualStack are enabled, but this partition does not support one or both",type:f}]},{conditions:p,type:e,rules:[{conditions:[m],type:e,rules:[{endpoint:{url:"https://omics-fips.{Region}.{PartitionResult#dnsSuffix}",properties:l,headers:l},type:g}]},{error:"FIPS is enabled but this partition does not support FIPS",type:f}]},{conditions:q,type:e,rules:[{conditions:[n],type:e,rules:[{endpoint:{url:"https://omics.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:l,headers:l},type:g}]},{error:"DualStack is enabled but this partition does not support DualStack",type:f}]},{endpoint:{url:"https://omics.{Region}.{PartitionResult#dnsSuffix}",properties:l,headers:l},type:g}]}]};
const a="isSet",
b="tree",
c="error",
d="endpoint",
e="PartitionResult",
f={[q]:false,"type":"String"},
g={[q]:true,"default":false,"type":"Boolean"},
h={[t]:"Endpoint"},
i={[r]:"booleanEquals",[s]:[{[t]:"UseFIPS"},true]},
j={[r]:"booleanEquals",[s]:[{[t]:"UseDualStack"},true]},
k={},
l={[r]:"booleanEquals",[s]:[true,{[r]:"getAttr",[s]:[{[t]:e},"supportsFIPS"]}]},
m={[r]:"booleanEquals",[s]:[true,{[r]:"getAttr",[s]:[{[t]:e},"supportsDualStack"]}]},
n=[i],
o=[j],
p=[{[t]:"Region"}];
const _data={version:"1.0",parameters:{Region:f,UseDualStack:g,UseFIPS:g,Endpoint:f},rules:[{conditions:[{[r]:a,[s]:[h]}],type:b,rules:[{conditions:n,error:"Invalid Configuration: FIPS and custom endpoint are not supported",type:c},{type:b,rules:[{conditions:o,error:"Invalid Configuration: Dualstack and custom endpoint are not supported",type:c},{endpoint:{url:h,properties:k,headers:k},type:d}]}]},{type:b,rules:[{conditions:[{[r]:a,[s]:p}],type:b,rules:[{conditions:[{[r]:"aws.partition",[s]:p,assign:e}],type:b,rules:[{conditions:[i,j],type:b,rules:[{conditions:[l,m],type:b,rules:[{type:b,rules:[{endpoint:{url:"https://omics-fips.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:k,headers:k},type:d}]}]},{error:"FIPS and DualStack are enabled, but this partition does not support one or both",type:c}]},{conditions:n,type:b,rules:[{conditions:[l],type:b,rules:[{type:b,rules:[{endpoint:{url:"https://omics-fips.{Region}.{PartitionResult#dnsSuffix}",properties:k,headers:k},type:d}]}]},{error:"FIPS is enabled but this partition does not support FIPS",type:c}]},{conditions:o,type:b,rules:[{conditions:[m],type:b,rules:[{type:b,rules:[{endpoint:{url:"https://omics.{Region}.{PartitionResult#dualStackDnsSuffix}",properties:k,headers:k},type:d}]}]},{error:"DualStack is enabled but this partition does not support DualStack",type:c}]},{type:b,rules:[{endpoint:{url:"https://omics.{Region}.{PartitionResult#dnsSuffix}",properties:k,headers:k},type:d}]}]}]},{error:"Invalid Configuration: Missing Region",type:c}]}]};
export const ruleSet: RuleSetObject = _data;
53 changes: 32 additions & 21 deletions clients/client-omics/src/models/models_0.ts
Original file line number Diff line number Diff line change
Expand Up @@ -120,6 +120,10 @@ export enum JobStatus {
* The Job has completed
*/
COMPLETED = "COMPLETED",
/**
* The Job completed with failed runs
*/
COMPLETED_WITH_FAILURES = "COMPLETED_WITH_FAILURES",
/**
* The Job failed
*/
Expand Down Expand Up @@ -850,7 +854,7 @@ export interface CreateAnnotationStoreResponse {
id: string | undefined;

/**
* <p>The store's genome reference.</p>
* <p>The store's genome reference. Required for all stores except TSV format with generic annotations.</p>
*/
reference?: ReferenceItem;

Expand Down Expand Up @@ -1296,7 +1300,7 @@ export interface CreateRunGroupRequest {
maxRuns?: number;

/**
* <p>A max duration for the group.</p>
* <p>A maximum run time for the group in minutes.</p>
*/
maxDuration?: number;

Expand All @@ -1306,7 +1310,7 @@ export interface CreateRunGroupRequest {
tags?: Record<string, string>;

/**
* <p>A request ID for the group.</p>
* <p>To ensure that requests don't run multiple times, specify a unique ID for each request.</p>
*/
requestId?: string;
}
Expand Down Expand Up @@ -1498,7 +1502,7 @@ export interface CreateWorkflowRequest {
parameterTemplate?: Record<string, WorkflowParameter>;

/**
* <p>A storage capacity for the workflow.</p>
* <p>A storage capacity for the workflow in gigabytes.</p>
*/
storageCapacity?: number;

Expand All @@ -1508,7 +1512,7 @@ export interface CreateWorkflowRequest {
tags?: Record<string, string>;

/**
* <p>A request ID for the workflow.</p>
* <p>To ensure that requests don't run multiple times, specify a unique ID for each request.</p>
*/
requestId?: string;
}
Expand All @@ -1518,6 +1522,7 @@ export enum WorkflowStatus {
CREATING = "CREATING",
DELETED = "DELETED",
FAILED = "FAILED",
INACTIVE = "INACTIVE",
UPDATING = "UPDATING",
}

Expand Down Expand Up @@ -1842,7 +1847,7 @@ export interface GetReadSetActivationJobResponse {
completionTime?: Date;

/**
* <p>The job's sources.</p>
* <p>The job's source files.</p>
*/
sources?: ActivateReadSetSourceItem[];
}
Expand Down Expand Up @@ -2042,7 +2047,7 @@ export interface GetReadSetImportJobResponse {
completionTime?: Date;

/**
* <p>The job's sources.</p>
* <p>The job's source files.</p>
*/
sources: ImportReadSetSourceItem[] | undefined;
}
Expand Down Expand Up @@ -2319,7 +2324,7 @@ export interface GetReferenceImportJobResponse {
completionTime?: Date;

/**
* <p>The job's sources.</p>
* <p>The job's source files.</p>
*/
sources: ImportReferenceSourceItem[] | undefined;
}
Expand Down Expand Up @@ -2484,6 +2489,7 @@ export enum RunStatus {

export enum WorkflowType {
PRIVATE = "PRIVATE",
SERVICE = "SERVICE",
}

export interface GetRunResponse {
Expand Down Expand Up @@ -2553,7 +2559,7 @@ export interface GetRunResponse {
parameters?: __DocumentType;

/**
* <p>The run's storage capacity.</p>
* <p>The run's storage capacity in gigabytes.</p>
*/
storageCapacity?: number;

Expand Down Expand Up @@ -2637,7 +2643,7 @@ export interface GetRunGroupResponse {
maxRuns?: number;

/**
* <p>The group's maximum run duration.</p>
* <p>The group's maximum run time in minutes.</p>
*/
maxDuration?: number;

Expand Down Expand Up @@ -2696,7 +2702,7 @@ export interface GetRunTaskResponse {
cpus?: number;

/**
* <p>The task's memory setting.</p>
* <p>The task's memory use in gigabytes.</p>
*/
memory?: number;

Expand Down Expand Up @@ -2785,6 +2791,11 @@ export interface VariantImportItemDetail {
* <p>The item's job status.</p>
*/
jobStatus: JobStatus | string | undefined;

/**
* <p> A message that provides additional context about a job </p>
*/
statusMessage?: string;
}

export interface GetVariantImportResponse {
Expand Down Expand Up @@ -2986,7 +2997,7 @@ export interface GetWorkflowResponse {
parameterTemplate?: Record<string, WorkflowParameter>;

/**
* <p>The workflow's storage capacity.</p>
* <p>The workflow's storage capacity in gigabytes.</p>
*/
storageCapacity?: number;

Expand Down Expand Up @@ -3621,7 +3632,7 @@ export interface RunGroupListItem {
maxRuns?: number;

/**
* <p>The group's maximum duration setting.</p>
* <p>The group's maximum duration setting in minutes.</p>
*/
maxDuration?: number;

Expand Down Expand Up @@ -3779,7 +3790,7 @@ export interface TaskListItem {
cpus?: number;

/**
* <p>The task's memory.</p>
* <p>The task's memory use in gigabytes.</p>
*/
memory?: number;

Expand Down Expand Up @@ -4223,7 +4234,7 @@ export interface StartReferenceImportJobRequest {
clientToken?: string;

/**
* <p>Sources for the job.</p>
* <p>The job's source files.</p>
*/
sources: StartReferenceImportJobSourceItem[] | undefined;
}
Expand Down Expand Up @@ -4277,7 +4288,7 @@ export interface UpdateRunGroupRequest {
maxRuns?: number;

/**
* <p>The maximum amount of time to run.</p>
* <p>A maximum run time for the group in minutes.</p>
*/
maxDuration?: number;
}
Expand Down Expand Up @@ -4324,7 +4335,7 @@ export interface StartRunRequest {
parameters?: __DocumentType;

/**
* <p>A storage capacity for the run.</p>
* <p>A storage capacity for the run in gigabytes.</p>
*/
storageCapacity?: number;

Expand All @@ -4344,7 +4355,7 @@ export interface StartRunRequest {
tags?: Record<string, string>;

/**
* <p>A request ID for the run.</p>
* <p>To ensure that requests don't run multiple times, specify a unique ID for each request.</p>
*/
requestId?: string;
}
Expand Down Expand Up @@ -4393,7 +4404,7 @@ export interface StartReadSetActivationJobRequest {
clientToken?: string;

/**
* <p>The job's sources.</p>
* <p>The job's source files.</p>
*/
sources: StartReadSetActivationJobSourceItem[] | undefined;
}
Expand Down Expand Up @@ -4442,7 +4453,7 @@ export interface StartReadSetExportJobRequest {
clientToken?: string;

/**
* <p>Sources for the job.</p>
* <p>The job's source files.</p>
*/
sources: ExportReadSet[] | undefined;
}
Expand Down Expand Up @@ -4541,7 +4552,7 @@ export interface StartReadSetImportJobRequest {
clientToken?: string;

/**
* <p>Source files to import.</p>
* <p>The job's source files.</p>
*/
sources: StartReadSetImportJobSourceItem[] | undefined;
}
Expand Down
1 change: 1 addition & 0 deletions clients/client-omics/src/protocols/Aws_restJson1.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8281,6 +8281,7 @@ const deserializeAws_restJson1VariantImportItemDetail = (
return {
jobStatus: __expectString(output.jobStatus),
source: __expectString(output.source),
statusMessage: __expectString(output.statusMessage),
} as any;
};

Expand Down
Loading

0 comments on commit 386065a

Please sign in to comment.