feat(ecs): add support for elastic inference accelerators in ECS task definitions #13950

Merged
Changes from 8 commits
36 changes: 36 additions & 0 deletions packages/@aws-cdk/aws-ecs/README.md
@@ -788,3 +788,39 @@ new ecs.FargateService(stack, 'FargateService', {

app.synth();
```

## Elastic Inference Accelerators

Currently, this feature is only supported for services with the EC2 launch type.

To add elastic inference accelerators to your EC2 task, first add the
`inferenceAccelerators` field to the `Ec2TaskDefinition` and set the `deviceName`
and `deviceType` properties for each accelerator.

```ts
const inferenceAccelerators = [{
  deviceName: 'device1',
  deviceType: 'eia2.medium',
}];

const taskDefinition = new ecs.Ec2TaskDefinition(stack, 'Ec2TaskDef', {
  inferenceAccelerators,
});
```

To use the inference accelerators in a container, add the `inferenceAcceleratorResources`
field and set it to a list of device names. Each value must match the `deviceName` of an
`InferenceAccelerator` specified in the task definition.

```ts
const inferenceAcceleratorResources = ['device1'];

taskDefinition.addContainer('cont', {
  image: ecs.ContainerImage.fromRegistry('test'),
  memoryLimitMiB: 1024,
  inferenceAcceleratorResources,
});
```
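
This PR also adds imperative helpers, `TaskDefinition.addInferenceAccelerator()` and
`ContainerDefinition.addInferenceAcceleratorResource()`. The following is a minimal sketch of
how they might be used, based on the signatures in this diff; the `otherTaskDefinition` and
`container` names are illustrative only.

```ts
// Sketch only: assumes the addInferenceAccelerator() and
// addInferenceAcceleratorResource() methods introduced in this PR.
const otherTaskDefinition = new ecs.Ec2TaskDefinition(stack, 'OtherEc2TaskDef');

// Register the accelerator on the task definition first; a container-level
// resource must reference a deviceName known to the task definition.
otherTaskDefinition.addInferenceAccelerator({
  deviceName: 'device1',
  deviceType: 'eia2.medium',
});

const container = otherTaskDefinition.addContainer('cont', {
  image: ecs.ContainerImage.fromRegistry('test'),
  memoryLimitMiB: 1024,
});

// Reference the accelerator by its device name.
container.addInferenceAcceleratorResource('device1');
```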
72 changes: 72 additions & 0 deletions packages/@aws-cdk/aws-ecs/lib/base/task-definition.ts
@@ -184,6 +184,15 @@ export interface TaskDefinitionProps extends CommonTaskDefinitionProps {
* @default - PidMode used by the task is not specified
*/
readonly pidMode?: PidMode;

/**
* The inference accelerators to use for the containers in the task.
*
* Not supported in Fargate.
*
* @default - No inference accelerators.
*/
readonly inferenceAccelerators?: InferenceAccelerator[];
}

/**
@@ -322,6 +331,11 @@ export class TaskDefinition extends TaskDefinitionBase {
*/
private readonly placementConstraints = new Array<CfnTaskDefinition.TaskDefinitionPlacementConstraintProperty>();

/**
* Inference accelerators for task instances
*/
private readonly _inferenceAccelerators: InferenceAccelerator[] = [];

private _executionRole?: iam.IRole;

private _referencesSecretJsonField?: boolean;
@@ -354,12 +368,20 @@ export class TaskDefinition extends TaskDefinitionBase {
throw new Error(`Fargate-compatible tasks require both CPU (${props.cpu}) and memory (${props.memoryMiB}) specifications`);
}

if (props.inferenceAccelerators && props.inferenceAccelerators.length > 0 && this.isFargateCompatible) {
throw new Error('Cannot use inference accelerators on tasks that run on Fargate');
}

this._executionRole = props.executionRole;

this.taskRole = props.taskRole || new iam.Role(this, 'TaskRole', {
assumedBy: new iam.ServicePrincipal('ecs-tasks.amazonaws.com'),
});

if (props.inferenceAccelerators) {
props.inferenceAccelerators.forEach(ia => this.addInferenceAccelerator(ia));
}

const taskDef = new CfnTaskDefinition(this, 'Resource', {
containerDefinitions: Lazy.any({ produce: () => this.renderContainers() }, { omitEmptyArray: true }),
volumes: Lazy.any({ produce: () => this.renderVolumes() }, { omitEmptyArray: true }),
@@ -380,6 +402,10 @@
memory: props.memoryMiB,
ipcMode: props.ipcMode,
pidMode: props.pidMode,
inferenceAccelerators: Lazy.any({
produce: () =>
!isFargateCompatible(this.compatibility) ? this.renderInferenceAccelerators() : undefined,
}, { omitEmptyArray: true }),
});

if (props.placementConstraints) {
@@ -393,6 +419,13 @@
return this._executionRole;
}

/**
* Public getter method to access the list of inference accelerators attached to the instance.
*/
public get inferenceAccelerators(): InferenceAccelerator[] {
return this._inferenceAccelerators;
}

private renderVolumes(): CfnTaskDefinition.VolumeProperty[] {
return this.volumes.map(renderVolume);

@@ -419,6 +452,17 @@
}
}

private renderInferenceAccelerators(): CfnTaskDefinition.InferenceAcceleratorProperty[] {
return this._inferenceAccelerators.map(renderInferenceAccelerator);

function renderInferenceAccelerator(inferenceAccelerator: InferenceAccelerator) : CfnTaskDefinition.InferenceAcceleratorProperty {
return {
deviceName: inferenceAccelerator.deviceName,
deviceType: inferenceAccelerator.deviceType,
};
}
}

/**
* Validate the existence of the input target and set default values.
*
@@ -531,6 +575,16 @@ export class TaskDefinition extends TaskDefinitionBase {
extension.extend(this);
}

/**
* Adds an inference accelerator to the task definition.
*/
public addInferenceAccelerator(inferenceAccelerator: InferenceAccelerator) {
if (isFargateCompatible(this.compatibility)) {
throw new Error('Cannot use inference accelerators on tasks that run on Fargate');
}
this._inferenceAccelerators.push(inferenceAccelerator);
}

/**
* Creates the task execution IAM role if it doesn't already exist.
*/
@@ -683,6 +737,24 @@ export enum PidMode {
TASK = 'task',
}

/**
* Elastic Inference Accelerator.
* For more information, see [Elastic Inference Basics](https://docs.aws.amazon.com/elastic-inference/latest/developerguide/basics.html)
*/
export interface InferenceAccelerator {
/**
* The Elastic Inference accelerator device name.
* @default - empty
*/
readonly deviceName?: string;

/**
* The Elastic Inference accelerator type to use. The allowed values are: eia2.medium, eia2.large and eia2.xlarge.
* @default - empty
*/
readonly deviceType?: string;
}

/**
* A data volume used in a task definition.
*
63 changes: 56 additions & 7 deletions packages/@aws-cdk/aws-ecs/lib/container-definition.ts
@@ -294,6 +294,12 @@ export interface ContainerDefinitionOptions {
* @default - No ports are mapped.
*/
readonly portMappings?: PortMapping[];

/**
* The inference accelerators referenced by the container.
* @default - No inference accelerators assigned.
*/
readonly inferenceAcceleratorResources?: string[];
}

/**
@@ -386,6 +392,11 @@ export class ContainerDefinition extends CoreConstruct {
*/
public readonly referencesSecretJsonField?: boolean;

/**
* The inference accelerators referenced by this container.
*/
private readonly inferenceAcceleratorResources: string[] = [];

/**
* The configured container links
*/
@@ -443,6 +454,10 @@ export class ContainerDefinition extends CoreConstruct {
if (props.portMappings) {
this.addPortMappings(...props.portMappings);
}

if (props.inferenceAcceleratorResources) {
this.addInferenceAcceleratorResource(...props.inferenceAcceleratorResources);
}
}

/**
@@ -516,6 +531,23 @@ export class ContainerDefinition extends CoreConstruct {
}));
}

/**
* This method adds one or more inference accelerator resources to the container.
*/
public addInferenceAcceleratorResource(...inferenceAcceleratorResources: string[]) {
if (!this.taskDefinition.inferenceAccelerators) {
throw new Error('InferenceAccelerator resource(s) defined in container definition without specifying any inference accelerators in task definition.');
}
this.inferenceAcceleratorResources.push(...inferenceAcceleratorResources.map(resource => {
for (const inferenceAccelerator of this.taskDefinition.inferenceAccelerators) {
if (resource === inferenceAccelerator.deviceName) {
return resource;
}
}
throw new Error(`Resource value (${resource}) doesn't match any inference accelerator device name.`);
}));
}

/**
* This method adds one or more ulimits to the container.
*/
@@ -631,7 +663,7 @@ export class ContainerDefinition extends CoreConstruct {
healthCheck: this.props.healthCheck && renderHealthCheck(this.props.healthCheck),
links: cdk.Lazy.list({ produce: () => this.links }, { omitEmpty: true }),
linuxParameters: this.linuxParameters && this.linuxParameters.renderLinuxParameters(),
resourceRequirements: (this.props.gpuCount !== undefined) ? renderResourceRequirements(this.props.gpuCount) : undefined,
resourceRequirements: renderResourceRequirements(this.props.gpuCount, this.inferenceAcceleratorResources),
};
}
}
@@ -742,12 +774,29 @@ function getHealthCheckCommand(hc: HealthCheck): string[] {
return hcCommand.concat(cmd);
}

function renderResourceRequirements(gpuCount: number): CfnTaskDefinition.ResourceRequirementProperty[] | undefined {
if (gpuCount === 0) { return undefined; }
return [{
type: 'GPU',
value: gpuCount.toString(),
}];
function renderResourceRequirements(gpuCount: number = 0, inferenceAcceleratorResources: string[] = []):
CfnTaskDefinition.ResourceRequirementProperty[] | undefined {
if (inferenceAcceleratorResources.length > 0 && gpuCount > 0) {
throw new Error('Both inference accelerator and gpu count defined in the container definition.');
@iamhopaul123 (Contributor) commented on Apr 8, 2021:

Suggested change:
throw new Error('Both inference accelerator and gpu count defined in the container definition.');
throw new Error('Cannot define both inference accelerator and gpu count in the container definition.');

We might need to be more specific about the reason why this condition fails (https://uxdworld.com/2018/05/30/how-to-write-good-error-messages/), and we also need a unit test for this.

The author (Contributor) replied:

Discussed this offline, but posting here for the record: we decided to render both the gpuCount and inferenceAcceleratorResources properties and let the validators in other stages handle this check.

}
if (inferenceAcceleratorResources.length > 0) {
A Contributor commented:

Just a nit, but this check might not be required?

const ret = [];

for (const resource of inferenceAcceleratorResources) {
ret.push({
type: 'InferenceAccelerator',
value: resource,
});
}
return ret;
}
if (gpuCount > 0) {
return [{
type: 'GPU',
value: gpuCount.toString(),
}];
}
return undefined;
}

/**
13 changes: 12 additions & 1 deletion packages/@aws-cdk/aws-ecs/lib/ec2/ec2-task-definition.ts
@@ -1,4 +1,5 @@
import { Construct } from 'constructs';
import { ImportedTaskDefinition } from '../base/_imported-task-definition';
import {
CommonTaskDefinitionAttributes,
CommonTaskDefinitionProps,
@@ -8,9 +9,9 @@ import {
NetworkMode,
PidMode,
TaskDefinition,
InferenceAccelerator,
} from '../base/task-definition';
import { PlacementConstraint } from '../placement';
import { ImportedTaskDefinition } from '../base/_imported-task-definition';

/**
* The properties for a task definition run on an EC2 cluster.
@@ -51,6 +52,15 @@ export interface Ec2TaskDefinitionProps extends CommonTaskDefinitionProps {
* @default - PidMode used by the task is not specified
*/
readonly pidMode?: PidMode;

/**
* The inference accelerators to use for the containers in the task.
*
* Not supported in Fargate.
*
* @default - No inference accelerators.
*/
readonly inferenceAccelerators?: InferenceAccelerator[];
}

/**
@@ -109,6 +119,7 @@ export class Ec2TaskDefinition extends TaskDefinition implements IEc2TaskDefinit
placementConstraints: props.placementConstraints,
ipcMode: props.ipcMode,
pidMode: props.pidMode,
inferenceAccelerators: props.inferenceAccelerators,
});
}
}