fix(core): detect and resolve stringified number tokens (aws#19578)
Number tokens are encoded as a range of very large negative numbers (for
example: -1.888154589709072e+289). When such a token is naively stringified,
the `resolve()` method no longer recognizes and translates it, and the encoded
number ends up verbatim in the target template in a confusing way.
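
For illustration, a minimal sketch of the failure mode, using the `Lazy.number` API that the new tests in this commit exercise (the exact leaked value in the comment is only indicative):

```ts
import { Lazy, Stack } from '@aws-cdk/core';

const stack = new Stack();

// Interpolating a number token into a string bakes its encoded form -- a very
// large negative double -- into the text.
const tok = `the answer is: ${Lazy.number({ produce: () => 86 })}`;

// Before this change, resolve() did not spot the embedded token, so the output
// contained something like 'the answer is: -1.888154589709072e+289'.
// With this change it resolves to { value: 'the answer is: 86' }.
console.log(stack.resolve({ value: tok }));
```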

However, these stringified tokens can be recognized with a regex, after which
token resolution can proceed as usual. As a result, construct authors no longer
need to call `Tokenization.stringifyNumber()` to support stringification of
number values.

Fixes aws#19546, closes aws#19550.
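
A standalone sketch of the detection idea (not the CDK internals themselves, but it mirrors the `STRINGIFIED_NUMBER_PATTERN` and `stringContainsNumberTokens` helper added to `encoding.ts` in this commit):

```ts
// Number tokens stringify as a very large negative double in scientific
// notation, e.g. -1.888154589709072e+289, so a regex over that shape is
// enough to detect an accidentally stringified token.
const STRINGIFIED_NUMBER_PATTERN = '-1\\.\\d{10,16}e\\+289';
const STRINGIFIED_NUMBER_REGEX = new RegExp(STRINGIFIED_NUMBER_PATTERN);

/** Whether the given string contains an accidentally stringified number token. */
function stringContainsNumberTokens(x: string): boolean {
  return STRINGIFIED_NUMBER_REGEX.test(x);
}

// The encoded value from the commit message above is detected...
console.log(stringContainsNumberTokens('the answer is: -1.888154589709072e+289')); // true
// ...while ordinary numbers are not.
console.log(stringContainsNumberTokens('the answer is: 86'));                      // false
```

Once a match is found, the substring is looked up in the token map and resolved like any other token, which is what the changes to `encoding.ts` and `token-map.ts` below wire up.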


----

### All Submissions:

* [ ] Have you followed the guidelines in our [Contributing guide?](https://github.com/aws/aws-cdk/blob/master/CONTRIBUTING.md)

### Adding new Unconventional Dependencies:

* [ ] This PR adds new unconventional dependencies following the process described [here](https://github.com/aws/aws-cdk/blob/master/CONTRIBUTING.md/#adding-new-unconventional-dependencies)

### New Features

* [ ] Have you added the new feature to an [integration test](https://github.com/aws/aws-cdk/blob/master/INTEGRATION_TESTS.md)?
	* [ ] Did you use `cdk-integ` to deploy the infrastructure and generate the snapshot (i.e. `cdk-integ` without `--dry-run`)?

*By submitting this pull request, I confirm that my contribution is made under the terms of the Apache-2.0 license*
rix0rrr authored and Stephen Potter committed Apr 27, 2022
1 parent a0f1300 commit 3ece514
Showing 6 changed files with 96 additions and 26 deletions.
@@ -169,6 +169,33 @@ test('supports tokens', () => {
});
});

test('container overrides are tokens', () => {
// WHEN
const task = new BatchSubmitJob(stack, 'Task', {
jobDefinitionArn: batchJobDefinition.jobDefinitionArn,
jobName: 'JobName',
jobQueueArn: batchJobQueue.jobQueueArn,
containerOverrides: {
memory: cdk.Size.mebibytes(sfn.JsonPath.numberAt('$.asdf')),
},
});

// THEN
expect(stack.resolve(task.toStateJson())).toEqual({
Type: 'Task',
Resource: { 'Fn::Join': ['', ['arn:', { Ref: 'AWS::Partition' }, ':states:::batch:submitJob.sync']] },
End: true,
Parameters: {
JobDefinition: { Ref: 'JobDefinition24FFE3ED' },
JobName: 'JobName',
JobQueue: { Ref: 'JobQueueEE3AD499' },
ContainerOverrides: {
ResourceRequirements: [{ 'Type': 'MEMORY', 'Value.$': '$.asdf' }],
},
},
});
});

test('supports passing task input into payload', () => {
// WHEN
const task = new BatchSubmitJob(stack, 'Task', {
17 changes: 9 additions & 8 deletions packages/@aws-cdk/core/lib/private/cloudformation-lang.ts
@@ -43,8 +43,8 @@ export class CloudFormationLang {

// Some case analysis to produce minimal expressions
if (parts.length === 1) { return parts[0]; }
if (parts.length === 2 && typeof parts[0] === 'string' && typeof parts[1] === 'string') {
return parts[0] + parts[1];
if (parts.length === 2 && isConcatable(parts[0]) && isConcatable(parts[1])) {
return `${parts[0]}${parts[1]}`;
}

// Otherwise return a Join intrinsic (already in the target document language to avoid taking
@@ -323,8 +323,8 @@ export function minimalCloudFormationJoin(delimiter: string, values: any[]): any
const el = values[i];
if (isSplicableFnJoinIntrinsic(el)) {
values.splice(i, 1, ...el['Fn::Join'][1]);
} else if (i > 0 && isPlainString(values[i - 1]) && isPlainString(values[i])) {
values[i - 1] += delimiter + values[i];
} else if (i > 0 && isConcatable(values[i - 1]) && isConcatable(values[i])) {
values[i - 1] = `${values[i-1]}${delimiter}${values[i]}`;
values.splice(i, 1);
} else {
i += 1;
@@ -333,10 +333,6 @@ export function minimalCloudFormationJoin(delimiter: string, values: any[]): any

return values;

function isPlainString(obj: any): boolean {
return typeof obj === 'string' && !Token.isUnresolved(obj);
}

function isSplicableFnJoinIntrinsic(obj: any): boolean {
if (!isIntrinsic(obj)) { return false; }
if (Object.keys(obj)[0] !== 'Fn::Join') { return false; }
@@ -351,6 +347,11 @@ export function minimalCloudFormationJoin(delimiter: string, values: any[]): any
}
}

function isConcatable(obj: any): boolean {
return ['string', 'number'].includes(typeof obj) && !Token.isUnresolved(obj);
}


/**
* Return whether the given value represents a CloudFormation intrinsic
*/
16 changes: 14 additions & 2 deletions packages/@aws-cdk/core/lib/private/encoding.ts
@@ -14,7 +14,10 @@ const QUOTED_BEGIN_STRING_TOKEN_MARKER = regexQuote(BEGIN_STRING_TOKEN_MARKER);
const QUOTED_BEGIN_LIST_TOKEN_MARKER = regexQuote(BEGIN_LIST_TOKEN_MARKER);
const QUOTED_END_TOKEN_MARKER = regexQuote(END_TOKEN_MARKER);

const STRING_TOKEN_REGEX = new RegExp(`${QUOTED_BEGIN_STRING_TOKEN_MARKER}([${VALID_KEY_CHARS}]+)${QUOTED_END_TOKEN_MARKER}`, 'g');
// Sometimes the number of digits is different
export const STRINGIFIED_NUMBER_PATTERN = '-1\\.\\d{10,16}e\\+289';

const STRING_TOKEN_REGEX = new RegExp(`${QUOTED_BEGIN_STRING_TOKEN_MARKER}([${VALID_KEY_CHARS}]+)${QUOTED_END_TOKEN_MARKER}|(${STRINGIFIED_NUMBER_PATTERN})`, 'g');
const LIST_TOKEN_REGEX = new RegExp(`${QUOTED_BEGIN_LIST_TOKEN_MARKER}([${VALID_KEY_CHARS}]+)${QUOTED_END_TOKEN_MARKER}`, 'g');

/**
@@ -52,7 +55,7 @@ export class TokenString {
ret.addLiteral(this.str.substring(rest, m.index));
}

ret.addToken(lookup(m[1]));
ret.addToken(lookup(m[1] ?? m[2]));

rest = this.re.lastIndex;
m = this.re.exec(this.str);
@@ -218,3 +221,12 @@ export function extractTokenDouble(encoded: number): number | undefined {
return ints[0] + shl32(ints[1] & 0xFFFF);
/* eslint-enable no-bitwise */
}

const STRINGIFIED_NUMBER_REGEX = new RegExp(STRINGIFIED_NUMBER_PATTERN);

/**
* Return whether the given string contains accidentally stringified number tokens
*/
export function stringContainsNumberTokens(x: string) {
return !!x.match(STRINGIFIED_NUMBER_REGEX);
}
6 changes: 5 additions & 1 deletion packages/@aws-cdk/core/lib/private/token-map.ts
@@ -177,8 +177,12 @@ export class TokenMap {

private registerNumberKey(token: IResolvable): number {
const counter = this.tokenCounter++;
const dbl = createTokenDouble(counter);
// Register in the number map, as well as a string representation of that token
// in the string map.
this.numberTokenMap.set(counter, token);
return createTokenDouble(counter);
this.stringTokenMap.set(`${dbl}`, token);
return dbl;
}
}

6 changes: 4 additions & 2 deletions packages/@aws-cdk/core/lib/string-fragments.ts
@@ -1,5 +1,5 @@
import { IFragmentConcatenator, IResolvable } from './resolvable';
import { isResolvableObject } from './token';
import { isResolvableObject, Token } from './token';

/**
* Result of the split of a string with Tokens
@@ -71,8 +71,10 @@ export class TokenizedStringFragments {
const mapped = mapper.mapToken(f.token);
if (isResolvableObject(mapped)) {
ret.addToken(mapped);
} else {
} else if (Token.isUnresolved(mapped)) {
ret.addIntrinsic(mapped);
} else {
ret.addLiteral(mapped);
}
break;
case 'intrinsic':
50 changes: 37 additions & 13 deletions packages/@aws-cdk/core/test/tokens.test.ts
@@ -1,5 +1,5 @@
import { Fn, isResolvableObject, Lazy, Stack, Token, Tokenization } from '../lib';
import { createTokenDouble, extractTokenDouble } from '../lib/private/encoding';
import { CfnResource, Fn, isResolvableObject, Lazy, Stack, Token, Tokenization } from '../lib';
import { createTokenDouble, extractTokenDouble, stringContainsNumberTokens, STRINGIFIED_NUMBER_PATTERN } from '../lib/private/encoding';
import { Intrinsic } from '../lib/private/intrinsic';
import { findTokens } from '../lib/private/resolve';
import { IResolvable } from '../lib/resolvable';
@@ -482,15 +482,12 @@ describe('tokens', () => {
expect(() => {
resolve({ value: encoded[0] });
}).toThrow(/Found an encoded list/);


});
});

describe('number encoding', () => {
test('basic integer encoding works', () => {
expect(16).toEqual(extractTokenDouble(createTokenDouble(16)));

});

test('arbitrary integers can be encoded, stringified, and recovered', () => {
@@ -504,16 +501,12 @@ describe('tokens', () => {
const decoded = extractTokenDouble(roundtripped);
expect(decoded).toEqual(x);
}


});

test('arbitrary numbers are correctly detected as not being tokens', () => {
expect(undefined).toEqual(extractTokenDouble(0));
expect(undefined).toEqual(extractTokenDouble(1243));
expect(undefined).toEqual(extractTokenDouble(4835e+532));


});

test('can number-encode and resolve Token objects', () => {
@@ -528,8 +521,42 @@ describe('tokens', () => {
// THEN
const resolved = resolve({ value: encoded });
expect(resolved).toEqual({ value: 123 });
});

test('regex detects all stringifications of encoded tokens', () => {
expect(stringContainsNumberTokens(`${createTokenDouble(0)}`)).toBeTruthy();
expect(stringContainsNumberTokens(`${createTokenDouble(Math.pow(2, 48) - 1)}`)).toBeTruthy(); // MAX_ENCODABLE_INTEGER
expect(stringContainsNumberTokens('1234')).toBeFalsy();
});

test('check that the first N encoded numbers can be detected', () => {
const re = new RegExp(STRINGIFIED_NUMBER_PATTERN);
// Ran this up to 1 million offline
for (let i = 0; i < 1000; i++) {
expect(`${createTokenDouble(i)}`).toMatch(re);
}
});

test('handle stringified number token', () => {
// GIVEN
const tok = `the answer is: ${Lazy.number({ produce: () => 86 })}`;

// THEN
expect(resolve({ value: `${tok}` })).toEqual({
value: 'the answer is: 86',
});
});

test('handle stringified number reference', () => {
const stack = new Stack();
const res = new CfnResource(stack, 'Resource', { type: 'My::Resource' });
// GIVEN
const tok = `the answer is: ${Token.asNumber(res.ref)}`;

// THEN
expect(resolve({ value: `${tok}` })).toEqual({
value: { 'Fn::Join': ['', ['the answer is: ', { Ref: 'Resource' }]] },
});
});
});

Expand Down Expand Up @@ -694,25 +721,21 @@ describe('tokens', () => {
describe('stringifyNumber', () => {
test('converts number to string', () => {
expect(Tokenization.stringifyNumber(100)).toEqual('100');

});

test('converts tokenized number to string', () => {
expect(resolve(Tokenization.stringifyNumber({
resolve: () => 100,
} as any))).toEqual('100');

});

test('string remains the same', () => {
expect(Tokenization.stringifyNumber('123' as any)).toEqual('123');

});

test('Ref remains the same', () => {
const val = { Ref: 'SomeLogicalId' };
expect(Tokenization.stringifyNumber(val as any)).toEqual(val);

});

test('lazy Ref remains the same', () => {
@@ -791,3 +814,4 @@ function tokensThatResolveTo(value: any): Token[] {
function resolve(x: any) {
return new Stack().resolve(x);
}
