diff --git a/.vscode/launch.json b/.vscode/launch.json index 02d73d813..cc9c9d687 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -9,12 +9,22 @@ "request": "launch", "name": "Azurite Service - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/azurite.ts", "-d", "debug.log"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/azurite.ts", + "-d", + "debug.log" + ], "env": { "AZURITE_ACCOUNTS": "" }, - "skipFiles": ["node_modules/*/**", "/*/**"], + "skipFiles": [ + "node_modules/*/**", + "/*/**" + ], "outputCapture": "std" }, { @@ -35,8 +45,16 @@ "request": "launch", "name": "Azurite Service - Loki, Loose", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/azurite.ts", "-d", "debug.log", "-L"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/azurite.ts", + "-d", + "debug.log", + "-L" + ], "env": { "AZURITE_ACCOUNTS": "" }, @@ -47,7 +65,10 @@ "request": "launch", "name": "Azurite Service - Loki, Loose, HTTPS, OAuth", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/src/azurite.ts", "-L", @@ -70,7 +91,10 @@ "request": "launch", "name": "Azurite Queue Service - Loki, HTTPS, OAuth", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/src/queue/main.ts", "-d", @@ -92,7 +116,10 @@ "request": "launch", "name": "Azurite Blob Service - Loki, HTTPS, OAuth", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/src/blob/main.ts", "-d", @@ -114,8 +141,15 @@ "request": "launch", "name": "Azurite Blob Service - SQL", "cwd": "${workspaceFolder}", - 
"runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/blob/main.ts", "-d", "debug.log"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/blob/main.ts", + "-d", + "debug.log" + ], "env": { "AZURITE_DB": "mysql://root:my-secret-pw@127.0.0.1:3306/azurite_blob", "AZURITE_ACCOUNTS": "" @@ -127,8 +161,15 @@ "request": "launch", "name": "Azurite Table Service - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/src/table/main.ts", "-d", "debug.log"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/src/table/main.ts", + "-d", + "debug.log" + ], "env": { "AZURITE_ACCOUNTS": "" }, @@ -139,8 +180,13 @@ "request": "launch", "name": "Current TS File", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], - "args": ["${workspaceFolder}/${relativeFile}"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], + "args": [ + "${workspaceFolder}/${relativeFile}" + ], "outputCapture": "std" }, { @@ -148,7 +194,10 @@ "request": "launch", "name": "Current Mocha TS File - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/node_modules/mocha/bin/_mocha", "-u", @@ -162,7 +211,7 @@ "AZURITE_ACCOUNTS": "", "AZURE_TABLE_STORAGE": "", "DATATABLES_ACCOUNT_NAME": "", - "DATATABLES_ACCOUNT_KEY" : "", + "DATATABLES_ACCOUNT_KEY": "", "AZURE_DATATABLES_STORAGE_STRING": "https://.table.core.windows.net", "AZURE_DATATABLES_SAS": "?", "NODE_TLS_REJECT_UNAUTHORIZED": "0" @@ -175,7 +224,10 @@ "request": "launch", "name": "EXE Mocha TS File - Loki", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/node_modules/mocha/bin/_mocha", "-u", @@ -183,7 +235,7 @@ "--timeout", "999999", "--colors", - 
"${workspaceFolder}/tests/exe.test.ts", + "${workspaceFolder}/tests/exe.test.ts", "--exit" ], "env": { @@ -199,7 +251,10 @@ "request": "launch", "name": "Current Mocha TS File - SQL", "cwd": "${workspaceFolder}", - "runtimeArgs": ["-r", "ts-node/register"], + "runtimeArgs": [ + "-r", + "ts-node/register" + ], "args": [ "${workspaceFolder}/node_modules/mocha/bin/_mocha", "-u", @@ -211,7 +266,7 @@ ], "env": { "AZURITE_ACCOUNTS": "", - "AZURITE_TEST_DB": "mysql://root:my-secret-pw@127.0.0.1:3306/azurite_blob_test", + "AZURITE_TEST_DB": "mysql://root:!!123abc@127.0.0.1:3306/azurite_blob", "NODE_TLS_REJECT_UNAUTHORIZED": "0" }, "internalConsoleOptions": "openOnSessionStart", @@ -250,9 +305,13 @@ "type": "extensionHost", "request": "launch", "runtimeExecutable": "${execPath}", - "args": ["--extensionDevelopmentPath=${workspaceFolder}"], - "outFiles": ["${workspaceFolder}/dist/**/*.js"], + "args": [ + "--extensionDevelopmentPath=${workspaceFolder}" + ], + "outFiles": [ + "${workspaceFolder}/dist/**/*.js" + ], "preLaunchTask": "npm: watch" } ] -} +} \ No newline at end of file diff --git a/src/blob/errors/StorageErrorFactory.ts b/src/blob/errors/StorageErrorFactory.ts index 5290b7854..5fd4bca05 100644 --- a/src/blob/errors/StorageErrorFactory.ts +++ b/src/blob/errors/StorageErrorFactory.ts @@ -780,7 +780,6 @@ export default class StorageErrorFactory { { ReceivedCopyStatus: copyStatus } ); } - public static getInvalidMetadata(contextID: string): StorageError { return new StorageError( @@ -825,7 +824,7 @@ export default class StorageErrorFactory { "The tags specified are invalid. 
It contains characters that are not permitted.", contextID ); - } + } public static getInvalidXmlDocument( contextID: string = "" diff --git a/src/blob/handlers/ContainerHandler.ts b/src/blob/handlers/ContainerHandler.ts index 9fde0d7b4..450dc8049 100644 --- a/src/blob/handlers/ContainerHandler.ts +++ b/src/blob/handlers/ContainerHandler.ts @@ -368,7 +368,48 @@ export default class ContainerHandler extends BaseHandler public async filterBlobs(options: Models.ContainerFilterBlobsOptionalParams, context: Context ): Promise { - throw new NotImplementedError(context.contextId!); + const blobCtx = new BlobStorageContext(context); + const accountName = blobCtx.account!; + const containerName = blobCtx.container!; + await this.metadataStore.checkContainerExist( + context, + accountName, + containerName + ); + + const request = context.request!; + const marker = options.marker; + options.marker = options.marker || ""; + if ( + options.maxresults === undefined || + options.maxresults > DEFAULT_LIST_BLOBS_MAX_RESULTS + ) { + options.maxresults = DEFAULT_LIST_BLOBS_MAX_RESULTS; + } + + const [blobs, nextMarker] = await this.metadataStore.filterBlobs( + context, + accountName, + containerName, + options.where, + options.maxresults, + marker, + ); + + const serviceEndpoint = `${request.getEndpoint()}/${accountName}`; + const response: Models.ContainerFilterBlobsResponse = { + statusCode: 200, + requestId: context.contextId, + version: BLOB_API_VERSION, + date: context.startTime, + serviceEndpoint, + where: options.where!, + blobs: blobs, + clientRequestId: options.requestId, + nextMarker: `${nextMarker || ""}` + }; + + return response; } /** diff --git a/src/blob/handlers/ServiceHandler.ts b/src/blob/handlers/ServiceHandler.ts index 27a2d3f96..c7cb5010e 100644 --- a/src/blob/handlers/ServiceHandler.ts +++ b/src/blob/handlers/ServiceHandler.ts @@ -6,6 +6,7 @@ import IServiceHandler from "../generated/handlers/IServiceHandler"; import { parseXML } from "../generated/utils/xml"; 
import { BLOB_API_VERSION, + DEFAULT_LIST_BLOBS_MAX_RESULTS, DEFAULT_LIST_CONTAINERS_MAX_RESULTS, EMULATOR_ACCOUNT_ISHIERARCHICALNAMESPACEENABLED, EMULATOR_ACCOUNT_KIND, @@ -22,8 +23,7 @@ import { Readable } from "stream"; import { OAuthLevel } from "../../common/models"; import { BEARER_TOKEN_PREFIX } from "../../common/utils/constants"; import { decode } from "jsonwebtoken"; -import { getUserDelegationKeyValue } from "../utils/utils"; -import NotImplementedError from "../errors/NotImplementedError"; +import { getUserDelegationKeyValue } from "../utils/utils" /** * ServiceHandler handles Azure Storage Blob service related requests. @@ -373,10 +373,44 @@ export default class ServiceHandler extends BaseHandler return this.getAccountInfo(context); } - public filterBlobs( + public async filterBlobs( options: Models.ServiceFilterBlobsOptionalParams, context: Context ): Promise { - throw new NotImplementedError(context.contextId); + const blobCtx = new BlobStorageContext(context); + const accountName = blobCtx.account!; + + const request = context.request!; + const marker = options.marker; + options.marker = options.marker || ""; + if ( + options.maxresults === undefined || + options.maxresults > DEFAULT_LIST_BLOBS_MAX_RESULTS + ) { + options.maxresults = DEFAULT_LIST_BLOBS_MAX_RESULTS; + } + + const [blobs, nextMarker] = await this.metadataStore.filterBlobs( + context, + accountName, + undefined, + options.where, + options.maxresults, + marker, + ); + + const serviceEndpoint = `${request.getEndpoint()}/${accountName}`; + const response: Models.ServiceFilterBlobsResponse = { + statusCode: 200, + requestId: context.contextId, + version: BLOB_API_VERSION, + date: context.startTime, + serviceEndpoint, + where: options.where!, + blobs: blobs, + clientRequestId: options.requestId, + nextMarker: `${nextMarker || ""}` + }; + return response; } } diff --git a/src/blob/persistence/FilterBlobPage.ts b/src/blob/persistence/FilterBlobPage.ts new file mode 100644 index 
000000000..5fda2a210 --- /dev/null +++ b/src/blob/persistence/FilterBlobPage.ts @@ -0,0 +1,128 @@ + +/** + * This implements a page of blob results. + * When maxResults is smaller than the number of prefixed items in the metadata source, multiple reads from + * the source may be necessary. + * + * @export + * @class FilterBlobPage + */ +export default class FilterBlobPage { + readonly maxResults: number; + + filterBlobItems: FilterBlobType[] = []; + latestMarker: string = ""; + + // isFull indicates we could only (maybe) add a prefix + private isFull: boolean = false; + + // isExhausted indicates nothing more should be added + private isExhausted: boolean = false; + + constructor(maxResults: number) { + this.maxResults = maxResults; + } + + /** + * Empty the page (useful in unit tests) + * + */ + public reset() { + this.filterBlobItems.splice(0); + this.isFull = false; + this.isExhausted = false; + this.latestMarker = ""; + } + + private updateFull() { + this.isFull = (this.filterBlobItems.length === this.maxResults); + } + + /** + * addItem will add to the blob list if possible and update the full/exhausted state of the page + */ + private addItem(item: FilterBlobType): boolean { + if (this.isExhausted) { + return false; + } + let added: boolean = false; + if (! this.isFull) { + this.filterBlobItems.push(item); + added = true; + } + this.updateFull(); + + // if a blob causes fullness the next item read cannot be squashed only duplicate prefixes can + this.isExhausted = this.isFull; + return added; + } + + /** + * Add a BlobType item to the appropriate collection, update the marker + * + * When the page becomes full, items may still be added iff the item is existing prefix + * + * Return the number of items added + */ + private add(name: string, item: FilterBlobType): boolean { + if (this.isExhausted) { + return false; + } + if (name < this.latestMarker) { + throw new Error("add received unsorted item. 
add must be called on sorted data"); + } + const marker = (name > this.latestMarker) ? name : this.latestMarker; + let added: boolean = false; + added = this.addItem(item); + if (added) { + this.latestMarker = marker; + } + return added; + } + + /** + * Iterate over an array blobs read from a source and add them until the page cannot accept new items + */ + private processList(docs: FilterBlobType[], nameFn: (item: FilterBlobType) => string): number { + let added: number = 0; + for (const item of docs) { + if (this.add(nameFn(item), item)) { + added++; + } + if (this.isExhausted) break; + } + return added; + } + + /** + * Fill the page if possible by using the provided reader function. + * + * For any BlobType, the name is used with delimiter to treat the item as a blob or + * a BlobPrefix for the list blobs result. + * + * This function will use the reader for BlobType to keep reading from a metadata + * data source until the source has no more items or the page cannot add any more items. + * + * Return the contents of the page, blobs, prefixes, and a continuation token if applicable + */ + public async fill( + reader: (offset: number) => Promise, + namer: (item: FilterBlobType) => string, + ): Promise<[FilterBlobType[], string]> { + let offset: number = 0; + let docs = await reader(offset); + let added: number = 0; + while (docs.length) { + added = this.processList(docs, namer); + offset += added; + if (added < this.maxResults) { + break; + } + docs = await reader(offset); + } + return [ + this.filterBlobItems, + added < docs.length ? 
this.latestMarker : "" + ]; + } +} diff --git a/src/blob/persistence/IBlobMetadataStore.ts b/src/blob/persistence/IBlobMetadataStore.ts index 12d91ca7d..186a9a536 100644 --- a/src/blob/persistence/IBlobMetadataStore.ts +++ b/src/blob/persistence/IBlobMetadataStore.ts @@ -4,6 +4,7 @@ import IDataStore from "../../common/IDataStore"; import IGCExtentProvider from "../../common/IGCExtentProvider"; import * as Models from "../generated/artifacts/models"; import Context from "../generated/Context"; +import { FilterBlobItem } from "../generated/artifacts/models"; /** * This model describes a chunk inside a persistency extent for a given extent ID. @@ -153,6 +154,8 @@ interface IGetBlobPropertiesRes { } export type GetBlobPropertiesRes = IGetBlobPropertiesRes; +export type FilterBlobModel = FilterBlobItem; + // The response model for each lease-related request. interface IBlobLeaseResponse { properties: Models.BlobPropertiesInternal; @@ -212,8 +215,8 @@ export type BlockModel = IBlockAdditionalProperties & PersistencyBlockModel; */ export interface IBlobMetadataStore extends IGCExtentProvider, - IDataStore, - ICleaner { + IDataStore, + ICleaner { /** * Update blob service properties. Create service properties if not exists in persistency layer. * @@ -502,6 +505,15 @@ export interface IBlobMetadataStore includeUncommittedBlobs?: boolean ): Promise<[BlobModel[], string | undefined]>; + filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults?: number, + marker?: string, + ): Promise<[FilterBlobModel[], string | undefined]>; + /** * Create blob item in persistency layer. Will replace if blob exists. * @@ -1078,7 +1090,7 @@ export interface IBlobMetadataStore listUncommittedBlockPersistencyChunks( marker?: string, maxResults?: number - ): Promise<[IExtentChunk[], string | undefined]>; + ): Promise<[IExtentChunk[], string | undefined]>; /** * Set blob tags. 
@@ -1103,7 +1115,7 @@ export interface IBlobMetadataStore leaseAccessConditions: Models.LeaseAccessConditions | undefined, tags: Models.BlobTags | undefined, modifiedAccessConditions?: Models.ModifiedAccessConditions - ): Promise; + ): Promise; /** * Get blob tags. @@ -1125,7 +1137,7 @@ export interface IBlobMetadataStore blob: string, snapshot: string | undefined, leaseAccessConditions: Models.LeaseAccessConditions | undefined, - modifiedAccessConditions?: Models.ModifiedAccessConditions + modifiedAccessConditions?: Models.ModifiedAccessConditions, ): Promise; } diff --git a/src/blob/persistence/LokiBlobMetadataStore.ts b/src/blob/persistence/LokiBlobMetadataStore.ts index dbf231c9b..a6b43594c 100644 --- a/src/blob/persistence/LokiBlobMetadataStore.ts +++ b/src/blob/persistence/LokiBlobMetadataStore.ts @@ -47,6 +47,7 @@ import IBlobMetadataStore, { ChangeContainerLeaseResponse, ContainerModel, CreateSnapshotResponse, + FilterBlobModel, GetBlobPropertiesRes, GetContainerAccessPolicyResponse, GetContainerPropertiesResponse, @@ -62,7 +63,9 @@ import IBlobMetadataStore, { SetContainerAccessPolicyOptions } from "./IBlobMetadataStore"; import PageWithDelimiter from "./PageWithDelimiter"; -import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; +import FilterBlobPage from "./FilterBlobPage"; +import { generateQueryBlobWithTagsWhereFunction } from "./QueryInterpreter/QueryInterpreter"; +import { getBlobTagsCount, getTagsFromString, toBlobTags } from "../utils/utils"; /** * This is a metadata source implementation for blob based on loki DB. 
@@ -821,6 +824,80 @@ export default class LokiBlobMetadataStore } } + public async filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker: string = "", + ): Promise<[FilterBlobModel[], string | undefined]> { + const query: any = {}; + if (account !== undefined) { + query.accountName = account; + } + if (container !== undefined) { + query.containerName = container; + await this.checkContainerExist( + context, + account, + container + ); + } + + const filterFunction = generateQueryBlobWithTagsWhereFunction(context, where!); + + const coll = this.db.getCollection(this.BLOBS_COLLECTION); + const page = new FilterBlobPage(maxResults); + const readPage = async (offset: number): Promise => { + const doc = await coll + .chain() + .find(query) + .where((obj) => { + return obj.name > marker!; + }) + .where((obj) => { + return obj.snapshot === undefined || obj.snapshot === ''; + }) + .sort((obj1, obj2) => { + if (obj1.name === obj2.name) return 0; + if (obj1.name > obj2.name) return 1; + return -1; + }) + .offset(offset) + .limit(maxResults) + .data(); + + return doc.map((item) => { + let blobItem: FilterBlobModel; + blobItem = { + name: item.name, + containerName: item.containerName, + tags: item.blobTags + }; + return blobItem; + }).filter((blobItem) => { + const tagsMeetConditions = filterFunction(blobItem); + if (tagsMeetConditions.length !== 0) { + blobItem.tags = { blobTagSet: toBlobTags(tagsMeetConditions) }; + return true; + } + return false; + }); + }; + + const nameItem = (item: FilterBlobModel) => { + return item.name; + }; + + const [blobItems, nextMarker] = await page.fill(readPage, nameItem); + + return [ + blobItems, + nextMarker + ]; + } + public async listBlobs( context: Context, account: string, @@ -3402,6 +3479,20 @@ export default class LokiBlobMetadataStore context ); + if (modifiedAccessConditions?.ifTags) { + const validateFunction = 
generateQueryBlobWithTagsWhereFunction(context, modifiedAccessConditions?.ifTags, 'x-ms-if-tags'); + const blobItem: FilterBlobModel = { + name: doc.name, + containerName: doc.containerName, + tags: doc.blobTags + }; + + if (modifiedAccessConditions?.ifTags !== undefined + && validateFunction(blobItem).length === 0) { + throw StorageErrorFactory.getConditionNotMet(context.contextId!); + } + } + return doc.blobTags; } diff --git a/src/blob/persistence/QueryInterpreter/IQueryContext.ts b/src/blob/persistence/QueryInterpreter/IQueryContext.ts new file mode 100644 index 000000000..3f42ca1a4 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/IQueryContext.ts @@ -0,0 +1 @@ +export type IQueryContext = any; \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts b/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts new file mode 100644 index 000000000..543197a65 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryInterpreter.ts @@ -0,0 +1,77 @@ +import { BlobTags } from "../../generated/artifacts/models"; +import Context from "../../generated/Context"; +import { FilterBlobModel } from "../IBlobMetadataStore"; +import BinaryOperatorNode from "./QueryNodes/BinaryOperatorNode"; +import ExpressionNode from "./QueryNodes/ExpressionNode"; +import IQueryNode, { TagContent } from "./QueryNodes/IQueryNode"; +import KeyNode from "./QueryNodes/KeyNode"; +import parseQuery from "./QueryParser"; + +export default function executeQuery(context: FilterBlobModel, queryTree: IQueryNode): TagContent[] { + let tags: any = {}; + const blobTags = context.tags; + if (blobTags) { + let blobTagsValue: BlobTags; + if (typeof (blobTags) === 'string') { + blobTagsValue = JSON.parse(blobTags as any); + } + else { + blobTagsValue = blobTags; + } + blobTagsValue.blobTagSet.forEach((aTag) => { + tags[aTag.key] = aTag.value; + }) + } + tags["@container"] = context.containerName; + return queryTree.evaluate(tags) +} + +/** + * Validates 
that the provided query tree represents a valid query. + * + * That is, a query containing at least one conditional expression, + * where every conditional expression operates on at least + * one column or built-in identifier (i.e. comparison between two constants is not allowed). + * + * @param {IQueryNode} queryTree + */ +export function validateQueryTree(queryTree: IQueryNode) { + const identifierReferences = countIdentifierReferences(queryTree); + + if (!identifierReferences) { + throw new Error("Invalid Query, no identifier references found.") + } +} + +function countIdentifierReferences(queryTree: IQueryNode): number { + if (queryTree instanceof KeyNode) { + return 1; + } + + if (queryTree instanceof BinaryOperatorNode) { + return countIdentifierReferences(queryTree.left) + countIdentifierReferences(queryTree.right) + } + + if (queryTree instanceof ExpressionNode) { + return countIdentifierReferences(queryTree.child) + } + + return 0 +} + + +export function generateQueryBlobWithTagsWhereFunction( + requestContext: Context, + query: string | undefined, + conditionHeader?: string, +): (entity: any) => TagContent[] { + if (query === undefined) { + return () => { + return []; + } + } + + const queryTree = parseQuery(requestContext, query, conditionHeader); + validateQueryTree(queryTree); + return (entity) => executeQuery(entity, queryTree); +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts new file mode 100644 index 000000000..48ffab431 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/AndNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class AndNode extends BinaryOperatorNode { + get name(): string { + return `and` + } + + evaluate(context: IQueryContext): TagContent[] { + const 
leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + if (leftContent.length !== 0 && rightContent.length !== 0) { + return leftContent.concat(rightContent); + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts new file mode 100644 index 000000000..101ffc47e --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/BinaryOperatorNode.ts @@ -0,0 +1,14 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode from "./IQueryNode"; + +export default abstract class BinaryOperatorNode implements IQueryNode { + constructor(public left: IQueryNode, public right: IQueryNode) { } + + abstract evaluate(context: IQueryContext): any + + abstract get name(): string + + toString(): string { + return `(${this.left.toString()} ${this.name} ${this.right.toString()})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts new file mode 100644 index 000000000..94b7236b2 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/ConstantNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class ConstantNode implements IQueryNode { + constructor(private value: string) { } + + get name(): string { + return "constant" + } + + evaluate(_context: IQueryContext): TagContent[] { + return [{ + value: this.value + }]; + } + + toString(): string { + return JSON.stringify(this.value) + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts new file mode 100644 index 000000000..ffcf09cf9 --- /dev/null +++ 
b/src/blob/persistence/QueryInterpreter/QueryNodes/EqualsNode.ts @@ -0,0 +1,26 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class EqualsNode extends BinaryOperatorNode { + get name(): string { + return `eq` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value === rightContent[0].value) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts new file mode 100644 index 000000000..8c3180de1 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/ExpressionNode.ts @@ -0,0 +1,18 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class ExpressionNode implements IQueryNode { + constructor(public child: IQueryNode) { } + + get name(): string { + return "expression" + } + + evaluate(context: IQueryContext): TagContent[] { + return this.child.evaluate(context) + } + + toString(): string { + return `(${this.child.toString()})` + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts new file mode 100644 index 000000000..d2c79c375 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanEqualNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class GreaterThanEqualNode extends BinaryOperatorNode { + get name(): 
string { + return `gte` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value >= rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts new file mode 100644 index 000000000..3e461c20d --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/GreaterThanNode.ts @@ -0,0 +1,28 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class GreaterThanNode extends BinaryOperatorNode { + get name(): string { + return `gt` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value > rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts new file mode 100644 index 000000000..9f045961b --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/IQueryNode.ts @@ -0,0 +1,14 @@ +import { IQueryContext } from "../IQueryContext"; + +export interface TagContent { + key?: string; + value?: string; +} + +export default interface IQueryNode { + get name(): string + + 
evaluate(context: IQueryContext): TagContent[] + + toString(): string +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts new file mode 100644 index 000000000..dc97adf92 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/KeyNode.ts @@ -0,0 +1,21 @@ +import { IQueryContext } from "../IQueryContext"; +import IQueryNode, { TagContent } from "./IQueryNode"; + +export default class KeyNode implements IQueryNode { + constructor(private identifier: string) { } + + get name(): string { + return "id" + } + + evaluate(context: IQueryContext): TagContent[] { + return [{ + key: this.identifier, + value: context[this.identifier] + }]; + } + + toString(): string { + return this.identifier; + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts new file mode 100644 index 000000000..c6c8ef1d8 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanEqualNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class LessThanEqualNode extends BinaryOperatorNode { + get name(): string { + return `lte` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value <= rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts new file mode 
100644 index 000000000..d5d788927 --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/LessThanNode.ts @@ -0,0 +1,27 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; + +export default class LessThanNode extends BinaryOperatorNode { + get name(): string { + return `lt` + } + + evaluate(context: IQueryContext): any { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value < rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts new file mode 100644 index 000000000..b757f9cac --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/NotEqualsNode.ts @@ -0,0 +1,28 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class NotEqualsNode extends BinaryOperatorNode { + get name(): string { + return `ne` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + + if (leftContent[0].value !== undefined + && rightContent[0].value !== undefined + && (leftContent[0].value !== rightContent[0].value)) { + if (leftContent[0].key !== undefined) { + return leftContent; + } + else { + return rightContent; + } + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts b/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts new file mode 100644 index 
000000000..0337bab1d --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryNodes/OrNode.ts @@ -0,0 +1,20 @@ +import { IQueryContext } from "../IQueryContext"; +import BinaryOperatorNode from "./BinaryOperatorNode"; +import { TagContent } from "./IQueryNode"; + +export default class OrNode extends BinaryOperatorNode { + get name(): string { + return `or` + } + + evaluate(context: IQueryContext): TagContent[] { + const leftContent = this.left.evaluate(context); + const rightContent = this.right.evaluate(context); + if (leftContent.length !== 0 || rightContent.length !== 0) { + return leftContent.concat(rightContent); + } + else { + return []; + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/QueryInterpreter/QueryParser.ts b/src/blob/persistence/QueryInterpreter/QueryParser.ts new file mode 100644 index 000000000..4eb30408a --- /dev/null +++ b/src/blob/persistence/QueryInterpreter/QueryParser.ts @@ -0,0 +1,607 @@ +import StorageError from "../../errors/StorageError"; +import StorageErrorFactory from "../../errors/StorageErrorFactory"; +import Context from "../../generated/Context"; +import AndNode from "./QueryNodes/AndNode"; +import ConstantNode from "./QueryNodes/ConstantNode"; +import EqualsNode from "./QueryNodes/EqualsNode"; +import ExpressionNode from "./QueryNodes/ExpressionNode"; +import GreaterThanEqualNode from "./QueryNodes/GreaterThanEqualNode"; +import GreaterThanNode from "./QueryNodes/GreaterThanNode"; +import IQueryNode from "./QueryNodes/IQueryNode"; +import KeyNode from "./QueryNodes/KeyNode"; +import LessThanEqualNode from "./QueryNodes/LessThanEqualNode"; +import LessThanNode from "./QueryNodes/LessThanNode"; +import NotEqualsNode from "./QueryNodes/NotEqualsNode"; +import OrNode from "./QueryNodes/OrNode"; + +/** + * This file is used to parse query string for Azure Blob filter by tags and x-ms-if-tags conditions. 
+ * https://learn.microsoft.com/en-us/azure/storage/blobs/storage-manage-find-blobs?tabs=azure-portal + * https://learn.microsoft.com/en-us/rest/api/storageservices/specifying-conditional-headers-for-blob-service-operations + */ + +enum ComparisonType { + Equal, + Greater, + Less, + NotEqual +} + +interface ComparisonNode { + key: string; + existedComparison: ComparisonType[]; +} + +export default function parseQuery( + requestContext: Context, + query: string, + conditionsHeader?: string): IQueryNode { + return new QueryParser(requestContext, query, conditionsHeader).visit() +} + +/** + * A recursive descent parser for Azure Blob filter by tags query syntax. + * + * This parser is implemented using a recursive descent strategy, which composes + * layers of syntax hierarchy, roughly corresponding to the structure of an EBNF + * grammar. Each layer of the hierarchy is implemented as a method which consumes + * the syntax for that layer, and then calls the next layer of the hierarchy. + * + * So for example, the syntax tree that we currently use is composed of: + * - QUERY := EXPRESSION + * - EXPRESSION := OR + * - OR := AND ("or" OR)* + * - AND := UNARY ("and" AND)* + * - UNARY := ("not")? EXPRESSION_GROUP + * - EXPRESSION_GROUP := ("(" EXPRESSION ")") | BINARY + * - BINARY := IDENTIFIER_OR_CONSTANT (OPERATOR IDENTIFIER_OR_CONSTANT)? 
+ * - IDENTIFIER_OR_CONSTANT := CONSTANT | IDENTIFIER + * - CONSTANT := STRING + */ +class QueryParser { + constructor( + requestContext: Context, + query: string, + conditionHeader?: string) { + this.queryString = query; + this.requestContext = requestContext; + this.query = new ParserContext(this.requestContext, query, conditionHeader); + this.conditionHeader = conditionHeader; + } + + private requestContext: Context; + private query: ParserContext; + private comparisonNodes: Record<string, ComparisonNode> = {}; + private comparisonCount: number = 0; + private conditionHeader?: string; + private queryString: string; + + validateWithPreviousComparison(key: string, currentComparison: ComparisonType) { + if (this.conditionHeader) return; + if (currentComparison === ComparisonType.NotEqual) { + return; + } + + if (this.comparisonNodes[key]) { + for (let i = 0; i < this.comparisonNodes[key].existedComparison.length; ++i) { + if (currentComparison === ComparisonType.Equal) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + + if (currentComparison === ComparisonType.Greater && + (this.comparisonNodes[key].existedComparison[i] === ComparisonType.Less + || this.comparisonNodes[key].existedComparison[i] === ComparisonType.Equal)) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + + + if (currentComparison === ComparisonType.Less && + (this.comparisonNodes[key].existedComparison[i] === ComparisonType.Greater + || this.comparisonNodes[key].existedComparison[i] === ComparisonType.Equal)) { + throw new Error("can't have multiple conditions for a single tag unless they define a range"); + } + } + } + + return; + } + + appendComparionNode(key: string, currentComparison: ComparisonType) { + if (this.conditionHeader) { + return; + } + + if (key !== '@container') { + if (!this.comparisonNodes.hasOwnProperty(key)) { + ++this.comparisonCount; + } + } + + if (this.comparisonCount > 10) { + throw new 
StorageError( + 400, + `InvalidQueryParameterValue`, + `Error parsing query: there can be at most 10 unique tags in a query`, + this.requestContext.contextId!, + { + QueryParameterName: `where`, + QueryParameterValue: this.queryString + }); + } + + if (this.comparisonNodes[key]) { + this.comparisonNodes[key].existedComparison.push(currentComparison); + } + else { + this.comparisonNodes[key] = { + key: key, + existedComparison: [currentComparison] + } + } + } + + /** + * Visits the root of the query syntax tree, returning the corresponding root node. + * + * @returns {IQueryNode} + */ + visit(): IQueryNode { + return this.visitQuery(); + } + + /** + * Visits the QUERY layer of the query syntax tree, returning the appropriate node. + * + * @returns {IQueryNode} + */ + private visitQuery(): IQueryNode { + const tree = this.visitExpression(); + + this.query.skipWhitespace(); + this.query.assertEndOfQuery(); + + return tree; + } + + /** + * Visits the EXPRESSION layer of the query syntax tree, returning the appropriate node. + * + * EXPRESSION := OR + * + * @returns {IQueryNode} + */ + private visitExpression(): IQueryNode { + return this.visitOr(); + } + + /** + * Visits the OR layer of the query syntax tree, returning the appropriate node. + * + * OR := AND ("or" OR)* + * + * @returns {IQueryNode} + */ + private visitOr(): IQueryNode { + const left = this.visitAnd(); + + this.query.skipWhitespace(); + if (this.query.consume("or", true)) { + if (!this.conditionHeader) { + throw new Error("Or not allowed"); + } + const right = this.visitOr(); + return new OrNode(left, right); + } else { + return left; + } + } + + /** + * Visits the AND layer of the query syntax tree, returning the appropriate node. 
+ * + * AND := UNARY ("and" AND)* + * + * @returns {IQueryNode} + */ + private visitAnd(): IQueryNode { + const left = this.visitUnary(); + + this.query.skipWhitespace(); + if (this.query.consume("and", true)) { + const right = this.visitAnd(); + + return new AndNode(left, right); + } else { + return left; + } + } + + /** + * Visits the UNARY layer of the query syntax tree, returning the appropriate node. + * + * UNARY := ("not")? EXPRESSION_GROUP + * + * @returns {IQueryNode} + */ + private visitUnary(): IQueryNode { + this.query.skipWhitespace(); + const right = this.visitExpressionGroup() + return right; + } + + /** + * Visits the EXPRESSION_GROUP layer of the query syntax tree, returning the appropriate node. + * + * EXPRESSION_GROUP := ("(" OR ")") | BINARY + * + * @returns {IQueryNode} + */ + private visitExpressionGroup(): IQueryNode { + this.query.skipWhitespace(); + if (this.query.consume("(")) { + const child = this.visitExpression() + + this.query.skipWhitespace(); + this.query.consume(")") || this.query.throw(`Expected a ')' to close the expression group, but found '${this.query.peek()}' instead.`) + + return new ExpressionNode(child) + } else { + return this.visitBinary() + } + } + + /** + * Visits the BINARY layer of the query syntax tree, returning the appropriate node. + * + * BINARY := IDENTIFIER_OR_CONSTANT (OPERATOR IDENTIFIER_OR_CONSTANT)? 
+ * + * @returns {IQueryNode} + */ + private visitBinary(): IQueryNode { + const left = this.visitKey(); + + this.query.skipWhitespace(); + const operator = this.query.consumeOneOf(true, "=", ">=", "<=", "<>", ">", "<") + if (operator) { + const right = this.visitValue(); + + switch (operator) { + case "=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Equal); + this.appendComparionNode(left.toString(), ComparisonType.Equal); + return new EqualsNode(left, right); + case "<>": + if (!this.conditionHeader) { + throw new Error("<> not allowed"); + } + this.validateWithPreviousComparison(left.toString(), ComparisonType.NotEqual); + this.appendComparionNode(left.toString(), ComparisonType.NotEqual); + return new NotEqualsNode(left, right); + case ">=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Greater); + this.appendComparionNode(left.toString(), ComparisonType.Greater); + return new GreaterThanEqualNode(left, right); + case ">": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Greater); + this.appendComparionNode(left.toString(), ComparisonType.Greater); + return new GreaterThanNode(left, right); + case "<": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Less); + this.appendComparionNode(left.toString(), ComparisonType.Less); + return new LessThanNode(left, right); + case "<=": + this.validateWithPreviousComparison(left.toString(), ComparisonType.Less); + this.appendComparionNode(left.toString(), ComparisonType.Less); + return new LessThanEqualNode(left, right); + } + } + + return left; + } + + /** + * Visits the IDENTIFIER_OR_CONSTANT layer of the query syntax tree, returning the appropriate node. 
+ * + * IDENTIFIER_OR_CONSTANT := CONSTANT | IDENTIFIER + * + * @returns {IQueryNode} + */ + private visitValue(): IQueryNode { + this.query.skipWhitespace(); + + if (`'`.includes(this.query.peek())) { + return this.visitString(); + } + this.query.throw('expecting tag value'); + } + + private ContainsInvalidTagKeyCharacter(key: string): boolean { + for (let c of key) { + if (!(c >= 'a' && c <= 'z' || + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == '_')) { + return true; + } + } + return false; + } + + private validateKey(key: string) { + if (key.startsWith("@")) { + if (this.conditionHeader) { + this.query.throw("x-ms-if-tags not support container"); + } + + if (key !== "@container") { + this.query.throw(`unsupported parameter '${key}'`); + } + // Key is @container, no need for further check. + return; + } + + if (!this.conditionHeader && ((key.length == 0) || (key.length > 128))) { + this.query.throw('tag must be between 1 and 128 characters in length'); + } + if (this.ContainsInvalidTagKeyCharacter(key)) { + this.query.throw(`unexpected '${key}'`); + } + } + + private validateValue(value: string) { + if (!this.conditionHeader && (value.length > 256)) { + this.query.throw(`tag value must be between 0 and 256 characters in length`); + } + for (let c of value) { + if (!(c >= 'a' && c <= 'z' || + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == ' ' || + c == '+' || + c == '-' || + c == '.' || + c == '/' || + c == ':' || + c == '=' || + c == '_')) { + this.query.throw(`'${c}' not permitted in tag name or value`); + } + } + } + + /** + * Visits the STRING layer of the query syntax tree, returning the appropriate node. + * + * Strings are wrapped in either single quotes (') or double quotes (") and may contain + * doubled-up quotes to introduce a literal. + */ + private visitString(isAKey: boolean = false): IQueryNode { + const openCharacter = this.query.take() + + /** + * Strings are terminated by the same character that opened them. 
+ * But we also allow doubled-up characters to represent a literal, which means we need to only terminate a string + * when we receive an odd-number of closing characters followed by a non-closing character. + * + * Conceptually, this is represented by the following state machine: + * + * - start: normal + * - normal+(current: !') -> normal + * - normal+(current: ', next: ') -> escaping + * - normal+(current: ', next: !') -> end + * - escaping+(current: ') -> normal + * + * We can implement this using the state field of the `take` method's predicate. + */ + const content = this.query.take((c, peek, state) => { + if (state === "escaping") { + return "normal"; + } else if (c === openCharacter && peek === openCharacter) { + return "escaping"; + } else if (c !== openCharacter) { + return "normal"; + } else { + return false; + } + }); + + this.query.consume(openCharacter) || this.query.throw(`Expected a \`${openCharacter}\` to close the string, but found ${this.query.peek()} instead.`); + + if (isAKey) { + const keyName = content.replace(new RegExp(`${openCharacter}${openCharacter}`, 'g'), openCharacter); + this.validateKey(keyName); + return new KeyNode(keyName); + } + else { + const value = content.replace(new RegExp(`${openCharacter}${openCharacter}`, 'g'), openCharacter); + this.validateValue(value); + return new ConstantNode(value); + } + } + + /** + * Visits the IDENTIFIER layer of the query syntax tree, returning the appropriate node. + * + * Identifiers are a sequence of characters which are not whitespace. + * + * @returns {IQueryNode} + */ + private visitKey(): IQueryNode { + // A key name can be surrounded by double quotes. 
+ if (`"`.includes(this.query.peek())) { + return this.visitString(true); + } + else { + const identifier = this.query.take( + c => !!c.trim() && c !== '=' && c != '>' && c !== '<' + ) || this.query.throw(`Expected a valid identifier, but found '${this.query.peek()}' instead.`); + this.validateKey(identifier); + return new KeyNode(identifier) + } + } +} + +/** + * Provides the logic and helper functions for consuming tokens from a query string. + * This includes low level constructs like peeking at the next character, consuming a + * specific sequence of characters, and skipping whitespace. + */ +export class ParserContext { + constructor( + private requestContext: Context, + private query: string, + private conditionHeader?: string) { + } + private tokenPosition: number = 0; + + /** + * Asserts that the query has been fully consumed. + * + * This method should be called after the parser has finished consuming the known parts of the query. + * Any remaining query after this point is indicative of a syntax error. + */ + assertEndOfQuery() { + if (this.tokenPosition < this.query.length) { + this.throw(`Unexpected token '${this.peek()}'.`) + } + } + + /** + * Retrieves the next character in the query without advancing the parser. + * + * @returns {string} A single character, or `undefined` if the end of the query has been reached. + */ + peek(): string { + return this.query[this.tokenPosition] + } + + /** + * Advances the parser past any whitespace characters. + */ + skipWhitespace() { + while (this.query[this.tokenPosition] && !this.query[this.tokenPosition].trim()) { + this.tokenPosition++ + } + } + + /** + * Attempts to consume a given sequence of characters from the query, + * advancing the parser if the sequence is found. + * + * @param {string} sequence The sequence of characters which should be consumed. + * @param {boolean} ignoreCase Whether or not the case of the characters should be ignored. 
+ * @returns {boolean} `true` if the sequence was consumed, `false` otherwise. + */ + consume(sequence: string, ignoreCase: boolean = false): boolean { + const normalize = ignoreCase ? (s: string) => s.toLowerCase() : (s: string) => s; + + if (normalize(this.query.substring(this.tokenPosition, this.tokenPosition + sequence.length)) === normalize(sequence)) { + this.tokenPosition += sequence.length + return true + } + + return false + } + + /** + * Attempts to consume one of a given set of sequences from the query, + * advancing the parser if one of the sequences is found. + * + * Sequences are tested in the order they are provided, and the first + * sequence which is found is consumed. As such, it is important to + * avoid prefixes appearing before their longer counterparts. + * + * @param {boolean} ignoreCase Whether or not the case of the characters should be ignored. + * @param {string[]} options The list of character sequences which should be consumed. + * @returns {string | null} The sequence which was consumed, or `null` if none of the sequences were found. + */ + consumeOneOf(ignoreCase: boolean = false, ...options: string[]): string | null { + for (const option of options) { + if (this.consume(option, ignoreCase)) { + return option + } + } + + return null + } + + /** + * Consumes a sequence of characters from the query based on a character predicate function. + * + * The predicate function is called for each character in the query, and the sequence is + * consumed until the predicate returns `false` or the end of the query is reached. + * + * @param {Function} predicate The function which determines which characters should be consumed. + * @returns {string} The sequence of characters which were consumed. 
+ */ + take<T>(predicate?: (char: string, peek: string, state: T | undefined) => T): string { + const start = this.tokenPosition + let until = this.tokenPosition + + if (predicate) { + let state: T | undefined; + while (this.query[until]) { + state = predicate(this.query[until], this.query[until + 1], state) + if (!state) { + break + } + + until++; + } + } else { + // If no predicate is provided, then just take one character + until++ + } + + this.tokenPosition = until + return this.query.substring(start, until) + } + + /** + * Consumes a sequence of characters from the query based on a character predicate function, + * and then consumes a terminating sequence of characters (throwing an exception if these are not found). + * + * This function is particularly useful for consuming sequences of characters which are surrounded + * by a prefix and suffix, such as strings. + * + * @param {string} prefix The prefix which should be consumed. + * @param {Function} predicate The function which determines which characters should be consumed. + * @param {string} suffix The suffix which should be consumed. + * @returns {string | null} The sequence of characters which were consumed, or `null` if the prefix was not found. + */ + takeWithTerminator<T>(prefix: string, predicate: (char: string, peek: string, state: T | undefined) => T, suffix: string): string | null { + if (!this.consume(prefix)) { + return null; + } + + const value = this.take(predicate); + this.consume(suffix) || this.throw(`Expected "${suffix}" to close the "${prefix}...${suffix}", but found '${this.peek()}' instead.`); + + return value; + } + + /** + * Throws an exception with a message indicating the position of the parser in the query. + * @param {string} message The message to include in the exception. 
+ */ + throw(message: string): never { + if (this.conditionHeader) { + throw StorageErrorFactory.getInvalidHeaderValue( + this.requestContext.contextId!, { + HeaderName: this.conditionHeader, + HeaderValue: this.query + }); + } + else { + throw new StorageError( + 400, + `InvalidQueryParameterValue`, + `Error parsing query at or near character position ${this.tokenPosition}: ${message}`, + this.requestContext.contextId!, + { + QueryParameterName: `where`, + QueryParameterValue: this.query + }); + } + } +} \ No newline at end of file diff --git a/src/blob/persistence/SqlBlobMetadataStore.ts b/src/blob/persistence/SqlBlobMetadataStore.ts index 45666056f..df0babea7 100644 --- a/src/blob/persistence/SqlBlobMetadataStore.ts +++ b/src/blob/persistence/SqlBlobMetadataStore.ts @@ -53,6 +53,7 @@ import IBlobMetadataStore, { ChangeContainerLeaseResponse, ContainerModel, CreateSnapshotResponse, + FilterBlobModel, GetBlobPropertiesRes, GetContainerAccessPolicyResponse, GetContainerPropertiesResponse, @@ -67,7 +68,9 @@ import IBlobMetadataStore, { SetContainerAccessPolicyOptions } from "./IBlobMetadataStore"; import PageWithDelimiter from "./PageWithDelimiter"; -import { getBlobTagsCount, getTagsFromString } from "../utils/utils"; +import FilterBlobPage from "./FilterBlobPage"; +import { getBlobTagsCount, getTagsFromString, toBlobTags } from "../utils/utils"; +import { generateQueryBlobWithTagsWhereFunction } from "./QueryInterpreter/QueryInterpreter"; // tslint:disable: max-classes-per-file class ServicesModel extends Model { } @@ -1227,6 +1230,79 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { }); } + public async filterBlobs( + context: Context, + account: string, + container?: string, + where?: string, + maxResults: number = DEFAULT_LIST_BLOBS_MAX_RESULTS, + marker?: string, + ): Promise<[FilterBlobModel[], string | undefined]> { + return this.sequelize.transaction(async (t) => { + if (container) { + await this.assertContainerExists(context, 
account, container, t); + } + + let whereQuery: any; + if (container) { + whereQuery = { + accountName: account, + containerName: container + } + } + else { + whereQuery = { + accountName: account + }; + }; + + if (marker !== undefined) { + if (whereQuery.blobName !== undefined) { + whereQuery.blobName[Op.gt] = marker; + } else { + whereQuery.blobName = { + [Op.gt]: marker + }; + } + } + whereQuery.snapshot = ""; + whereQuery.deleting = 0; + + // fill the page by possibly querying multiple times + const page = new FilterBlobPage<BlobsModel>(maxResults); + + const nameItem = (item: BlobsModel): string => { + return this.getModelValue(item, "blobName", true); + }; + const filterFunction = generateQueryBlobWithTagsWhereFunction(context, where!); + + const readPage = async (off: number): Promise<BlobsModel[]> => { + return (await BlobsModel.findAll({ + where: whereQuery as any, + order: [["blobName", "ASC"]], + transaction: t, + limit: maxResults, + offset: off + })); + }; + + const [blobItems, nextMarker] = await page.fill(readPage, nameItem); + + const filterBlobModelMapper = (model: BlobsModel) => { + return this.convertDbModelToFilterBlobModel(model); + }; + + return [blobItems.map(filterBlobModelMapper).filter((blobItem) => { + const tagsMeetConditions = filterFunction(blobItem); + if (tagsMeetConditions.length !== 0) { + blobItem.tags = { blobTagSet: toBlobTags(tagsMeetConditions) }; + return true; + } + return false; + }), nextMarker]; + }); + } + public async listBlobs( context: Context, account: string, @@ -3078,6 +3154,14 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { }; } + private convertDbModelToFilterBlobModel(dbModel: BlobsModel): FilterBlobModel { + return { + containerName: this.getModelValue(dbModel, "containerName", true), + name: this.getModelValue(dbModel, "blobName", true), + tags: this.deserializeModelValue(dbModel, "blobTags") + }; + } + private convertDbModelToBlobModel(dbModel: BlobsModel): BlobModel { const contentProperties: 
IBlobContentProperties = this.convertDbModelToBlobContentProperties( dbModel @@ -3414,6 +3498,12 @@ export default class SqlBlobMetadataStore implements IBlobMetadataStore { context ).validate(new BlobReadLeaseValidator(leaseAccessConditions)); + if (modifiedAccessConditions?.ifTags) { + const validateFunction = generateQueryBlobWithTagsWhereFunction(context, modifiedAccessConditions?.ifTags, 'x-ms-if-tags'); + if (!validateFunction(blobModel)) { + throw new Error("412"); + } + } return blobModel.blobTags; }); } diff --git a/src/blob/utils/utils.ts b/src/blob/utils/utils.ts index 38df653c6..0f7fe895c 100644 --- a/src/blob/utils/utils.ts +++ b/src/blob/utils/utils.ts @@ -3,6 +3,7 @@ import { createWriteStream, PathLike } from "fs"; import StorageErrorFactory from "../errors/StorageErrorFactory"; import { USERDELEGATIONKEY_BASIC_KEY } from "./constants"; import { BlobTag, BlobTags } from "@azure/storage-blob"; +import { TagContent } from "../persistence/QueryInterpreter/QueryNodes/IQueryNode"; export function checkApiVersion( inputApiVersion: string, @@ -153,7 +154,7 @@ export function getUserDelegationKeyValue( signedStartsOn: string, signedExpiresOn: string, signedVersion: string, -) : string { +): string { const stringToSign = [ signedObjectid, signedTenantid, @@ -167,19 +168,18 @@ export function getUserDelegationKeyValue( } export function getBlobTagsCount( - blobTags: BlobTags | undefined -) : number | undefined { + blobTags: BlobTags | undefined +): number | undefined { return (blobTags === undefined || blobTags?.blobTagSet.length === 0) ? 
undefined : blobTags?.blobTagSet.length } export function getTagsFromString(blobTagsString: string, contextID: string): BlobTags | undefined { - if (blobTagsString === '' || blobTagsString === undefined) - { + if (blobTagsString === '' || blobTagsString === undefined) { return undefined; } - let blobTags:BlobTag[] = []; + let blobTags: BlobTag[] = []; const rawTags = blobTagsString.split("&"); - rawTags.forEach((rawTag)=>{ + rawTags.forEach((rawTag) => { const tagpair = rawTag.split("="); blobTags.push({ // When the Blob tag is input with header, it's encoded, sometimes space will be encoded to "+" ("+" will be encoded to "%2B") @@ -190,28 +190,28 @@ export function getTagsFromString(blobTagsString: string, contextID: string): Bl }) validateBlobTag( { - blobTagSet:blobTags, + blobTagSet: blobTags, }, contextID ); return { - blobTagSet:blobTags, + blobTagSet: blobTags, }; } // validate as the limitation from https://learn.microsoft.com/en-us/rest/api/storageservices/set-blob-tags?tabs=azure-ad#request-body export function validateBlobTag(tags: BlobTags, contextID: string): void { - if (tags.blobTagSet.length > 10){ + if (tags.blobTagSet.length > 10) { throw StorageErrorFactory.getTagsTooLarge(contextID); } - tags.blobTagSet.forEach((tag)=>{ - if (tag.key.length == 0){ + tags.blobTagSet.forEach((tag) => { + if (tag.key.length == 0) { throw StorageErrorFactory.getEmptyTagName(contextID); } - if (tag.key.length > 128){ + if (tag.key.length > 128) { throw StorageErrorFactory.getTagsTooLarge(contextID); } - if (tag.value.length > 256){ + if (tag.value.length > 256) { throw StorageErrorFactory.getTagsTooLarge(contextID); } if (ContainsInvalidTagCharacter(tag.key)) { @@ -223,23 +223,37 @@ export function validateBlobTag(tags: BlobTags, contextID: string): void { }); } -function ContainsInvalidTagCharacter(s: string): boolean{ - for (let c of s) - { +function ContainsInvalidTagCharacter(s: string): boolean { + for (let c of s) { if (!(c >= 'a' && c <= 'z' || - c >= 'A' && c 
<= 'Z' || - c >= '0' && c <= '9' || - c == ' ' || - c == '+' || - c == '-' || - c == '.' || - c == '/' || - c == ':' || - c == '=' || - c == '_')) - { - return true; + c >= 'A' && c <= 'Z' || + c >= '0' && c <= '9' || + c == ' ' || + c == '+' || + c == '-' || + c == '.' || + c == '/' || + c == ':' || + c == '=' || + c == '_')) { + return true; } } - return false; + return false; +} + +export function toBlobTags(input: TagContent[]): BlobTag[] { + const tags: Record = {}; + input.forEach(element => { + if (element.key !== '@container') { + tags[element.key!] = element.value!; + } + }); + + return Object.entries(tags).map(([key, value]) => { + return { + key: key, + value: value + } + }); } \ No newline at end of file diff --git a/tests/blob/apis/blob.test.ts b/tests/blob/apis/blob.test.ts index ac07bc396..fedd23a8d 100644 --- a/tests/blob/apis/blob.test.ts +++ b/tests/blob/apis/blob.test.ts @@ -3,7 +3,8 @@ import { StorageSharedKeyCredential, newPipeline, BlobServiceClient, - BlobItem + BlobItem, + Tags } from "@azure/storage-blob"; import assert = require("assert"); @@ -479,9 +480,9 @@ describe("BlobAPIs", () => { const metadata = { "Content-SHA256": "a" }; - + // set metadata should fail - let hasError = false; + let hasError = false; try { await blobClient.setMetadata(metadata); } catch (error) { @@ -489,8 +490,7 @@ describe("BlobAPIs", () => { assert.strictEqual(error.code, 'InvalidMetadata'); hasError = true; } - if (!hasError) - { + if (!hasError) { assert.fail(); } @@ -839,7 +839,7 @@ describe("BlobAPIs", () => { ); } }); - + it("Upload blob with accesstier should get accessTierInferred as false @loki", async () => { const blobName = getUniqueName("blob"); @@ -849,7 +849,7 @@ describe("BlobAPIs", () => { const properties = await blobClient.getProperties(); assert.equal(false, properties.accessTierInferred); - + blobClient.delete(); }); @@ -990,15 +990,14 @@ describe("BlobAPIs", () => { await sourceBlobClient.setAccessTier("Archive"); // Copy from Archive blob 
without accesstier will fail - let hasError = false; + let hasError = false; try { await destBlobClient.beginCopyFromURL(sourceBlobClient.url); } catch (error) { assert.deepStrictEqual(error.statusCode, 409); hasError = true; } - if (!hasError) - { + if (!hasError) { assert.fail(); } @@ -1153,11 +1152,10 @@ describe("BlobAPIs", () => { try { await destBlobClient.beginCopyFromURL('/devstoreaccount1/container78/blob125') - } - catch (error) - { + } + catch (error) { assert.deepStrictEqual(error.statusCode, 400); - assert.deepStrictEqual(error.code, 'InvalidHeaderValue'); + assert.deepStrictEqual(error.code, 'InvalidHeaderValue'); return; } assert.fail(); @@ -1202,14 +1200,14 @@ describe("BlobAPIs", () => { // async copy try { await destBlobClient.beginCopyFromURL( - sourceBlobClient.url, - { - conditions: + sourceBlobClient.url, { - ifNoneMatch: "*" - } - }); - } + conditions: + { + ifNoneMatch: "*" + } + }); + } catch (error) { assert.deepStrictEqual(error.statusCode, 409); return; @@ -1219,14 +1217,14 @@ describe("BlobAPIs", () => { // Sync copy try { await destBlobClient.syncCopyFromURL( - sourceBlobClient.url, - { - conditions: + sourceBlobClient.url, { - ifNoneMatch: "*" - } - }); - } + conditions: + { + ifNoneMatch: "*" + } + }); + } catch (error) { assert.deepStrictEqual(error.statusCode, 409); return; @@ -1349,8 +1347,8 @@ describe("BlobAPIs", () => { assert.equal(getResult.leaseStatus, "locked"); await destLeaseClient.releaseLease(); - }); - + }); + it("Synchronized copy blob should work to override tag @loki", async () => { const tags = { tag1: "val1" @@ -1458,7 +1456,7 @@ describe("BlobAPIs", () => { result.contentDisposition, blobHTTPHeaders.blobContentDisposition ); - }); + }); it("set/get blob tag should work, with base blob or snapshot @loki @sql", async () => { const tags = { @@ -1485,13 +1483,13 @@ describe("BlobAPIs", () => { const blobClientSnapshot = blobClient.withSnapshot(snapshotResponse.snapshot!); let outputTags2 = (await 
blobClientSnapshot.getTags()).tags; assert.deepStrictEqual(outputTags2, tags); - + // Set/get tags on snapshot, base blob tags should not be impacted, etag, lastModified should not change var properties1 = await blobClientSnapshot.getProperties(); await blobClientSnapshot.setTags(tags2); outputTags2 = (await blobClientSnapshot.getTags()).tags; assert.deepStrictEqual(outputTags2, tags2); - var properties2 = await blobClientSnapshot.getProperties(); + var properties2 = await blobClientSnapshot.getProperties(); assert.deepStrictEqual(properties1.etag, properties2.etag); assert.deepStrictEqual(properties1.lastModified, properties2.lastModified); @@ -1514,12 +1512,12 @@ describe("BlobAPIs", () => { const blockBlobName1 = "block1"; const blockBlobName2 = "block2"; - + let blockBlobClient1 = containerClient.getBlockBlobClient(blockBlobName1); let blockBlobClient2 = containerClient.getBlockBlobClient(blockBlobName2); - + // Upload block blob with tags - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: tags }); @@ -1529,7 +1527,7 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(outputTags, tags); // Get blob properties, can get tag count - let blobProperties = await blockBlobClient1.getProperties(); + let blobProperties = await blockBlobClient1.getProperties(); assert.deepStrictEqual(blobProperties._response.parsedHeaders.tagCount, 2); // download blob, can get tag count @@ -1552,46 +1550,40 @@ describe("BlobAPIs", () => { ).value; let blobs = (await listResult).segment.blobItems; let blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 
3); blobNotChecked--; } }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // listBlobsFlat with include tags can get tag listResult = ( await containerClient - .listBlobsFlat({includeTags: true}) + .listBlobsFlat({ includeTags: true }) .byPage() .next() ).value; blobs = (await listResult).segment.blobItems; blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); assert.deepStrictEqual(blobItem.tags, tags); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); assert.deepStrictEqual(blobItem.tags, tags2); blobNotChecked--; } - }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + }); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // listBlobsByHierarchy can get tag count const delimiter = "/"; @@ -1603,46 +1595,40 @@ describe("BlobAPIs", () => { ).value; blobs = (await listResult).segment.blobItems; blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); blobNotChecked--; } }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // listBlobsByHierarchy include tags can get tag listResult = ( await containerClient - .listBlobsByHierarchy(delimiter, {includeTags: true}) + .listBlobsByHierarchy(delimiter, { includeTags: true }) 
.byPage() .next() ).value; blobs = (await listResult).segment.blobItems; blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); assert.deepStrictEqual(blobItem.tags, tags); blobNotChecked--; } - if (blobItem.name === blockBlobName2) - { + if (blobItem.name === blockBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); assert.deepStrictEqual(blobItem.tags, tags2); blobNotChecked--; } }); - assert.deepStrictEqual(blobs!.length-2, blobNotChecked); + assert.deepStrictEqual(blobs!.length - 2, blobNotChecked); // clean up blockBlobClient1.delete(); @@ -1666,24 +1652,24 @@ describe("BlobAPIs", () => { const pageBlobName2 = "page2"; const appendBlobName1 = "append1"; const appendBlobName2 = "append2"; - + let blockBlobClient1 = containerClient.getBlockBlobClient(blockBlobName1); let blockBlobClient2 = containerClient.getBlockBlobClient(blockBlobName2); let pageBlobClient1 = containerClient.getBlockBlobClient(pageBlobName1); let pageBlobClient2 = containerClient.getBlockBlobClient(pageBlobName2); let appendBlobClient1 = containerClient.getBlockBlobClient(appendBlobName1); let appendBlobClient2 = containerClient.getBlockBlobClient(appendBlobName2); - + // Upload blob with tags - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: tags }); - await pageBlobClient1.upload(content, content.length, + await pageBlobClient1.upload(content, content.length, { tags: tags }); - await appendBlobClient1.upload(content, content.length, + await appendBlobClient1.upload(content, content.length, { tags: tags }); @@ -1724,28 +1710,25 @@ describe("BlobAPIs", () => { // listBlobsFlat with include tags can get tag let listResult = ( await containerClient - .listBlobsFlat({includeTags: true}) + .listBlobsFlat({ 
includeTags: true }) .byPage() .next() ).value; let blobs = (await listResult).segment.blobItems; let blobNotChecked = blobs!.length; - blobs.forEach((blobItem: BlobItem) => - { - if (blobItem.name === blockBlobName1 || blobItem.name === pageBlobName1 || blobItem.name === appendBlobName1 ) - { + blobs.forEach((blobItem: BlobItem) => { + if (blobItem.name === blockBlobName1 || blobItem.name === pageBlobName1 || blobItem.name === appendBlobName1) { assert.deepStrictEqual(blobItem.properties.tagCount, 2); assert.deepStrictEqual(blobItem.tags, tags); blobNotChecked--; } - if (blobItem.name === blockBlobName2 || blobItem.name === pageBlobName2 || blobItem.name === appendBlobName2 ) - { + if (blobItem.name === blockBlobName2 || blobItem.name === pageBlobName2 || blobItem.name === appendBlobName2) { assert.deepStrictEqual(blobItem.properties.tagCount, 3); assert.deepStrictEqual(blobItem.tags, tags2); blobNotChecked--; } - }); - assert.deepStrictEqual(blobs!.length-6, blobNotChecked); + }); + assert.deepStrictEqual(blobs!.length - 6, blobNotChecked); // clean up blockBlobClient1.delete(); @@ -1756,12 +1739,12 @@ describe("BlobAPIs", () => { appendBlobClient2.delete(); }); - it("set blob tag fail with invalid tag. @loki @sql", async () => { + it("set blob tag fail with invalid tag. 
@loki @sql", async () => { - const blockBlobName1 = "block1"; + const blockBlobName1 = "block1"; let blockBlobClient1 = containerClient.getBlockBlobClient(blockBlobName1); await blockBlobClient1.upload(content, content.length); - + // tag count should <= 10 const tooManyTags = { tag1: "val1", @@ -1775,7 +1758,7 @@ describe("BlobAPIs", () => { tag9: "val2", tag10: "val2", tag11: "val2", - }; + }; let statusCode = 0; try { await await blockBlobClient1.setTags(tooManyTags);; @@ -1794,7 +1777,7 @@ describe("BlobAPIs", () => { tag8: "val2", tag9: "val2", tag10: "val2", - }; + }; await blockBlobClient1.setTags(tags1); let outputTags = (await blockBlobClient1.getTags()).tags; assert.deepStrictEqual(outputTags, tags1); @@ -1802,7 +1785,7 @@ describe("BlobAPIs", () => { // key length should >0 and <= 128 const emptyKeyTags = { "": "123123123", - }; + }; statusCode = 0; try { await await blockBlobClient1.setTags(emptyKeyTags);; @@ -1812,7 +1795,7 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(statusCode, 400); const tooLongKeyTags = { "key123401234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890": "val1", - }; + }; statusCode = 0; try { await await blockBlobClient1.setTags(tooLongKeyTags);; @@ -1822,7 +1805,7 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(statusCode, 400); let tags2 = { "key12301234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890": "val1", - }; + }; await blockBlobClient1.setTags(tags2); outputTags = (await blockBlobClient1.getTags()).tags; assert.deepStrictEqual(outputTags, tags2); @@ -1830,10 +1813,10 @@ describe("BlobAPIs", () => { // value length should <= 256 const tooLongvalueTags = { tag1: 
"val12345678900123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789001234567890123456789001234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890", - }; + }; statusCode = 0; try { - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: tooLongvalueTags }); @@ -1843,8 +1826,8 @@ describe("BlobAPIs", () => { assert.deepStrictEqual(statusCode, 400); let tags3 = { tag1: "va12345678900123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789001234567890123456789001234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890012345678901234567890", - }; - await blockBlobClient1.upload(content, content.length, + }; + await blockBlobClient1.upload(content, content.length, { tags: tags3 }); @@ -1854,10 +1837,10 @@ describe("BlobAPIs", () => { // invalid char in key let invalidTags = { tag1: "abc%abc", - }; + }; statusCode = 0; try { - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: invalidTags }); @@ -1868,10 +1851,10 @@ describe("BlobAPIs", () => { let invalidTags1 = { "abc#ew": "abc", - }; + }; statusCode = 0; try { - await blockBlobClient1.upload(content, content.length, + await blockBlobClient1.upload(content, content.length, { tags: invalidTags1 }); @@ -1882,8 +1865,8 @@ describe("BlobAPIs", () => { let tags4 = { "azAz09 +-./:=_": "azAz09 +-./:=_", - }; - await blockBlobClient1.upload(content, content.length, + }; + await blockBlobClient1.upload(content, content.length, { tags: tags4 }); @@ -1893,8 +1876,8 @@ describe("BlobAPIs", () => { // clean up blockBlobClient1.delete(); }); - - it("Set and get blob tags should work with lease condition @loki @sql", async () => { + + it("Set and get blob tags should work with lease condition @loki @sql", 
async () => { const guid = "ca761232ed4211cebacd00aa0057b223"; const leaseClient = blockBlobClient.getBlobLeaseClient(guid); await leaseClient.acquireLease(-1); @@ -1934,6 +1917,123 @@ describe("BlobAPIs", () => { await leaseClient.releaseLease(); }); + it("get blob tag with ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + // Set/get tags on base blob, etag, lastModified should not change + var properties1 = await blobClient.getProperties(); + await blobClient.setTags(tags); + let outputTags1 = (await blobClient.getTags({ conditions: { tagConditions: `tag1='val1'` } })).tags; + assert.deepStrictEqual(outputTags1, tags); + var properties2 = await blobClient.getProperties(); + assert.deepStrictEqual(properties1.etag, properties2.etag); + assert.deepStrictEqual(properties1.lastModified, properties2.lastModified); + try { + (await blobClient.getTags({ conditions: { tagConditions: `tag1='val11'` } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + }); + + it("get blob tag with long ifTags condition @loki @sql", async () => { + const tags = { + tag1: "val1", + tag2: "val2", + }; + + let queryString = `tag1 <> 'v0' `; + for (let index = 1; index < 700; ++index) { + queryString += `and tag1 <> 'v${index}'`; + } + + await blobClient.setTags(tags); + const result = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(tags, result); + }); + + it("get blob tag with invalid ifTags condition string @loki @sql", async () => { + const tags: Tags = { + key1: 'value1' + }; + await blobClient.setTags(tags); + + let queryString = 
`key111==value1`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + // ifTags header doesn't support @container + queryString = `@container='value1'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + queryString = `key--1='value1'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + queryString = `key1='value$$##'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidHeaderValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidHeaderValue'); + assert.ok((err 
as any).details.message.startsWith('The value for one of the HTTP headers is not in the correct format.')); + } + + // key length longer than 128 + queryString = `key12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890<>'value1'`; + try { + (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.fail("Should not reach here."); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 412); + assert.deepStrictEqual((err as any).code, 'ConditionNotMet'); + assert.deepStrictEqual((err as any).details.errorCode, 'ConditionNotMet'); + assert.ok((err as any).details.message.startsWith('The condition specified using HTTP conditional header(s) is not met.')); + } + + // key length longer than 256 + queryString = `key1<>'value12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'`; + + const result = (await blobClient.getTags({ conditions: { tagConditions: queryString } })).tags; + assert.deepStrictEqual(result, tags); + }); + it("Acquire Lease on Breaking Lease status, if LeaseId not match, throw LeaseIdMismatchWithLease error @loki @sql", async () => { // TODO: implement the case later }); diff --git a/tests/blob/apis/container.test.ts b/tests/blob/apis/container.test.ts index ca9aa039a..29cd5ebb7 100644 --- a/tests/blob/apis/container.test.ts +++ b/tests/blob/apis/container.test.ts @@ -6,7 +6,8 @@ import { BlobServiceClient, generateAccountSASQueryParameters, newPipeline, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import assert = require("assert"); import StorageErrorFactory from "../../../src/blob/errors/StorageErrorFactory"; @@ -674,7 +675,7 @@ describe("ContainerAPIs", () => { 
const inputmarker = undefined; let result = ( await containerClient - .listBlobsByHierarchy("/",{ + .listBlobsByHierarchy("/", { prefix: "" }) .byPage({ @@ -1151,7 +1152,354 @@ describe("ContainerAPIs", () => { assert.equal(result.segment.blobItems.length, 4); }); - // Skip the case currently since js sdk calculate the stringToSign with "+" in prefix instead of decode to space + it("filter blob by tags should work on container @loki @sql", async () => { + + const key1 = getUniqueName("key"); + const key2 = getUniqueName("key2"); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1: Tags = {}; + tags1[key1] = getUniqueName("val1"); + tags1[key2] = "default"; + await appendBlobClient1.create({ tags: tags1 }); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = {}; + tags2[key1] = getUniqueName("val2"); + tags2[key2] = "default"; + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3: Tags = {}; + tags3[key1] = getUniqueName("val3"); + tags3[key2] = "default"; + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1[key1] = tags1[key1]; + for await (const blob of containerClient.findBlobsByTags(`${key1}='${tags1[key1]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1[key1]); + } + + const expectedTags2: Tags = {}; + expectedTags2[key1] = tags2[key1]; + const blobs = []; + for await (const blob of containerClient.findBlobsByTags(`${key1}='${tags2[key1]}'`)) { + blobs.push(blob); + } + assert.deepStrictEqual(blobs.length, 1); + 
assert.deepStrictEqual(blobs[0].containerName, containerName); + assert.deepStrictEqual(blobs[0].name, blobName2); + assert.deepStrictEqual(blobs[0].tags, expectedTags2); + assert.deepStrictEqual(blobs[0].tagValue, tags2[key1]); + + const blobsWithTag2 = []; + for await (const segment of containerClient.findBlobsByTags(`${key2}='default'`).byPage({ + maxPageSize: 1, + })) { + assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 3); + }); + + it("filter blob by tags should work on container @loki @sql", async () => { + + const key1 = getUniqueName("key"); + const key2 = getUniqueName("key2"); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1: Tags = {}; + tags1[key1] = getUniqueName("val1"); + tags1[key2] = "default"; + await appendBlobClient1.create({ tags: tags1 }); + + await appendBlobClient1.createSnapshot(); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = {}; + tags2[key1] = getUniqueName("val2"); + tags2[key2] = "default"; + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3: Tags = {}; + tags3[key1] = getUniqueName("val3"); + tags3[key2] = "default"; + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1[key1] = tags1[key1]; + for await (const blob of containerClient.findBlobsByTags(`${key1}='${tags1[key1]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1[key1]); + } + + const expectedTags2: Tags = {}; + expectedTags2[key1] = 
tags2[key1]; + const blobs = []; + for await (const blob of containerClient.findBlobsByTags(`${key1}='${tags2[key1]}'`)) { + blobs.push(blob); + } + assert.deepStrictEqual(blobs.length, 1); + assert.deepStrictEqual(blobs[0].containerName, containerName); + assert.deepStrictEqual(blobs[0].name, blobName2); + assert.deepStrictEqual(blobs[0].tags, expectedTags2); + assert.deepStrictEqual(blobs[0].tagValue, tags2[key1]); + + const blobsWithTag2 = []; + for await (const segment of containerClient.findBlobsByTags(`${key2}='default'`).byPage({ + maxPageSize: 1, + })) { + assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 3); + }); + + it("filter blob by tags with more than limited conditions on container @loki @sql", async () => { + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}' and `; + } + + queryString += `anotherkey='anotherValue'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query: there can be at most 10 unique tags in a query')); + } + + it("filter blob by tags with conditions number equal to limitation on container @loki @sql", async function () { + const containerName = 
getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}'`; + } + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with invalid key chars on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + let queryString = `'key 1'='valffffff'`; + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + queryString = `'key-1'='valffffff'`; + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch 
(err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + containerClient.delete(); + }); + + it("filter blob by tags with valid special key chars on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long key on container @loki @sql", async function () { + const queryString = `key12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890='value'`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + 
assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag must be between 1 and 128 characters in length')); + } + }); + + it("filter blob by tags with invalid value chars on container @loki @sql", async function () { + const queryString = `key1='valffffff @'`; + + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('not permitted in tag name or value')); + } + }); + + it("filter blob by tags with valid special value chars on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value +-.:=_/' + }; + const queryString = `key_1='value +-.:=_/' and @container='${containerName}'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of containerClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long 
value on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const queryString = `key_1='value12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'`; + try { + (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag value must be between 0 and 256 characters in length')); + } + containerClient.delete(); + }); + + it("filter blob by tags with continuationToken on container @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + for (let index = 0; index < 5002; ++index) { + const blobName1 = getUniqueName("blobname" + index); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + } + + let result = (await containerClient.findBlobsByTags(queryString).byPage().next()).value; + assert.ok(result.continuationToken !== undefined); + + await containerClient.delete(); + }); + }); + + // Skip the case currently since js sdk caculate the stringToSign with "+" in prefix 
instead of decode to space it.skip("List blob should success with '+' in query @loki @sql", async () => { const blobClients = []; let blobNames: Array = [ @@ -1165,7 +1513,7 @@ describe("ContainerAPIs", () => { await blockBlobClient.upload("", 0); blobClients.push(blobClient); } - + // list with prefix has "+" instead of "%20" for space // create service client let pipeline = newPipeline( @@ -1205,7 +1553,7 @@ describe("ContainerAPIs", () => { gotNames.push(item.name); } assert.deepStrictEqual(gotNames, blobNames); - + // clean up for (const blob of blobClients) { await blob.delete(); diff --git a/tests/blob/apis/service.test.ts b/tests/blob/apis/service.test.ts index 1f52e86d9..cf9e94850 100644 --- a/tests/blob/apis/service.test.ts +++ b/tests/blob/apis/service.test.ts @@ -6,7 +6,8 @@ import { generateAccountSASQueryParameters, newPipeline, SASProtocol, - StorageSharedKeyCredential + StorageSharedKeyCredential, + Tags } from "@azure/storage-blob"; import * as assert from "assert"; @@ -55,12 +56,12 @@ describe("ServiceAPIs", () => { await server.clean(); }); - it(`getUserDelegationKey with Key credential should fail @loki @sql`, async () => { + it(`getUserDelegationKey with Key credential should fail @loki @sql`, async () => { const startTime = new Date(); startTime.setHours(startTime.getHours() - 1); const expiryTime = new Date(); expiryTime.setDate(expiryTime.getDate() + 1); - + try { await serviceClient.getUserDelegationKey(startTime, expiryTime); assert.fail("Should fail to invoke getUserDelegationKey with account key credentials") @@ -72,7 +73,7 @@ describe("ServiceAPIs", () => { it(`getUserDelegationKey with SAS token credential should fail @loki @sql`, async () => { const sasTokenStart = new Date(); sasTokenStart.setHours(sasTokenStart.getHours() - 1); - + const sasTokenExpiry = new Date(); sasTokenExpiry.setDate(sasTokenExpiry.getDate() + 1); @@ -98,11 +99,11 @@ describe("ServiceAPIs", () => { const skStart = new Date(); skStart.setHours(skStart.getHours() 
- 1); - + const skExpiry = new Date(); skExpiry.setDate(skExpiry.getDate() + 1); - - try { + + try { await serviceClientWithSAS.getUserDelegationKey(skStart, skExpiry); assert.fail("Should fail to invoke getUserDelegationKey with SAS token credentials") } catch (error) { @@ -411,7 +412,7 @@ describe("ServiceAPIs", () => { await containerClient1.delete(); await containerClient2.delete(); }); - + // fix issue 2382, 2416 it("ListContainers without include metadata should not return container metadata. @loki @sql", async () => { const containerNamePrefix = getUniqueName("container"); @@ -431,7 +432,7 @@ describe("ServiceAPIs", () => { .byPage() .next() ).value; - + assert.equal(result1.containerItems!.length, 2); assert.ok(result1.containerItems![0].name.startsWith(containerNamePrefix)); assert.ok(result1.containerItems![1].name.startsWith(containerNamePrefix)); @@ -448,7 +449,7 @@ describe("ServiceAPIs", () => { .byPage() .next() ).value; - + assert.equal(result2.containerItems!.length, 2); assert.ok(result2.containerItems![0].name.startsWith(containerNamePrefix)); assert.ok(result2.containerItems![1].name.startsWith(containerNamePrefix)); @@ -511,6 +512,320 @@ describe("ServiceAPIs", () => { assert.ok(err); });; }); + + it("Find blob by tags should work @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const key1 = getUniqueName("key"); + const key2 = getUniqueName("key2"); + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + const tags1: Tags = {}; + tags1[key1] = getUniqueName("val1"); + tags1[key2] = "default"; + await appendBlobClient1.create({ tags: tags1 }); + + const blobName2 = getUniqueName("blobname2"); + const appendBlobClient2 = containerClient.getAppendBlobClient(blobName2); + const tags2: Tags = {}; + tags2[key1] = 
getUniqueName("val2"); + tags2[key2] = "default"; + await appendBlobClient2.create({ tags: tags2 }); + + const blobName3 = getUniqueName("blobname3"); + const appendBlobClient3 = containerClient.getAppendBlobClient(blobName3); + const tags3: Tags = {}; + tags3[key1] = getUniqueName("val3"); + tags3[key2] = "default"; + await appendBlobClient3.create({ tags: tags3 }); + + const expectedTags1: Tags = {}; + expectedTags1[key1] = tags1[key1]; + for await (const blob of serviceClient.findBlobsByTags(`${key1}='${tags1[key1]}'`)) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, expectedTags1); + assert.deepStrictEqual(blob.tagValue, tags1[key1]); + } + + const expectedTags2: Tags = {}; + expectedTags2[key1] = tags2[key1]; + const blobs = []; + for await (const blob of serviceClient.findBlobsByTags(`${key1}='${tags2[key1]}'`)) { + blobs.push(blob); + } + assert.deepStrictEqual(blobs.length, 1); + assert.deepStrictEqual(blobs[0].containerName, containerName); + assert.deepStrictEqual(blobs[0].name, blobName2); + assert.deepStrictEqual(blobs[0].tags, expectedTags2); + assert.deepStrictEqual(blobs[0].tagValue, tags2[key1]); + + const blobsWithTag2 = []; + for await (const segment of serviceClient.findBlobsByTags(`${key2}='default'`).byPage({ + maxPageSize: 1, + })) { + assert.ok(segment.blobs.length <= 1); + for (const blob of segment.blobs) { + blobsWithTag2.push(blob); + } + } + assert.deepStrictEqual(blobsWithTag2.length, 3); + + for await (const blob of serviceClient.findBlobsByTags( + `@container='${containerName}' AND ${key1}='${tags1[key1]}' AND ${key2}='default'`, + )) { + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags1); + assert.deepStrictEqual(blob.tagValue, ""); + } + + await containerClient.delete(); + }); + + it("filter blob by tags with more than limited conditions 
on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}' and `; + } + + queryString += `anotherkey='anotherValue'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail("Should not reach here"); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query: there can be at most 10 unique tags in a query')); + } + + await containerClient.delete(); + }); + + it("filter blob by tags with conditions number equal to limitation on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = {}; + const tagsLength = 10; + + let queryString = ''; + for (let i = 0; i < tagsLength; ++i) { + const key = getUniqueName("key" + i); + const value = getUniqueName("val" + i); + tags[key] = value; + queryString += `${key}='${value}' and `; + } + + // key @container isn't count in limitation + queryString += `@container='${containerName}'`; + + const blobName1 = getUniqueName("blobname1"); + const 
appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of serviceClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with invalid key chars on service @loki @sql", async function () { + let queryString = `key1='valffffff' and @container11='1111'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('unsupported parameter')); + } + + queryString = `'key 1'='valffffff'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + + queryString = `'key-1'='valffffff'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + 
assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position')); + } + }); + + it("filter blob by tags with valid special key chars on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of serviceClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long key @loki @sql", async function () { + const queryString = `key12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890='value'`; + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error 
parsing query at or near character position') + && (err as any).details.message.includes('tag must be between 1 and 128 characters in length')); + } + }); + + it("filter blob by tags with invalid value chars on service @loki @sql", async function () { + const queryString = `key1='valffffff @'`; + + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('not permitted in tag name or value')); + } + }); + + it("filter blob by tags with valid special value chars on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value +-.:=_/' + }; + const queryString = `key_1='value +-.:=_/' and @container='${containerName}'`; + + const blobName1 = getUniqueName("blobname1"); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + + await appendBlobClient1.createSnapshot(); + + let blobCountCount = 0; + for await (const blob of serviceClient.findBlobsByTags(queryString)) { + ++blobCountCount; + assert.deepStrictEqual(blob.containerName, containerName); + assert.deepStrictEqual(blob.name, blobName1); + assert.deepStrictEqual(blob.tags, tags); + } + assert.deepStrictEqual(blobCountCount, 1, "Blob with snapshot should not be returned."); + + await containerClient.delete(); + }); + + it("filter blob by tags with long value @loki @sql", async function () { + const queryString = 
`key_1='value12345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890'`; + try { + (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.fail('Should not reach here'); + } + catch (err) { + assert.deepStrictEqual((err as any).statusCode, 400); + assert.deepStrictEqual((err as any).code, 'InvalidQueryParameterValue'); + assert.deepStrictEqual((err as any).details.errorCode, 'InvalidQueryParameterValue'); + assert.ok((err as any).details.message.startsWith('Error parsing query at or near character position') + && (err as any).details.message.includes('tag value must be between 0 and 256 characters in length')); + } + }); + + it("filter blob by tags with continuationToken on service @loki @sql", async function () { + const containerName = getUniqueName("container1"); + const containerClient = serviceClient.getContainerClient(containerName); + await containerClient.create(); + + const tags: Tags = { + key_1: 'value_1' + }; + const queryString = `key_1='value_1'`; + + for (let index = 0; index < 5002; ++index) { + const blobName1 = getUniqueName("blobname" + index); + const appendBlobClient1 = containerClient.getAppendBlobClient(blobName1); + await appendBlobClient1.create({ tags: tags }); + } + + let result = (await serviceClient.findBlobsByTags(queryString).byPage().next()).value; + assert.ok(result.continuationToken !== undefined); + + await containerClient.delete(); + }); }); describe("ServiceAPIs - secondary location endpoint", () => {