diff --git a/.eslintrc.json b/.eslintrc.json index 355ff1839..e95ead59b 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,8 +1,7 @@ { "env": { "es6": true, - "node": true, - "mocha": true + "node": true }, "extends": [ "eslint:recommended", @@ -67,7 +66,6 @@ "unicorn/prefer-set-has": "off", "unicorn/prefer-spread": "off", "unicorn/prefer-string-replace-all": "error", - "unicorn/unicorn/no-lonely-if": "off", "arrow-body-style": ["error", "as-needed"], "curly": "error", "no-console": "error", @@ -84,6 +82,7 @@ } } ], + "jsdoc/require-param-type": "error", "jsdoc/tag-lines": ["warn", "any", { "startLines": 1 }], "spaced-comment": ["warn", "always", { "block": { "exceptions": ["*"], "balanced": true } }] }, @@ -103,7 +102,8 @@ "extends": ["plugin:mocha/recommended"], "plugins": ["mocha"], "rules": { - "mocha/no-mocha-arrows": "off" + "mocha/no-mocha-arrows": "off", + "mocha/no-pending-tests": "off" } } ] diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 1412a88c2..3664a97a2 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -2,9 +2,9 @@ ## What changes did you make? (Give an overview) -... +- closes #1234 -## Is there anything you'd like reviewers to focus on? +## Further details (optional) ... diff --git a/.husky/post-checkout b/.husky/post-checkout index 2607e40f5..5bf2cfe67 100644 --- a/.husky/post-checkout +++ b/.husky/post-checkout @@ -32,6 +32,7 @@ if [[ ${PACKAGES[@]} ]]; then done echo "📦 Running npm install to update your dependencies..." npm install + npm run lint:fix else echo "📦 All packages up-to-date. No need to run npm install." fi diff --git a/.husky/post-merge b/.husky/post-merge index 06f668cd4..a5b4e597b 100644 --- a/.husky/post-merge +++ b/.husky/post-merge @@ -16,6 +16,7 @@ if [[ ${PACKAGES[@]} ]]; then done echo "📦 Running npm install to update your dependencies..." npm install + npm run lint:fix else echo "📦 All packages up-to-date. No need to run npm install." fi \ No newline at end of file diff --git a/docs/dist/documentation.md b/docs/dist/documentation.md index 8027e7538..8869a8e2e 100644 --- a/docs/dist/documentation.md +++ b/docs/dist/documentation.md @@ -186,11 +186,32 @@ Provides default functionality that can be overwritten by child metadata type cl
csvToArray(csv) ⇒ Array.<string>
helper to convert CSVs into an array. if only one value was given, it's also returned as an array

+Mcdev.(methodName, businessUnit, [selectedType], [keys]) ⇒ Promise.<boolean>
+run a method across BUs
+
+Mcdev.(methodName, cred, bu, [type], keyArr) ⇒ Promise.<boolean>
+helper for Mcdev.#runMethod
+
+Mcdev.(selectedType, buObject) ⇒ Array.<string>
+helper for Mcdev.#runOnBU
+
+Automation.(metadata) ⇒ boolean
+helper for postRetrieveTasks and execute
+
+Automation.(metadataMap, key) ⇒ Promise.<object>
+helper for execute
+
+Automation.(metadata) ⇒ Promise.<object>
+helper for pause
+
+Automation.(metadata)
+helper for preDeployTasks and execute
+
Automation.(metadataMap, key) ⇒ Promise.<void>
helper for postDeployTasks

-Automation.(metadataMap, originalMetadataMap, key)
-helper for postDeployTasks
+Automation.(metadataMap, originalMetadataMap, key) ⇒ Promise.<object>
+helper for postDeployTasks

getUserName(userList, item, fieldname) ⇒ string
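
Illustrative sketch (not part of the patch): the overview entries above document the new `execute`/`pause` surface added by this PR. Assuming the library is consumed as the `mcdev` npm package and using placeholder credential/BU, type and key values, a programmatic call could look like this:

```js
// Hypothetical usage of the new Mcdev.execute / Mcdev.pause methods documented above.
// 'MyCredential/MyBU', 'automation' and 'my_automation_key' are placeholder values.
const Mcdev = require('mcdev');

(async () => {
    // start the given item(s) on the selected business unit
    const started = await Mcdev.execute('MyCredential/MyBU', 'automation', ['my_automation_key']);
    // pause the same item(s) again afterwards
    const paused = await Mcdev.pause('MyCredential/MyBU', 'automation', ['my_automation_key']);
    console.log({ started, paused });
})();
```
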
@@ -371,7 +392,7 @@ Source and target business units are also compared before the deployment to appl * [new Deployer(properties, buObject)](#new_Deployer_new) * _instance_ * [.metadata](#Deployer+metadata) : TYPE.MultiMetadataTypeMap - * [._deploy([typeArr], [keyArr], [fromRetrieve], [isRefresh])](#Deployer+_deploy) ⇒ Promise.<TYPE.MultiMetadataTypeMap> + * [._deploy([typeArr], [keyArr], [fromRetrieve])](#Deployer+_deploy) ⇒ Promise.<TYPE.MultiMetadataTypeMap> * _static_ * [.deploy(businessUnit, [selectedTypesArr], [keyArr], [fromRetrieve])](#Deployer.deploy) ⇒ Promise.<Object.<string, TYPE.MultiMetadataTypeMap>> * [._deployBU(cred, bu, properties, [typeArr], [keyArr], [fromRetrieve])](#Deployer._deployBU) ⇒ Promise.<TYPE.MultiMetadataTypeMap> @@ -395,7 +416,7 @@ Creates a Deployer, uses v2 auth if v2AuthOptions are passed. **Kind**: instance property of [Deployer](#Deployer) -### deployer.\_deploy([typeArr], [keyArr], [fromRetrieve], [isRefresh]) ⇒ Promise.<TYPE.MultiMetadataTypeMap> +### deployer.\_deploy([typeArr], [keyArr], [fromRetrieve]) ⇒ Promise.<TYPE.MultiMetadataTypeMap> Deploy all metadata that is located in the deployDir **Kind**: instance method of [Deployer](#Deployer) @@ -406,7 +427,6 @@ Deploy all metadata that is located in the deployDir | [typeArr] | Array.<TYPE.SupportedMetadataTypes> | limit deployment to given metadata type (can include subtype) | | [keyArr] | Array.<string> | limit deployment to given metadata keys | | [fromRetrieve] | boolean | if true, no folders will be updated/created | -| [isRefresh] | boolean | optional flag to indicate that triggeredSend should be refreshed after deployment of assets | @@ -426,7 +446,7 @@ Deploys all metadata located in the 'deploy' directory to the specified business ### Deployer.\_deployBU(cred, bu, properties, [typeArr], [keyArr], [fromRetrieve]) ⇒ Promise.<TYPE.MultiMetadataTypeMap> -helper for [deploy](deploy) +helper for [deploy](#Deployer.deploy) **Kind**: static method of [Deployer](#Deployer) **Returns**: Promise.<TYPE.MultiMetadataTypeMap> - ensure that BUs are worked on sequentially @@ -448,11 +468,11 @@ Returns metadata of a business unit that is saved locally **Kind**: static method of [Deployer](#Deployer) **Returns**: TYPE.MultiMetadataTypeMap - Metadata of BU in local directory -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| deployDir | string | | root directory of metadata. | -| [typeArr] | Array.<string> | | limit deployment to given metadata type | -| [listBadKeys] | boolean | false | do not print errors, used for badKeys() | +| Param | Type | Description | +| --- | --- | --- | +| deployDir | string | root directory of metadata. 
| +| [typeArr] | Array.<string> | limit deployment to given metadata type | +| [listBadKeys] | boolean | do not print errors, used for badKeys() | @@ -496,8 +516,8 @@ main class * [.buildDefinition(businessUnit, selectedType, name, market)](#Mcdev.buildDefinition) ⇒ Promise.<void> * [.buildDefinitionBulk(listName, type, name)](#Mcdev.buildDefinitionBulk) ⇒ Promise.<void> * [.getFilesToCommit(businessUnit, selectedType, keyArr)](#Mcdev.getFilesToCommit) ⇒ Promise.<Array.<string>> - * [.execute(businessUnit, [selectedTypesArr], keys)](#Mcdev.execute) ⇒ Promise.<boolean> - * [._executeBU(cred, bu, [selectedTypesArr], keyArr)](#Mcdev._executeBU) ⇒ Promise.<boolean> + * [.execute(businessUnit, [selectedType], [keys])](#Mcdev.execute) ⇒ Promise.<boolean> + * [.pause(businessUnit, [selectedType], [keys])](#Mcdev.pause) ⇒ Promise.<boolean> @@ -752,8 +772,8 @@ Build a specific metadata file based on a template using a list of bu-market com -### Mcdev.execute(businessUnit, [selectedTypesArr], keys) ⇒ Promise.<boolean> -Start an item (query) +### Mcdev.execute(businessUnit, [selectedType], [keys]) ⇒ Promise.<boolean> +Start/execute an item **Kind**: static method of [Mcdev](#Mcdev) **Returns**: Promise.<boolean> - true if all started successfully, false if not @@ -761,23 +781,22 @@ Start an item (query) | Param | Type | Description | | --- | --- | --- | | businessUnit | string | name of BU | -| [selectedTypesArr] | Array.<TYPE.SupportedMetadataTypes> | limit to given metadata types | -| keys | Array.<string> | customerkey of the metadata | +| [selectedType] | TYPE.SupportedMetadataTypes | limit to given metadata types | +| [keys] | Array.<string> | customerkey of the metadata | - + -### Mcdev.\_executeBU(cred, bu, [selectedTypesArr], keyArr) ⇒ Promise.<boolean> -helper for [execute](execute) +### Mcdev.pause(businessUnit, [selectedType], [keys]) ⇒ Promise.<boolean> +pause an item **Kind**: static method of [Mcdev](#Mcdev) -**Returns**: Promise.<boolean> - true if all items were executed, false otherwise +**Returns**: Promise.<boolean> - true if all started successfully, false if not | Param | Type | Description | | --- | --- | --- | -| cred | string | name of Credential | -| bu | string | name of BU | -| [selectedTypesArr] | Array.<TYPE.SupportedMetadataTypes> | limit execution to given metadata type | -| keyArr | Array.<string> | customerkey of the metadata | +| businessUnit | string | name of BU | +| [selectedType] | TYPE.SupportedMetadataTypes | limit to given metadata types | +| [keys] | Array.<string> | customerkey of the metadata | @@ -798,7 +817,7 @@ FileTransfer MetadataType * [._retrieveExtendedFile(metadata, subType, retrieveDir)](#Asset._retrieveExtendedFile) ⇒ Promise.<void> * [._readExtendedFileFromFS(metadata, subType, deployDir, [pathOnly])](#Asset._readExtendedFileFromFS) ⇒ Promise.<string> * [.postRetrieveTasks(metadata)](#Asset.postRetrieveTasks) ⇒ TYPE.CodeExtractItem - * [.postDeployTasks(metadata, _, createdUpdated, [isRefresh])](#Asset.postDeployTasks) ⇒ Promise.<void> + * [.postDeployTasks(metadata, _, createdUpdated)](#Asset.postDeployTasks) ⇒ Promise.<void> * [.preDeployTasks(metadata, deployDir)](#Asset.preDeployTasks) ⇒ Promise.<TYPE.AssetItem> * [._getMainSubtype(extendedSubType)](#Asset._getMainSubtype) ⇒ string * [.buildDefinitionForNested(templateDir, targetDir, metadata, templateVariables, templateName)](#Asset.buildDefinitionForNested) ⇒ Promise.<void> @@ -931,7 +950,7 @@ This method retrieves these and saves them alongside the metadata json ### 
Asset.\_readExtendedFileFromFS(metadata, subType, deployDir, [pathOnly]) ⇒ Promise.<string> -helper for [preDeployTasks](preDeployTasks) +helper for [preDeployTasks](#Asset.preDeployTasks) Some metadata types store their actual content as a separate file, e.g. images This method reads these from the local FS stores them in the metadata object allowing to deploy it @@ -959,7 +978,7 @@ manages post retrieve steps -### Asset.postDeployTasks(metadata, _, createdUpdated, [isRefresh]) ⇒ Promise.<void> +### Asset.postDeployTasks(metadata, _, createdUpdated) ⇒ Promise.<void> Gets executed after deployment of metadata type **Kind**: static method of [Asset](#Asset) @@ -970,7 +989,6 @@ Gets executed after deployment of metadata type | metadata | TYPE.MetadataTypeMap | metadata mapped by their keyField | | _ | TYPE.MetadataTypeMap | originalMetadata to be updated (contains additioanl fields) | | createdUpdated | Object | counter representing successful creates/updates | -| [isRefresh] | boolean | optional flag to indicate that triggeredSend should be refreshed after deployment of assets | @@ -1078,7 +1096,7 @@ Asset-specific script that retrieves the folder ID from cache and updates the gi ### Asset.\_mergeCode(metadata, deployDir, subType, [templateName], [fileListOnly]) ⇒ Promise.<Array.<TYPE.CodeExtract>> -helper for [preDeployTasks](preDeployTasks) that loads extracted code content back into JSON +helper for [preDeployTasks](#Asset.preDeployTasks) that loads extracted code content back into JSON **Kind**: static method of [Asset](#Asset) **Returns**: Promise.<Array.<TYPE.CodeExtract>> - fileList for templating (disregarded during deployment) @@ -1094,7 +1112,7 @@ helper for [preDeployTasks](preDeployTasks) that loads extracted code content ba ### Asset.\_mergeCode\_slots(prefix, metadataSlots, readDirArr, subtypeExtension, subDirArr, fileList, customerKey, [templateName], [fileListOnly]) ⇒ Promise.<void> -helper for [preDeployTasks](preDeployTasks) that loads extracted code content back into JSON +helper for [preDeployTasks](#Asset.preDeployTasks) that loads extracted code content back into JSON **Kind**: static method of [Asset](#Asset) **Returns**: Promise.<void> - - @@ -1114,7 +1132,7 @@ helper for [preDeployTasks](preDeployTasks) that loads extracted code content ba ### Asset.\_extractCode(metadata) ⇒ TYPE.CodeExtractItem -helper for [postRetrieveTasks](postRetrieveTasks) that finds code content in JSON and extracts it +helper for [postRetrieveTasks](#Asset.postRetrieveTasks) that finds code content in JSON and extracts it to allow saving that separately and formatted **Kind**: static method of [Asset](#Asset) @@ -1238,7 +1256,9 @@ Automation MetadataType * [.retrieveForCache()](#Automation.retrieveForCache) ⇒ Promise.<TYPE.AutomationMapObj> * [.retrieveAsTemplate(templateDir, name, templateVariables)](#Automation.retrieveAsTemplate) ⇒ Promise.<TYPE.AutomationItemObj> * [.postRetrieveTasks(metadata)](#Automation.postRetrieveTasks) ⇒ TYPE.AutomationItem \| void - * [.deploy(metadata, targetBU, retrieveDir, [isRefresh])](#Automation.deploy) ⇒ Promise.<TYPE.AutomationMap> + * [.execute(keyArr)](#Automation.execute) ⇒ Promise.<boolean> + * [.pause(keyArr)](#Automation.pause) ⇒ Promise.<boolean> + * [.deploy(metadata, targetBU, retrieveDir)](#Automation.deploy) ⇒ Promise.<TYPE.AutomationMap> * [.create(metadata)](#Automation.create) ⇒ Promise * [.update(metadata, metadataBefore)](#Automation.update) ⇒ Promise * [.preDeployTasks(metadata)](#Automation.preDeployTasks) ⇒ Promise.<TYPE.AutomationItem> 
@@ -1308,9 +1328,33 @@ manages post retrieve steps | --- | --- | --- | | metadata | TYPE.AutomationItem | a single automation | + + +### Automation.execute(keyArr) ⇒ Promise.<boolean> +a function to start query execution via API + +**Kind**: static method of [Automation](#Automation) +**Returns**: Promise.<boolean> - Returns true if all items were executed successfully, otherwise false + +| Param | Type | Description | +| --- | --- | --- | +| keyArr | Array.<string> | customerkey of the metadata | + + + +### Automation.pause(keyArr) ⇒ Promise.<boolean> +a function to start query execution via API + +**Kind**: static method of [Automation](#Automation) +**Returns**: Promise.<boolean> - Returns true if all items were executed successfully, otherwise false + +| Param | Type | Description | +| --- | --- | --- | +| keyArr | Array.<string> | customerkey of the metadata | + -### Automation.deploy(metadata, targetBU, retrieveDir, [isRefresh]) ⇒ Promise.<TYPE.AutomationMap> +### Automation.deploy(metadata, targetBU, retrieveDir) ⇒ Promise.<TYPE.AutomationMap> Deploys automation - the saved file is the original one due to large differences required for deployment **Kind**: static method of [Automation](#Automation) @@ -1321,7 +1365,6 @@ Deploys automation - the saved file is the original one due to large differences | metadata | TYPE.AutomationMap | metadata mapped by their keyField | | targetBU | string | name/shorthand of target businessUnit for mapping | | retrieveDir | string | directory where metadata after deploy should be saved | -| [isRefresh] | boolean | optional flag - so far not used by automation | @@ -2335,7 +2378,7 @@ Event MetadataType * [.retrieveAsTemplate(templateDir, name, templateVariables)](#Event.retrieveAsTemplate) ⇒ Promise.<TYPE.MetadataTypeItemObj> * [.postRetrieveTasks(eventDef)](#Event.postRetrieveTasks) ⇒ TYPE.MetadataTypeItem * [.deleteByKey(key)](#Event.deleteByKey) ⇒ Promise.<boolean> - * [.deploy(metadata, deployDir, retrieveDir, [isRefresh])](#Event.deploy) ⇒ Promise.<TYPE.MetadataTypeMap> + * [.deploy(metadata, deployDir, retrieveDir)](#Event.deploy) ⇒ Promise.<TYPE.MetadataTypeMap> * [.create(metadata)](#Event.create) ⇒ Promise * [.update(metadataEntry)](#Event.update) ⇒ Promise * [.preDeployTasks(metadata)](#Event.preDeployTasks) ⇒ TYPE.MetadataTypeItem @@ -2405,7 +2448,7 @@ Delete a metadata item from the specified business unit -### Event.deploy(metadata, deployDir, retrieveDir, [isRefresh]) ⇒ Promise.<TYPE.MetadataTypeMap> +### Event.deploy(metadata, deployDir, retrieveDir) ⇒ Promise.<TYPE.MetadataTypeMap> Deploys metadata - merely kept here to be able to print [logBeta](#Util.logBeta) once per deploy **Kind**: static method of [Event](#Event) @@ -2416,7 +2459,6 @@ Deploys metadata - merely kept here to be able to print [logBeta](#Util.logBeta) | metadata | TYPE.MetadataTypeMap | metadata mapped by their keyField | | deployDir | string | directory where deploy metadata are saved | | retrieveDir | string | directory where metadata after deploy should be saved | -| [isRefresh] | boolean | optional flag - so far not used by eventDefinition | @@ -2747,10 +2789,10 @@ Returns file contents mapped to their filename without '.json' ending **Kind**: static method of [Folder](#Folder) **Returns**: TYPE.MetadataTypeMap - fileName => fileContent map -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| dir | string | | directory that contains '.json' files to be read | -| [listBadKeys] | boolean | false | do not print errors, used for badKeys() | +| 
Param | Type | Description | +| --- | --- | --- | +| dir | string | directory that contains '.json' files to be read | +| [listBadKeys] | boolean | do not print errors, used for badKeys() | @@ -2923,7 +2965,7 @@ definitionId: A unique UUID provided by Salesforce Marketing Cloud. Each version * [Journey](#Journey) ⇐ [MetadataType](#MetadataType) * [.retrieve(retrieveDir, [_], [__], [key])](#Journey.retrieve) ⇒ Promise.<TYPE.MetadataTypeMapObj> * [.deleteByKey(key)](#Journey.deleteByKey) ⇒ Promise.<boolean> - * [.deploy(metadata, deployDir, retrieveDir, [isRefresh])](#Journey.deploy) ⇒ Promise.<TYPE.MetadataTypeMap> + * [.deploy(metadata, deployDir, retrieveDir)](#Journey.deploy) ⇒ Promise.<TYPE.MetadataTypeMap> * [.update(metadata)](#Journey.update) ⇒ Promise * [.create(metadata)](#Journey.create) ⇒ Promise * [.saveResults(results, retrieveDir, [overrideType], [templateVariables])](#Journey.saveResults) ⇒ Promise.<TYPE.MetadataTypeMap> @@ -2961,7 +3003,7 @@ Delete a metadata item from the specified business unit -### Journey.deploy(metadata, deployDir, retrieveDir, [isRefresh]) ⇒ Promise.<TYPE.MetadataTypeMap> +### Journey.deploy(metadata, deployDir, retrieveDir) ⇒ Promise.<TYPE.MetadataTypeMap> Deploys metadata - merely kept here to be able to print [logBeta](#Util.logBeta) once per deploy **Kind**: static method of [Journey](#Journey) @@ -2972,7 +3014,6 @@ Deploys metadata - merely kept here to be able to print [logBeta](#Util.logBeta) | metadata | TYPE.MetadataTypeMap | metadata mapped by their keyField | | deployDir | string | directory where deploy metadata are saved | | retrieveDir | string | directory where metadata after deploy should be saved | -| [isRefresh] | boolean | optional flag - so far not used by interaction | @@ -3016,7 +3057,7 @@ Helper for writing Metadata to disk, used for Retrieve and deploy ### Journey.\_postRetrieveTasksBulk(metadataMap) -helper for Journey's [saveResults](saveResults). Gets executed after retreive of metadata type and +helper for Journey's [saveResults](#Journey.saveResults). 
Gets executed after retreive of metadata type and **Kind**: static method of [Journey](#Journey) @@ -3155,8 +3196,8 @@ Provides default functionality that can be overwritten by child metadata type cl * [.buObject](#MetadataType.buObject) : TYPE.BuObject * [.getJsonFromFS(dir, [listBadKeys])](#MetadataType.getJsonFromFS) ⇒ TYPE.MetadataTypeMap * [.getFieldNamesToRetrieve([additionalFields], [isCaching])](#MetadataType.getFieldNamesToRetrieve) ⇒ Array.<string> - * [.deploy(metadata, deployDir, retrieveDir, [isRefresh])](#MetadataType.deploy) ⇒ Promise.<TYPE.MetadataTypeMap> - * [.postDeployTasks(upsertResults, originalMetadata, createdUpdated, [isRefresh])](#MetadataType.postDeployTasks) ⇒ void + * [.deploy(metadata, deployDir, retrieveDir)](#MetadataType.deploy) ⇒ Promise.<TYPE.MetadataTypeMap> + * [.postDeployTasks(upsertResults, originalMetadata, createdUpdated)](#MetadataType.postDeployTasks) ⇒ void * [.postCreateTasks(metadataEntry, apiResponse)](#MetadataType.postCreateTasks) ⇒ void * [.postUpdateTasks(metadataEntry, apiResponse)](#MetadataType.postUpdateTasks) ⇒ void * [.postDeployTasks_legacyApi(metadataEntry, apiResponse)](#MetadataType.postDeployTasks_legacyApi) ⇒ Promise.<void> @@ -3174,9 +3215,10 @@ Provides default functionality that can be overwritten by child metadata type cl * [.update(metadata, [metadataBefore])](#MetadataType.update) ⇒ void * [.refresh()](#MetadataType.refresh) ⇒ void * [.execute()](#MetadataType.execute) ⇒ void + * [.pause()](#MetadataType.pause) ⇒ void * [.hasChanged(cachedVersion, metadata, [fieldName])](#MetadataType.hasChanged) ⇒ boolean * [.hasChangedGeneric(cachedVersion, metadata, [fieldName], [silent])](#MetadataType.hasChangedGeneric) ⇒ boolean - * [.upsert(metadataMap, deployDir, [isRefresh])](#MetadataType.upsert) ⇒ Promise.<TYPE.MetadataTypeMap> + * [.upsert(metadataMap, deployDir)](#MetadataType.upsert) ⇒ Promise.<TYPE.MetadataTypeMap> * [.createOrUpdate(metadataMap, metadataKey, hasError, metadataToUpdate, metadataToCreate)](#MetadataType.createOrUpdate) ⇒ 'create' \| 'update' \| 'skip' * [.createREST(metadataEntry, uri)](#MetadataType.createREST) ⇒ Promise.<object> \| null * [.createSOAP(metadataEntry, [handleOutside])](#MetadataType.createSOAP) ⇒ Promise.<object> \| null @@ -3237,10 +3279,10 @@ Returns file contents mapped to their filename without '.json' ending **Kind**: static method of [MetadataType](#MetadataType) **Returns**: TYPE.MetadataTypeMap - fileName => fileContent map -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| dir | string | | directory that contains '.json' files to be read | -| [listBadKeys] | boolean | false | do not print errors, used for badKeys() | +| Param | Type | Description | +| --- | --- | --- | +| dir | string | directory that contains '.json' files to be read | +| [listBadKeys] | boolean | do not print errors, used for badKeys() | @@ -3257,7 +3299,7 @@ Returns fieldnames of Metadata Type. 
'this.definition.fields' variable only set -### MetadataType.deploy(metadata, deployDir, retrieveDir, [isRefresh]) ⇒ Promise.<TYPE.MetadataTypeMap> +### MetadataType.deploy(metadata, deployDir, retrieveDir) ⇒ Promise.<TYPE.MetadataTypeMap> Deploys metadata **Kind**: static method of [MetadataType](#MetadataType) @@ -3268,11 +3310,10 @@ Deploys metadata | metadata | TYPE.MetadataTypeMap | metadata mapped by their keyField | | deployDir | string | directory where deploy metadata are saved | | retrieveDir | string | directory where metadata after deploy should be saved | -| [isRefresh] | boolean | optional flag to indicate that triggeredSend should be refreshed after deployment of assets | -### MetadataType.postDeployTasks(upsertResults, originalMetadata, createdUpdated, [isRefresh]) ⇒ void +### MetadataType.postDeployTasks(upsertResults, originalMetadata, createdUpdated) ⇒ void Gets executed after deployment of metadata type **Kind**: static method of [MetadataType](#MetadataType) @@ -3282,12 +3323,11 @@ Gets executed after deployment of metadata type | upsertResults | TYPE.MetadataTypeMap | metadata mapped by their keyField as returned by update/create | | originalMetadata | TYPE.MetadataTypeMap | metadata to be updated (contains additioanl fields) | | createdUpdated | Object | counter representing successful creates/updates | -| [isRefresh] | boolean | optional flag to indicate that triggeredSend should be refreshed after deployment of assets | ### MetadataType.postCreateTasks(metadataEntry, apiResponse) ⇒ void -helper for [createREST](createREST) +helper for [createREST](#MetadataType.createREST) **Kind**: static method of [MetadataType](#MetadataType) @@ -3299,7 +3339,7 @@ helper for [createREST](createREST) ### MetadataType.postUpdateTasks(metadataEntry, apiResponse) ⇒ void -helper for [updateREST](updateREST) +helper for [updateREST](#MetadataType.updateREST) **Kind**: static method of [MetadataType](#MetadataType) @@ -3311,7 +3351,7 @@ helper for [updateREST](updateREST) ### MetadataType.postDeployTasks\_legacyApi(metadataEntry, apiResponse) ⇒ Promise.<void> -helper for [createREST](createREST) when legacy API endpoints as these do not return the created item but only their new id +helper for [createREST](#MetadataType.createREST) when legacy API endpoints as these do not return the created item but only their new id **Kind**: static method of [MetadataType](#MetadataType) **Returns**: Promise.<void> - - @@ -3492,6 +3532,12 @@ Abstract refresh method that needs to be implemented in child metadata type ### MetadataType.execute() ⇒ void Abstract execute method that needs to be implemented in child metadata type +**Kind**: static method of [MetadataType](#MetadataType) + + +### MetadataType.pause() ⇒ void +Abstract pause method that needs to be implemented in child metadata type + **Kind**: static method of [MetadataType](#MetadataType) @@ -3524,7 +3570,7 @@ test if metadata was actually changed or not to potentially skip it during deplo -### MetadataType.upsert(metadataMap, deployDir, [isRefresh]) ⇒ Promise.<TYPE.MetadataTypeMap> +### MetadataType.upsert(metadataMap, deployDir) ⇒ Promise.<TYPE.MetadataTypeMap> MetadataType upsert, after retrieving from target and comparing to check if create or update operation is needed. 
**Kind**: static method of [MetadataType](#MetadataType) @@ -3534,7 +3580,6 @@ MetadataType upsert, after retrieving from target and comparing to check if crea | --- | --- | --- | | metadataMap | TYPE.MetadataTypeMap | metadata mapped by their keyField | | deployDir | string | directory where deploy metadata are saved | -| [isRefresh] | boolean | optional flag to indicate that triggeredSend should be refreshed after deployment of assets | @@ -3590,7 +3635,7 @@ Updates a single metadata entry via REST | --- | --- | --- | --- | | metadataEntry | TYPE.MetadataTypeItem | | a single metadata Entry | | uri | string | | rest endpoint for PATCH | -| [httpMethod] | 'patch' \| 'post' \| 'put' | 'patch' | defaults to 'patch'; some update requests require PUT instead of PATCH | +| [httpMethod] | 'patch' \| 'post' \| 'put' | patch | defaults to 'patch'; some update requests require PUT instead of PATCH | @@ -3620,7 +3665,7 @@ Updates a single metadata entry via fuel-soap (generic lib not wrapper) ### MetadataType.getSOAPErrorMsg(ex) ⇒ string -helper for [_handleSOAPErrors](_handleSOAPErrors) +helper for [_handleSOAPErrors](#MetadataType._handleSOAPErrors) **Kind**: static method of [MetadataType](#MetadataType) **Returns**: string - error message @@ -3675,7 +3720,7 @@ Used to execute a query/automation etc. ### MetadataType.runDocumentOnRetrieve([singleRetrieve], metadataMap) ⇒ Promise.<void> -helper for [retrieveREST](retrieveREST) and [retrieveSOAP](retrieveSOAP) +helper for [retrieveREST](#MetadataType.retrieveREST) and [retrieveSOAP](#MetadataType.retrieveSOAP) **Kind**: static method of [MetadataType](#MetadataType) **Returns**: Promise.<void> - - @@ -3769,10 +3814,10 @@ checks if the current metadata entry should be saved on retrieve or not **Kind**: static method of [MetadataType](#MetadataType) **Returns**: boolean - true: skip saving == filtered; false: continue with saving -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| metadataEntry | TYPE.MetadataTypeItem | | metadata entry | -| [include] | boolean | false | true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude | +| Param | Type | Description | +| --- | --- | --- | +| metadataEntry | TYPE.MetadataTypeItem | metadata entry | +| [include] | boolean | true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude | @@ -3782,10 +3827,10 @@ optionally filter by what folder something is in **Kind**: static method of [MetadataType](#MetadataType) **Returns**: boolean - true: filtered == do NOT save; false: not filtered == do save -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| metadataEntry | object | | metadata entry | -| [include] | boolean | false | true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude | +| Param | Type | Description | +| --- | --- | --- | +| metadataEntry | object | metadata entry | +| [include] | boolean | true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude | @@ -3805,7 +3850,7 @@ Helper for writing Metadata to disk, used for Retrieve and deploy ### MetadataType.applyTemplateValues(code, templateVariables) ⇒ string -helper for [buildDefinitionForNested](buildDefinitionForNested) +helper for [buildDefinitionForNested](#MetadataType.buildDefinitionForNested) searches extracted file for template variable names and applies the market values **Kind**: static method of [MetadataType](#MetadataType) 
@@ -3819,7 +3864,7 @@ searches extracted file for template variable names and applies the market value ### MetadataType.applyTemplateNames(code, templateVariables) ⇒ string -helper for [buildTemplateForNested](buildTemplateForNested) +helper for [buildTemplateForNested](#MetadataType.buildTemplateForNested) searches extracted file for template variable values and applies the market variable names **Kind**: static method of [MetadataType](#MetadataType) @@ -3833,7 +3878,7 @@ searches extracted file for template variable values and applies the market vari ### MetadataType.buildDefinitionForNested(templateDir, targetDir, metadata, variables, templateName) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildDefinition](buildDefinition) +helper for [buildDefinition](#MetadataType.buildDefinition) handles extracted code if any are found for complex types (e.g script, asset, query) **Kind**: static method of [MetadataType](#MetadataType) @@ -3850,7 +3895,7 @@ handles extracted code if any are found for complex types (e.g script, asset, qu ### MetadataType.buildTemplateForNested(templateDir, targetDir, metadata, templateVariables, templateName) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildTemplate](buildTemplate) +helper for [buildTemplate](#MetadataType.buildTemplate) handles extracted code if any are found for complex types **Kind**: static method of [MetadataType](#MetadataType) @@ -3995,11 +4040,11 @@ Returns metadata of a business unit that is saved locally **Kind**: static method of [MetadataType](#MetadataType) **Returns**: object - Metadata of BU in local directory -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| readDir | string | | root directory of metadata. | -| [listBadKeys] | boolean | false | do not print errors, used for badKeys() | -| [buMetadata] | object | | Metadata of BU in local directory | +| Param | Type | Description | +| --- | --- | --- | +| readDir | string | root directory of metadata. | +| [listBadKeys] | boolean | do not print errors, used for badKeys() | +| [buMetadata] | object | Metadata of BU in local directory | @@ -4190,7 +4235,7 @@ manages post retrieve steps ### MobileKeyword.prepExtractedCode(metadataScript) ⇒ Object -helper for [parseMetadata](parseMetadata) and [_buildForNested](_buildForNested) +helper for [postRetrieveTasks](#MobileKeyword.postRetrieveTasks) and [_buildForNested](#MobileKeyword._buildForNested) **Kind**: static method of [MobileKeyword](#MobileKeyword) **Returns**: Object - returns found extension and file content @@ -4240,7 +4285,7 @@ scripts are saved as 1 json and 1 ssjs file. 
both files need to be run through t ### MobileKeyword.\_buildForNested(templateDir, targetDir, metadata, templateVariables, templateName, mode) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildTemplateForNested](buildTemplateForNested) / [buildDefinitionForNested](buildDefinitionForNested) +helper for [buildTemplateForNested](#MobileKeyword.buildTemplateForNested) / [buildDefinitionForNested](#MobileKeyword.buildDefinitionForNested) handles extracted code if any are found for complex types **Kind**: static method of [MobileKeyword](#MobileKeyword) @@ -4271,7 +4316,7 @@ prepares an event definition for deployment ### MobileKeyword.postCreateTasks(metadataEntry, apiResponse) ⇒ void -helper for [createREST](createREST) +helper for [createREST](#MetadataType.createREST) **Kind**: static method of [MobileKeyword](#MobileKeyword) @@ -4283,7 +4328,7 @@ helper for [createREST](createREST) ### MobileKeyword.postUpdateTasks(metadataEntry, apiResponse) ⇒ void -helper for [updateREST](updateREST) +helper for [updateREST](#MetadataType.updateREST) **Kind**: static method of [MobileKeyword](#MobileKeyword) @@ -4295,7 +4340,7 @@ helper for [updateREST](updateREST) ### MobileKeyword.\_mergeCode(metadata, deployDir, [templateName]) ⇒ Promise.<string> -helper for [preDeployTasks](preDeployTasks) that loads extracted code content back into JSON +helper for [preDeployTasks](#MobileKeyword.preDeployTasks) that loads extracted code content back into JSON **Kind**: static method of [MobileKeyword](#MobileKeyword) **Returns**: Promise.<string> - content for metadata.script @@ -4423,7 +4468,7 @@ Creates a single item ### MobileMessage.\_mergeCode(metadata, deployDir, [templateName]) ⇒ Promise.<string> -helper for [preDeployTasks](preDeployTasks) that loads extracted code content back into JSON +helper for [preDeployTasks](#MobileMessage.preDeployTasks) that loads extracted code content back into JSON **Kind**: static method of [MobileMessage](#MobileMessage) **Returns**: Promise.<string> - code @@ -4437,7 +4482,7 @@ helper for [preDeployTasks](preDeployTasks) that loads extracted code content ba ### MobileMessage.prepExtractedCode(code) ⇒ Object -helper for [parseMetadata](parseMetadata) and [_buildForNested](_buildForNested) +helper for [postRetrieveTasks](#MobileMessage.postRetrieveTasks) and [_buildForNested](#MobileMessage._buildForNested) **Kind**: static method of [MobileMessage](#MobileMessage) **Returns**: Object - returns found extension and file content @@ -4487,7 +4532,7 @@ prepares an event definition for deployment ### MobileMessage.postCreateTasks(metadataEntry, apiResponse) ⇒ void -helper for [createREST](createREST) +helper for [createREST](#MetadataType.createREST) **Kind**: static method of [MobileMessage](#MobileMessage) @@ -4499,7 +4544,7 @@ helper for [createREST](createREST) ### MobileMessage.postUpdateTasks(metadataEntry, apiResponse) ⇒ void -helper for [updateREST](updateREST) +helper for [updateREST](#MetadataType.updateREST) **Kind**: static method of [MobileMessage](#MobileMessage) @@ -4549,7 +4594,7 @@ scripts are saved as 1 json and 1 ssjs file. 
both files need to be run through t ### MobileMessage.\_buildForNested(templateDir, targetDir, metadata, templateVariables, templateName, mode) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildTemplateForNested](buildTemplateForNested) / [buildDefinitionForNested](buildDefinitionForNested) +helper for [buildTemplateForNested](#MobileMessage.buildTemplateForNested) / [buildDefinitionForNested](#MobileMessage.buildDefinitionForNested) handles extracted code if any are found for complex types **Kind**: static method of [MobileMessage](#MobileMessage) @@ -4601,6 +4646,7 @@ Query MetadataType * [.checkForErrors(ex)](#Query.checkForErrors) ⇒ Array.<string> \| void * [.deleteByKey(customerKey)](#Query.deleteByKey) ⇒ boolean * [.postDeleteTasks(customerKey)](#Query.postDeleteTasks) ⇒ void + * [.postDeployTasks(upsertResults)](#Query.postDeployTasks) @@ -4702,7 +4748,7 @@ prepares a Query for deployment ### Query.applyTemplateValues(code, templateVariables) ⇒ string -helper for [buildDefinitionForNested](buildDefinitionForNested) +helper for [buildDefinitionForNested](#Query.buildDefinitionForNested) searches extracted SQL file for template variables and applies the market values **Kind**: static method of [Query](#Query) @@ -4799,6 +4845,17 @@ clean up after deleting a metadata item | --- | --- | --- | | customerKey | string | Identifier of metadata item | + + +### Query.postDeployTasks(upsertResults) +Gets executed after deployment of metadata type + +**Kind**: static method of [Query](#Query) + +| Param | Type | Description | +| --- | --- | --- | +| upsertResults | TYPE.MetadataTypeMap | metadata mapped by their keyField as returned by update/create | + ## Role ⇐ [MetadataType](#MetadataType) @@ -4992,7 +5049,7 @@ Creates a single Script ### Script.\_mergeCode(metadata, deployDir, [templateName]) ⇒ Promise.<string> -helper for [preDeployTasks](preDeployTasks) that loads extracted code content back into JSON +helper for [preDeployTasks](#Script.preDeployTasks) that loads extracted code content back into JSON **Kind**: static method of [Script](#Script) **Returns**: Promise.<string> - content for metadata.script @@ -5057,7 +5114,7 @@ scripts are saved as 1 json and 1 ssjs file. 
both files need to be run through t ### Script.\_buildForNested(templateDir, targetDir, metadata, templateVariables, templateName, mode) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildTemplateForNested](buildTemplateForNested) / [buildDefinitionForNested](buildDefinitionForNested) +helper for [buildTemplateForNested](#Script.buildTemplateForNested) / [buildDefinitionForNested](#Script.buildDefinitionForNested) handles extracted code if any are found for complex types **Kind**: static method of [Script](#Script) @@ -5087,7 +5144,7 @@ Splits the script metadata into two parts and parses in a standard manner ### Script.prepExtractedCode(metadataScript, metadataName) ⇒ Object -helper for [parseMetadata](parseMetadata) and [_buildForNested](_buildForNested) +helper for [parseMetadata](#Script.parseMetadata) and [_buildForNested](#Script._buildForNested) **Kind**: static method of [Script](#Script) **Returns**: Object - returns found extension and file content @@ -5371,7 +5428,7 @@ prepares for deployment ### TransactionalSMS.\_mergeCode(metadata, deployDir, [templateName]) ⇒ Promise.<string> -helper for [preDeployTasks](preDeployTasks) that loads extracted code content back into JSON +helper for [preDeployTasks](#TransactionalSMS.preDeployTasks) that loads extracted code content back into JSON **Kind**: static method of [TransactionalSMS](#TransactionalSMS) **Returns**: Promise.<string> - content for metadata.script @@ -5397,7 +5454,7 @@ manages post retrieve steps ### TransactionalSMS.prepExtractedCode(metadataScript) ⇒ Object -helper for [parseMetadata](parseMetadata) and [_buildForNested](_buildForNested) +helper for [postRetrieveTasks](#TransactionalSMS.postRetrieveTasks) and [_buildForNested](#TransactionalSMS._buildForNested) **Kind**: static method of [TransactionalSMS](#TransactionalSMS) **Returns**: Object - returns found extension and file content @@ -5409,7 +5466,7 @@ helper for [parseMetadata](parseMetadata) and [_buildForNested](_buildForNested) ### TransactionalSMS.buildDefinitionForNested(templateDir, targetDir, metadata, templateVariables, templateName) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildDefinition](#MetadataType.buildDefinition) +helper for [TransactionalMessage.buildDefinition](TransactionalMessage.buildDefinition) handles extracted code if any are found for complex types **Kind**: static method of [TransactionalSMS](#TransactionalSMS) @@ -5426,7 +5483,7 @@ handles extracted code if any are found for complex types ### TransactionalSMS.buildTemplateForNested(templateDir, targetDir, metadata, templateVariables, templateName) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildTemplate](#MetadataType.buildTemplate) +helper for [TransactionalMessage.buildTemplate](TransactionalMessage.buildTemplate) handles extracted code if any are found for complex types **Kind**: static method of [TransactionalSMS](#TransactionalSMS) @@ -5447,7 +5504,7 @@ scripts are saved as 1 json and 1 ssjs file. 
both files need to be run through t ### TransactionalSMS.\_buildForNested(templateDir, targetDir, metadata, templateVariables, templateName, mode) ⇒ Promise.<Array.<Array.<string>>> -helper for [buildTemplateForNested](buildTemplateForNested) / [buildDefinitionForNested](buildDefinitionForNested) +helper for [buildTemplateForNested](#TransactionalSMS.buildTemplateForNested) / [buildDefinitionForNested](#TransactionalSMS.buildDefinitionForNested) handles extracted code if any are found for complex types **Kind**: static method of [TransactionalSMS](#TransactionalSMS) @@ -5500,13 +5557,11 @@ MessageSendActivity MetadataType * [.create(metadata)](#TriggeredSend.create) ⇒ Promise * [.update(metadata)](#TriggeredSend.update) ⇒ Promise * [.deleteByKey(customerKey)](#TriggeredSend.deleteByKey) ⇒ Promise.<boolean> - * [.postRetrieveTasks(metadata)](#TriggeredSend.postRetrieveTasks) ⇒ TYPE.MetadataTypeItem - * [.setFolderPath(metadata)](#TriggeredSend.setFolderPath) - * [.parseMetadata(metadata)](#TriggeredSend.parseMetadata) ⇒ TYPE.MetadataTypeItem \| void + * [.postRetrieveTasks(metadata)](#TriggeredSend.postRetrieveTasks) ⇒ TYPE.MetadataTypeItem \| void * [.preDeployTasks(metadata)](#TriggeredSend.preDeployTasks) ⇒ TYPE.MetadataTypeItem * [.refresh([keyArr], [checkKey])](#TriggeredSend.refresh) ⇒ Promise.<void> * [.getKeysForValidTSDs(metadata)](#TriggeredSend.getKeysForValidTSDs) ⇒ Promise.<Array.<string>> - * [.findRefreshableItems()](#TriggeredSend.findRefreshableItems) ⇒ Promise.<TYPE.MetadataTypeMapObj> + * [.findRefreshableItems([assetLoaded])](#TriggeredSend.findRefreshableItems) ⇒ Promise.<TYPE.MetadataTypeMapObj> * [._refreshItem(key, checkKey)](#TriggeredSend._refreshItem) ⇒ Promise.<boolean> @@ -5562,30 +5617,7 @@ Delete a metadata item from the specified business unit -### TriggeredSend.postRetrieveTasks(metadata) ⇒ TYPE.MetadataTypeItem -manages post retrieve steps - -**Kind**: static method of [TriggeredSend](#TriggeredSend) -**Returns**: TYPE.MetadataTypeItem - Array with one metadata object and one query string - -| Param | Type | Description | -| --- | --- | --- | -| metadata | TYPE.MetadataTypeItem | a single query | - - - -### TriggeredSend.setFolderPath(metadata) -generic script that retrieves the folder path from cache and updates the given metadata with it after retrieve - -**Kind**: static method of [TriggeredSend](#TriggeredSend) - -| Param | Type | Description | -| --- | --- | --- | -| metadata | TYPE.MetadataTypeItem | a single script activity definition | - - - -### TriggeredSend.parseMetadata(metadata) ⇒ TYPE.MetadataTypeItem \| void +### TriggeredSend.postRetrieveTasks(metadata) ⇒ TYPE.MetadataTypeItem \| void parses retrieved Metadata before saving **Kind**: static method of [TriggeredSend](#TriggeredSend) @@ -5593,7 +5625,7 @@ parses retrieved Metadata before saving | Param | Type | Description | | --- | --- | --- | -| metadata | TYPE.MetadataTypeItem | a single query activity definition | +| metadata | TYPE.MetadataTypeItem | a single item | @@ -5623,7 +5655,7 @@ TSD-specific refresh method that finds active TSDs and refreshes them ### TriggeredSend.getKeysForValidTSDs(metadata) ⇒ Promise.<Array.<string>> -helper for [refresh](refresh) that extracts the keys from the TSD item map and eli +helper for [refresh](#TriggeredSend.refresh) that extracts the keys from the TSD item map and eli **Kind**: static method of [TriggeredSend](#TriggeredSend) **Returns**: Promise.<Array.<string>> - keyArr @@ -5634,15 +5666,20 @@ helper for [refresh](refresh) that extracts the keys 
from the TSD item map and e -### TriggeredSend.findRefreshableItems() ⇒ Promise.<TYPE.MetadataTypeMapObj> -helper for [refresh](refresh) that finds active TSDs on the server and filters it by the same rules that [retrieve](retrieve) is using to avoid refreshing TSDs with broken dependencies +### TriggeredSend.findRefreshableItems([assetLoaded]) ⇒ Promise.<TYPE.MetadataTypeMapObj> +helper for [refresh](#TriggeredSend.refresh) that finds active TSDs on the server and filters it by the same rules that [retrieve](#TriggeredSend.retrieve) is using to avoid refreshing TSDs with broken dependencies **Kind**: static method of [TriggeredSend](#TriggeredSend) **Returns**: Promise.<TYPE.MetadataTypeMapObj> - Promise of TSD item map + +| Param | Type | Default | Description | +| --- | --- | --- | --- | +| [assetLoaded] | boolean | false | if run after Asset.deploy via --refresh option this will skip caching assets | + ### TriggeredSend.\_refreshItem(key, checkKey) ⇒ Promise.<boolean> -helper for [refresh](refresh) that pauses, publishes and starts a triggered send +helper for [refresh](#TriggeredSend.refresh) that pauses, publishes and starts a triggered send **Kind**: static method of [TriggeredSend](#TriggeredSend) **Returns**: Promise.<boolean> - true if refresh was successful @@ -5862,7 +5899,7 @@ Retrieve metadata of specified types into local file system and Retriever.metada ### retriever.\_getTypeDependencies(metadataTypes) ⇒ Array.<TYPE.SupportedMetadataTypes> -helper for [retrieve](retrieve) to get all dependencies of the given types +helper for [Retriever.retrieve](Retriever.retrieve) to get all dependencies of the given types **Kind**: instance method of [Retriever](#Retriever) **Returns**: Array.<TYPE.SupportedMetadataTypes> - unique list dependent metadata types @@ -5908,6 +5945,8 @@ CLI entry for SFMC DevTools * [.getKeysString(keyArr, [isId])](#Util.getKeysString) ⇒ string * [.sleep(ms)](#Util.sleep) ⇒ Promise.<void> * [.getSsjs(code)](#Util.getSsjs) ⇒ string + * [.stringLike(testString, search)](#Util.stringLike) ⇒ boolean + * [.fieldsLike(metadata, [filters])](#Util.fieldsLike) ⇒ boolean @@ -6053,9 +6092,9 @@ wrapper around our standard winston logging to console and logfile **Kind**: static method of [Util](#Util) **Returns**: object - initiated logger for console and file -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| [noLogFile] | boolean | false | optional flag to indicate if we should log to file; CLI logs are always on | +| Param | Type | Description | +| --- | --- | --- | +| [noLogFile] | boolean | optional flag to indicate if we should log to file; CLI logs are always on | @@ -6064,10 +6103,10 @@ initiate winston logger **Kind**: static method of [Util](#Util) -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| [restart] | boolean | false | if true, logger will be restarted; otherwise, an existing logger will be used | -| [noLogFile] | boolean | false | if false, logger will log to file; otherwise, only to console | +| Param | Type | Description | +| --- | --- | --- | +| [restart] | boolean | if true, logger will be restarted; otherwise, an existing logger will be used | +| [noLogFile] | boolean | if false, logger will log to file; otherwise, only to console | @@ -6264,6 +6303,32 @@ the following is invalid: // 3 ``` + + +### Util.stringLike(testString, search) ⇒ boolean +allows us to filter just like with SQL's LIKE operator + +**Kind**: static method of [Util](#Util) +**Returns**: boolean - true if testString matches search + +| 
Param | Type | Description | +| --- | --- | --- | +| testString | string | field value to test | +| search | string | search string in SQL LIKE format | + + + +### Util.fieldsLike(metadata, [filters]) ⇒ boolean +returns true if no LIKE filter is defined or if all filters match + +**Kind**: static method of [Util](#Util) +**Returns**: boolean - true if no LIKE filter is defined or if all filters match + +| Param | Type | Description | +| --- | --- | --- | +| metadata | TYPE.MetadataTypeItem | a single metadata item | +| [filters] | object | only used in recursive calls | + ## MetadataTypeDefinitions @@ -6801,12 +6866,12 @@ reads file from local file system. **Kind**: static method of [File](#File) **Returns**: Promise.<string> \| void - file contents; void on error -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| directory | string \| Array.<string> | | directory where the file is stored | -| filename | string | | name of the file without '.json' ending | -| filetype | string | | filetype suffix | -| [encoding] | string | "'utf8'" | read file with encoding (defaults to utf-8) | +| Param | Type | Description | +| --- | --- | --- | +| directory | string \| Array.<string> | directory where the file is stored | +| filename | string | name of the file without '.json' ending | +| filetype | string | filetype suffix | +| [encoding] | string | read file with encoding (defaults to utf-8) | @@ -6869,9 +6934,9 @@ Initalises Prettier formatting lib async. **Kind**: static method of [File](#File) **Returns**: Promise.<boolean> - success of config load -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| [filetype] | string | "'html'" | filetype ie. JSON or SSJS | +| Param | Type | Description | +| --- | --- | --- | +| [filetype] | string | filetype ie. 
JSON or SSJS | @@ -7798,6 +7863,8 @@ Util that contains logger and simple util methods * [.getKeysString(keyArr, [isId])](#Util.getKeysString) ⇒ string * [.sleep(ms)](#Util.sleep) ⇒ Promise.<void> * [.getSsjs(code)](#Util.getSsjs) ⇒ string + * [.stringLike(testString, search)](#Util.stringLike) ⇒ boolean + * [.fieldsLike(metadata, [filters])](#Util.fieldsLike) ⇒ boolean @@ -7943,9 +8010,9 @@ wrapper around our standard winston logging to console and logfile **Kind**: static method of [Util](#Util) **Returns**: object - initiated logger for console and file -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| [noLogFile] | boolean | false | optional flag to indicate if we should log to file; CLI logs are always on | +| Param | Type | Description | +| --- | --- | --- | +| [noLogFile] | boolean | optional flag to indicate if we should log to file; CLI logs are always on | @@ -7954,10 +8021,10 @@ initiate winston logger **Kind**: static method of [Util](#Util) -| Param | Type | Default | Description | -| --- | --- | --- | --- | -| [restart] | boolean | false | if true, logger will be restarted; otherwise, an existing logger will be used | -| [noLogFile] | boolean | false | if false, logger will log to file; otherwise, only to console | +| Param | Type | Description | +| --- | --- | --- | +| [restart] | boolean | if true, logger will be restarted; otherwise, an existing logger will be used | +| [noLogFile] | boolean | if false, logger will log to file; otherwise, only to console | @@ -8154,6 +8221,32 @@ the following is invalid: // 3 ``` + + +### Util.stringLike(testString, search) ⇒ boolean +allows us to filter just like with SQL's LIKE operator + +**Kind**: static method of [Util](#Util) +**Returns**: boolean - true if testString matches search + +| Param | Type | Description | +| --- | --- | --- | +| testString | string | field value to test | +| search | string | search string in SQL LIKE format | + + + +### Util.fieldsLike(metadata, [filters]) ⇒ boolean +returns true if no LIKE filter is defined or if all filters match + +**Kind**: static method of [Util](#Util) +**Returns**: boolean - true if no LIKE filter is defined or if all filters match + +| Param | Type | Description | +| --- | --- | --- | +| metadata | TYPE.MetadataTypeItem | a single metadata item | +| [filters] | object | only used in recursive calls | + ## csvToArray(csv) ⇒ Array.<string> @@ -8166,6 +8259,98 @@ helper to convert CSVs into an array. 
if only one value was given, it's also ret | --- | --- | --- | | csv | string | potentially comma-separated value or null | + + +## Mcdev.(methodName, businessUnit, [selectedType], [keys]) ⇒ Promise.<boolean> +run a method across BUs + +**Kind**: global function +**Returns**: Promise.<boolean> - true if all started successfully, false if not + +| Param | Type | Description | +| --- | --- | --- | +| methodName | 'execute' \| 'pause' | what to run | +| businessUnit | string | name of BU | +| [selectedType] | TYPE.SupportedMetadataTypes | limit to given metadata types | +| [keys] | Array.<string> | customerkey of the metadata | + + + +## Mcdev.(methodName, cred, bu, [type], keyArr) ⇒ Promise.<boolean> +helper for [Mcdev.#runMethod](Mcdev.#runMethod) + +**Kind**: global function +**Returns**: Promise.<boolean> - true if all items were executed, false otherwise + +| Param | Type | Description | +| --- | --- | --- | +| methodName | 'execute' \| 'pause' | what to run | +| cred | string | name of Credential | +| bu | string | name of BU | +| [type] | TYPE.SupportedMetadataTypes | limit execution to given metadata type | +| keyArr | Array.<string> | customerkey of the metadata | + + + +## Mcdev.(selectedType, buObject) ⇒ Array.<string> +helper for [Mcdev.#runOnBU](Mcdev.#runOnBU) + +**Kind**: global function +**Returns**: Array.<string> - keyArr + +| Param | Type | Description | +| --- | --- | --- | +| selectedType | TYPE.SupportedMetadataTypes | limit execution to given metadata type | +| buObject | TYPE.BuObject | properties for auth | + + + +## Automation.(metadata) ⇒ boolean +helper for [postRetrieveTasks](#Automation.postRetrieveTasks) and [execute](#Automation.execute) + +**Kind**: global function +**Returns**: boolean - true if the automation schedule is valid + +| Param | Type | Description | +| --- | --- | --- | +| metadata | TYPE.AutomationItem | a single automation | + + + +## Automation.(metadataMap, key) ⇒ Promise.<object> +helper for [execute](#Automation.execute) + +**Kind**: global function +**Returns**: Promise.<object> - Returns the result of the API call + +| Param | Type | Description | +| --- | --- | --- | +| metadataMap | TYPE.AutomationMap | map of metadata | +| key | string | key of the metadata | + + + +## Automation.(metadata) ⇒ Promise.<object> +helper for [pause](#Automation.pause) + +**Kind**: global function +**Returns**: Promise.<object> - schedule reponse + +| Param | Type | Description | +| --- | --- | --- | +| metadata | TYPE.AutomationItem | automation metadata | + + + +## Automation.(metadata) +helper for [preDeployTasks](#Automation.preDeployTasks) and [execute](#Automation.execute) + +**Kind**: global function + +| Param | Type | Description | +| --- | --- | --- | +| metadata | TYPE.AutomationItem | metadata mapped by their keyField | + ## Automation.(metadataMap, key) ⇒ Promise.<void> @@ -8181,10 +8366,11 @@ helper for [postDeployTasks](#Automation.postDeployTasks) -## Automation.(metadataMap, originalMetadataMap, key) -helper for [postDeployTasks](postDeployTasks) +## Automation.(metadataMap, originalMetadataMap, key) ⇒ Promise.<object> +helper for [postDeployTasks](#Automation.postDeployTasks) **Kind**: global function +**Returns**: Promise.<object> - - | Param | Type | Description | | --- | --- | --- | diff --git a/lib/Deployer.js b/lib/Deployer.js index 3d064049a..8df75ff6d 100644 --- a/lib/Deployer.js +++ b/lib/Deployer.js @@ -180,7 +180,7 @@ class Deployer { return buMultiMetadataTypeMap; } /** - * helper for {@link deploy} + * helper for {@link 
Deployer.deploy} * * @param {string} cred name of Credential * @param {string} bu name of BU @@ -215,10 +215,9 @@ class Deployer { * @param {TYPE.SupportedMetadataTypes[]} [typeArr] limit deployment to given metadata type (can include subtype) * @param {string[]} [keyArr] limit deployment to given metadata keys * @param {boolean} [fromRetrieve] if true, no folders will be updated/created - * @param {boolean} [isRefresh] optional flag to indicate that triggeredSend should be refreshed after deployment of assets * @returns {Promise.} Promise of all deployed metadata */ - async _deploy(typeArr, keyArr, fromRetrieve, isRefresh) { + async _deploy(typeArr, keyArr, fromRetrieve) { if (await File.pathExists(this.deployDir)) { /** @type {TYPE.MultiMetadataTypeMap} */ this.metadata = Deployer.readBUMetadata(this.deployDir, typeArr); @@ -297,8 +296,7 @@ class Deployer { const result = await MetadataTypeInfo[type].deploy( this.metadata[type], this.deployDir, - this.retrieveDir, - isRefresh + this.retrieveDir ); multiMetadataTypeMap[type] = result; cache.mergeMetadata(type, result); @@ -312,7 +310,7 @@ class Deployer { * * @param {string} deployDir root directory of metadata. * @param {string[]} [typeArr] limit deployment to given metadata type - * @param {boolean} [listBadKeys=false] do not print errors, used for badKeys() + * @param {boolean} [listBadKeys] do not print errors, used for badKeys() * @returns {TYPE.MultiMetadataTypeMap} Metadata of BU in local directory */ static readBUMetadata(deployDir, typeArr, listBadKeys) { diff --git a/lib/Retriever.js b/lib/Retriever.js index 86ecf781e..f32acd2e3 100644 --- a/lib/Retriever.js +++ b/lib/Retriever.js @@ -189,7 +189,7 @@ class Retriever { } /** - * helper for {@link retrieve} to get all dependencies of the given types + * helper for {@link Retriever.retrieve} to get all dependencies of the given types * * @param {TYPE.SupportedMetadataTypes[]} metadataTypes list of metadata types to retrieve; can include subtypes! * @returns {TYPE.SupportedMetadataTypes[]} unique list dependent metadata types diff --git a/lib/cli.js b/lib/cli.js index 59bc224ea..d27d1b0ab 100644 --- a/lib/cli.js +++ b/lib/cli.js @@ -31,6 +31,12 @@ yargs .positional('KEY', { type: 'string', describe: 'metadata keys that shall be exclusively downloaded', + }) + .option('like', { + type: 'string', + group: 'Options for retrieve:', + describe: + 'filter metadata components (can include % as wildcard or _ for a single character)', }); }, handler: (argv) => { @@ -59,22 +65,31 @@ yargs }) .option('changeKeyField', { type: 'string', + group: 'Options for deploy:', describe: 'enables updating the key of the deployed metadata with the value in provided field (e.g. c__newKey). Can be used to sync name and key fields.', }) .option('changeKeyValue', { type: 'string', + group: 'Options for deploy:', describe: 'allows updating the key of the metadata to the provided value. 
Only available if a single type and key is deployed', }) .option('fromRetrieve', { type: 'boolean', + group: 'Options for deploy:', describe: 'optionally deploy from retrieve folder', }) .option('refresh', { type: 'boolean', + group: 'Options for deploy:', describe: 'optional for asset-message: runs refresh command for related triggeredSends after deploy', + }) + .option('execute', { + type: 'boolean', + group: 'Options for deploy:', + describe: 'optional for query: runs execute after deploy', }); }, handler: (argv) => { @@ -307,6 +322,7 @@ yargs builder: (yargs) => { yargs.option('json', { type: 'boolean', + group: 'Options for explainTypes:', describe: 'optionaly return info in json format', }); }, @@ -316,7 +332,7 @@ yargs }, }) .command({ - command: 'createDeltaPkg [range] [--filter ] [--commitHistory ]', + command: 'createDeltaPkg [range]', aliases: ['cdp'], desc: 'Copies commit-based file delta into deploy folder', builder: (yargs) => { @@ -327,11 +343,13 @@ yargs }) .option('filter', { type: 'string', + group: 'Options for createDeltaPkg:', describe: 'Disable templating & instead filter by the specified BU path (comma separated), can include subtype, will be prefixed with "retrieve/"', }) .option('commitHistory', { type: 'number', + group: 'Options for createDeltaPkg:', describe: 'Number of commits to look back for changes (supersedes config)', }); }, @@ -390,8 +408,8 @@ yargs }, }) .command({ - command: 'execute ', - aliases: ['exec'], + command: 'execute [KEY]', + aliases: ['exec', 'start'], desc: 'executes the entity (query/journey/automation etc.)', builder: (yargs) => { yargs @@ -406,11 +424,49 @@ yargs .positional('KEY', { type: 'string', describe: 'key(s) of the metadata component(s)', + }) + .option('like', { + type: 'string', + group: 'Options for execute:', + describe: + 'filter metadata components (can include % as wildcard or _ for a single character)', + }); + }, + handler: (argv) => { + Mcdev.setOptions(argv); + // ! do not allow multiple types to be passed in here via csvToArray + Mcdev.execute(argv.BU, argv.TYPE, csvToArray(argv.KEY)); + }, + }) + .command({ + command: 'pause [KEY]', + aliases: ['p', 'stop'], + desc: 'pauses the entity (automation etc.)', + builder: (yargs) => { + yargs + .positional('BU', { + type: 'string', + describe: 'the business unit where to start an item', + }) + .positional('TYPE', { + type: 'string', + describe: 'metadata type', + }) + .positional('KEY', { + type: 'string', + describe: 'key(s) of the metadata component(s)', + }) + .option('like', { + type: 'string', + group: 'Options for pause:', + describe: + 'filter metadata components (can include % as wildcard or _ for a single character)', }); }, handler: (argv) => { Mcdev.setOptions(argv); - Mcdev.execute(argv.BU, csvToArray(argv.TYPE), csvToArray(argv.KEY)); + // ! do not allow multiple types to be passed in here via csvToArray + Mcdev.pause(argv.BU, argv.TYPE, csvToArray(argv.KEY)); }, }) .command({ @@ -490,9 +546,13 @@ function csvToArray(csv) { return !csv ? null : csv.includes(',') - ? csv.split(',').map((item) => - // allow whitespace in comma-separated lists - item.trim() - ) - : [csv.trim()]; + ? 
csv + .split(',') + .map((item) => + // allow whitespace in comma-separated lists + item.trim() + ) + // make sure trailing commas are ignored + .filter(Boolean) + : [csv.trim()].filter(Boolean); } diff --git a/lib/index.js b/lib/index.js index ea55c327d..da03af85c 100644 --- a/lib/index.js +++ b/lib/index.js @@ -51,15 +51,17 @@ class Mcdev { static setOptions(argv) { const knownOptions = [ 'api', - 'commitHistory', 'changeKeyField', 'changeKeyValue', + 'commitHistory', 'filter', 'fromRetrieve', 'json', + 'like', + 'noLogFile', 'refresh', + 'execute', 'skipInteraction', - 'noLogFile', ]; for (const option of knownOptions) { if (argv[option] !== undefined) { @@ -173,20 +175,20 @@ class Mcdev { } if (businessUnit === '*') { - Util.logger.info('\n :: Retrieving all BUs for all credentials'); + Util.logger.info(':: Retrieving all BUs for all credentials'); let counter_credTotal = 0; for (const cred in properties.credentials) { - Util.logger.info(`\n :: Retrieving all BUs for ${cred}`); + Util.logger.info(`:: Retrieving all BUs for ${cred}`); let counter_credBu = 0; for (const bu in properties.credentials[cred].businessUnits) { - await this._retrieveBU(cred, bu, selectedTypesArr, keys); + await this.#retrieveBU(cred, bu, selectedTypesArr, keys); counter_credBu++; Util.startLogger(true); } counter_credTotal += counter_credBu; - Util.logger.info(`\n :: ${counter_credBu} BUs for ${cred}\n`); + Util.logger.info(`:: ${counter_credBu} BUs for ${cred}\n`); } - Util.logger.info(`\n :: ${counter_credTotal} BUs in total\n`); + Util.logger.info(`:: ${counter_credTotal} BUs in total\n`); } else { let [cred, bu] = businessUnit ? businessUnit.split('/') : [null, null]; // to allow all-BU via user selection we need to run this here already @@ -210,17 +212,17 @@ class Mcdev { } if (bu === '*' && properties.credentials && properties.credentials[cred]) { - Util.logger.info(`\n :: Retrieving all BUs for ${cred}`); + Util.logger.info(`:: Retrieving all BUs for ${cred}`); let counter_credBu = 0; for (const bu in properties.credentials[cred].businessUnits) { - await this._retrieveBU(cred, bu, selectedTypesArr, keys); + await this.#retrieveBU(cred, bu, selectedTypesArr, keys); counter_credBu++; Util.startLogger(true); } - Util.logger.info(`\n :: ${counter_credBu} BUs for ${cred}\n`); + Util.logger.info(`:: ${counter_credBu} BUs for ${cred}\n`); } else { // retrieve a single BU; return - const retrieveChangelog = await this._retrieveBU( + const retrieveChangelog = await this.#retrieveBU( cred, bu, selectedTypesArr, @@ -235,7 +237,7 @@ class Mcdev { } } /** - * helper for {@link retrieve} + * helper for {@link Mcdev.retrieve} * * @private * @param {string} cred name of Credential @@ -245,7 +247,7 @@ class Mcdev { * @param {boolean} [changelogOnly] skip saving, only create json in memory * @returns {Promise.} ensure that BUs are worked on sequentially */ - static async _retrieveBU(cred, bu, selectedTypesArr, keys, changelogOnly) { + static async #retrieveBU(cred, bu, selectedTypesArr, keys, changelogOnly) { const properties = await config.getProperties(); if (!(await config.checkProperties(properties))) { return null; @@ -270,7 +272,7 @@ class Mcdev { triggeredSend: 'triggeredSendDefinition', user: 'accountUser', }; - Util.logger.info(`\n :: Retrieving ${cred}/${bu}\n`); + Util.logger.info(`:: Retrieving ${cred}/${bu}\n`); const retrieveTypesArr = []; if (selectedTypesArr) { for (const selectedType of Array.isArray(selectedTypesArr) @@ -701,43 +703,101 @@ class Mcdev { } } /** - * Start an item (query) + * Start/execute 
an item + * + * @param {string} businessUnit name of BU + * @param {TYPE.SupportedMetadataTypes} [selectedType] limit to given metadata types + * @param {string[]} [keys] customerkey of the metadata + * @returns {Promise.} true if all started successfully, false if not + */ + static async execute(businessUnit, selectedType, keys) { + return this.#runMethod('execute', businessUnit, selectedType, keys); + } + /** + * pause an item + * + * @param {string} businessUnit name of BU + * @param {TYPE.SupportedMetadataTypes} [selectedType] limit to given metadata types + * @param {string[]} [keys] customerkey of the metadata + * @returns {Promise.} true if all started successfully, false if not + */ + static async pause(businessUnit, selectedType, keys) { + return this.#runMethod('pause', businessUnit, selectedType, keys); + } + /** + * run a method across BUs * + * @param {'execute'|'pause'} methodName what to run * @param {string} businessUnit name of BU - * @param {TYPE.SupportedMetadataTypes[]} [selectedTypesArr] limit to given metadata types - * @param {string[]} keys customerkey of the metadata + * @param {TYPE.SupportedMetadataTypes} [selectedType] limit to given metadata types + * @param {string[]} [keys] customerkey of the metadata * @returns {Promise.} true if all started successfully, false if not */ - static async execute(businessUnit, selectedTypesArr, keys) { + static async #runMethod(methodName, businessUnit, selectedType, keys) { Util.startLogger(); - Util.logger.info('mcdev:: Execute'); + let lang_past; + let lang_present; + let requireKeyOrLike; + switch (methodName) { + case 'execute': { + lang_past = 'executed'; + lang_present = 'executing'; + requireKeyOrLike = true; + break; + } + case 'pause': { + lang_past = 'paused'; + lang_present = 'pausing'; + requireKeyOrLike = true; + break; + } + } + + Util.logger.info(`mcdev:: ${methodName} ${selectedType}`); const properties = await config.getProperties(); let counter_credBu = 0; let counter_failed = 0; if (!(await config.checkProperties(properties))) { // return null here to avoid seeing 2 error messages for the same issue - return null; + return false; } - if (Array.isArray(selectedTypesArr)) { - // types and keys can be provided but for each type all provided keys are applied as filter - for (const selectedType of Array.isArray(selectedTypesArr) - ? 
selectedTypesArr - : Object.keys(selectedTypesArr)) { - if (!Util._isValidType(selectedType)) { - return; - } - } + if (!Util._isValidType(selectedType)) { + return false; } + if (!Object.prototype.hasOwnProperty.call(MetadataTypeInfo[selectedType], methodName)) { + Util.logger.error( + ` ☇ skipping ${selectedType}: ${methodName} is not supported yet for ${selectedType}` + ); + return false; + } + + if ( + requireKeyOrLike && + (!Array.isArray(keys) || !keys.length) && + (!Util.OPTIONS.like || !Object.keys(Util.OPTIONS.like).length) + ) { + Util.logger.error('At least one key or a --like filter is required.'); + return false; + } else if ( + Array.isArray(keys) && + keys.length && + Util.OPTIONS.like && + Object.keys(Util.OPTIONS.like).length + ) { + Util.logger.error('You can either specify keys OR a --like filter.'); + return false; + } + if (businessUnit === '*') { Util.logger.info( - '\n :: Executing the entity on all BUs for all credentials' + `:: ${lang_present} the ${selectedType} on all BUs for all credentials` ); let counter_credTotal = 0; for (const cred in properties.credentials) { - Util.logger.info(`\n :: Executing the entity on all BUs for ${cred}`); + Util.logger.info(`:: ${lang_present} ${selectedType} on all BUs for ${cred}`); for (const bu in properties.credentials[cred].businessUnits) { - if (await this._executeBU(cred, bu, selectedTypesArr, keys)) { + if (await this.#runOnBU(methodName, cred, bu, selectedType, keys)) { counter_credBu++; } else { counter_failed++; @@ -746,11 +806,11 @@ class Mcdev { } counter_credTotal += counter_credBu; Util.logger.info( - `\n :: Executed the entity on ${counter_credBu} BUs for ${cred}\n` + `:: ${lang_past} ${selectedType} on ${counter_credBu} BUs for ${cred}` ); } Util.logger.info( - `\n :: Executed the entity on ${counter_credTotal} BUs in total\n` + `:: ${lang_past} ${selectedType} on ${counter_credTotal} BUs in total\n` ); } else { let [cred, bu] = businessUnit ? businessUnit.split('/') : [null, null]; @@ -767,17 +827,17 @@ class Mcdev { true ); if (buObject === null) { - return; + return false; } else { cred = buObject.credential; bu = buObject.businessUnit; } } if (bu === '*' && properties.credentials && properties.credentials[cred]) { - Util.logger.info(`\n :: Executing the entity on all BUs for ${cred}`); + Util.logger.info(`:: ${lang_present} ${selectedType} on all BUs for ${cred}`); let counter_credBu = 0; for (const bu in properties.credentials[cred].businessUnits) { - if (await this._executeBU(cred, bu, selectedTypesArr, keys)) { + if (await this.#runOnBU(methodName, cred, bu, selectedType, keys)) { counter_credBu++; } else { counter_failed++; @@ -785,33 +845,34 @@ class Mcdev { Util.startLogger(true); } Util.logger.info( - `\n :: Executed the entity on ${counter_credBu} BUs for ${cred}\n` + `:: ${lang_past} ${selectedType} on ${counter_credBu} BUs for ${cred}` ); } else { - // execute the entity on one BU only - if (await this._executeBU(cred, bu, selectedTypesArr, keys)) { + // execute runMethod for the entity on one BU only + if (await this.#runOnBU(methodName, cred, bu, selectedType, keys)) { counter_credBu++; } else { counter_failed++; } - Util.logger.info(`\n :: Done\n`); + Util.logger.info(`:: Done`); } } - if (counter_credBu !== 0) { - Util.logger.info(`\n :: Executed query on ${counter_credBu} BUs\n`); + if (counter_credBu > 1) { + Util.logger.info(`:: ${lang_past} ${selectedType} on ${counter_credBu} BUs`); } return counter_failed === 0 ? 
true : false; } /** - * helper for {@link execute} + * helper for {@link Mcdev.#runMethod} * + * @param {'execute'|'pause'} methodName what to run * @param {string} cred name of Credential * @param {string} bu name of BU - * @param {TYPE.SupportedMetadataTypes[]} [selectedTypesArr] limit execution to given metadata type + * @param {TYPE.SupportedMetadataTypes} [type] limit execution to given metadata type * @param {string[]} keyArr customerkey of the metadata * @returns {Promise.} true if all items were executed, false otherwise */ - static async _executeBU(cred, bu, selectedTypesArr, keyArr) { + static async #runOnBU(methodName, cred, bu, type, keyArr) { const properties = await config.getProperties(); let counter_failed = 0; const buObject = await Cli.getCredentialObject( @@ -820,39 +881,103 @@ class Mcdev { null, true ); - if (!keyArr || (Array.isArray(keyArr) && !keyArr.length)) { - throw new Error('No keys were provided'); + try { + if (!type) { + throw new Error('No type was provided'); + } + if (buObject !== null) { + cache.initCache(buObject); + cred = buObject.credential; + bu = buObject.businessUnit; + } + Util.logger.info(`:: ${methodName} ${type} on ${cred}/${bu}`); + MetadataTypeInfo[type].client = auth.getSDK(buObject); + if (Util.OPTIONS.like && Object.keys(Util.OPTIONS.like).length) { + keyArr = await this.#retrieveKeysWithLike(type, buObject); + } else { + MetadataTypeInfo[type].properties = properties; + MetadataTypeInfo[type].buObject = buObject; + } + if (!keyArr || (Array.isArray(keyArr) && !keyArr.length)) { + throw new Error('No keys were provided'); + } + + // result will be undefined (false) if methodName is not supported for the type + if (!(await MetadataTypeInfo[type][methodName](keyArr))) { + counter_failed++; + } + } catch (ex) { + Util.logger.errorStack(ex, 'mcdev.' + methodName + ' failed'); } - if (!selectedTypesArr || (Array.isArray(selectedTypesArr) && !selectedTypesArr.length)) { - throw new Error('No type was provided'); + + return counter_failed === 0 ? 
true : false; + } + + /** + * helper for {@link Mcdev.#runOnBU} + * + * @param {TYPE.SupportedMetadataTypes} selectedType limit execution to given metadata type + * @param {TYPE.BuObject} buObject properties for auth + * @returns {string[]} keyArr + */ + static async #retrieveKeysWithLike(selectedType, buObject) { + const properties = await config.getProperties(); + + // cache depenencies + const deployOrder = Util.getMetadataHierachy([selectedType]); + for (const type in deployOrder) { + const subTypeArr = deployOrder[type]; + MetadataTypeInfo[type].client = auth.getSDK(buObject); + MetadataTypeInfo[type].properties = properties; + MetadataTypeInfo[type].buObject = buObject; + Util.logger.info(`Caching dependent Metadata: ${type}`); + Util.logSubtypes(subTypeArr); + const result = await MetadataTypeInfo[type].retrieveForCache(null, subTypeArr); + if (result) { + if (Array.isArray(result)) { + for (const result_i of result) { + if (result_i?.metadata && Object.keys(result_i.metadata).length) { + cache.mergeMetadata(type, result_i.metadata); + } + } + } else { + cache.setMetadata(type, result.metadata); + } + } } - if (buObject !== null) { - cache.initCache(buObject); - cred = buObject.credential; - bu = buObject.businessUnit; + + // find all keys in chosen type that match the like-filter + const keyArr = []; + const metadataMap = cache.getCache()[selectedType]; + if (!metadataMap) { + throw new Error(`Selected type ${selectedType} could not be cached`); } Util.logger.info( - `\n :: Executing ${selectedTypesArr.join(', ')} on ${cred}/${bu}\n` + Util.getGrayMsg(`Found ${Object.keys(metadataMap).length} ${selectedType}s`) ); - try { - // more than one type was provided, iterate types and execute items - for (const type of selectedTypesArr) { - try { - MetadataTypeInfo[type].client = auth.getSDK(buObject); - } catch (ex) { - Util.logger.error(ex.message); - return; - } - // result will be undefined (false) if execute is not supported for the type - if (!(await MetadataTypeInfo[type].execute(keyArr))) { - counter_failed++; - } + for (const originalKey in metadataMap) { + // hide postRetrieveOutput + Util.setLoggingLevel({ silent: true }); + metadataMap[originalKey] = MetadataTypeInfo[selectedType].postRetrieveTasks( + metadataMap[originalKey] + ); + // reactivate logging + Util.setLoggingLevel({}); + if (Util.fieldsLike(metadataMap[originalKey])) { + keyArr.push(originalKey); } - } catch (ex) { - Util.logger.errorStack(ex, 'mcdev.execute failed'); } - return counter_failed === 0 ? true : false; + Util.logger.info( + Util.getGrayMsg( + `Identified ${keyArr.length} ${selectedType}${ + keyArr.length === 1 ? '' : 's' + } that match${selectedType}${keyArr.length === 1 ? 'es' : ''} the like-filter` + ) + ); + + return keyArr; } +} /** * Updates the key to match the name field * diff --git a/lib/metadataTypes/Asset.js b/lib/metadataTypes/Asset.js index 3ff861cce..c44e337c3 100644 --- a/lib/metadataTypes/Asset.js +++ b/lib/metadataTypes/Asset.js @@ -86,7 +86,7 @@ class Asset extends MetadataType { return { metadata: Object.values(metadata)[0], type: this.definition.type }; } /** - * helper for {@link retrieve} + {@link retrieveAsTemplate} + * helper for {@link Asset.retrieve} + {@link Asset.retrieveAsTemplate} * * @private * @returns {TYPE.AssetSubType[]} subtype array @@ -448,14 +448,14 @@ class Asset extends MetadataType { ); } /** - * helper for {@link preDeployTasks} + * helper for {@link Asset.preDeployTasks} * Some metadata types store their actual content as a separate file, e.g. 
images * This method reads these from the local FS stores them in the metadata object allowing to deploy it * * @param {TYPE.AssetItem} metadata a single asset * @param {TYPE.AssetSubType} subType group of similar assets to put in a folder (ie. images) * @param {string} deployDir directory of deploy files - * @param {boolean} [pathOnly=false] used by getFilesToCommit which does not need the binary file to be actually read + * @param {boolean} [pathOnly] used by getFilesToCommit which does not need the binary file to be actually read * @returns {Promise.} if found will return the path of the binary file */ static async _readExtendedFileFromFS(metadata, subType, deployDir, pathOnly = false) { @@ -504,11 +504,10 @@ class Asset extends MetadataType { * @param {TYPE.MetadataTypeMap} metadata metadata mapped by their keyField * @param {TYPE.MetadataTypeMap} _ originalMetadata to be updated (contains additioanl fields) * @param {{created: number, updated: number}} createdUpdated counter representing successful creates/updates - * @param {boolean} [isRefresh] optional flag to indicate that triggeredSend should be refreshed after deployment of assets * @returns {Promise.} - */ - static async postDeployTasks(metadata, _, createdUpdated, isRefresh) { - if (isRefresh) { + static async postDeployTasks(metadata, _, createdUpdated) { + if (Util.OPTIONS.refresh) { if (createdUpdated.updated) { // only run this if assets were updated. for created assets we do not expect this._refreshTriggeredSend(metadata); @@ -521,7 +520,7 @@ class Asset extends MetadataType { } /** - * helper for {@link postDeployTasks}. triggers a refresh of active triggerredSendDefinitions associated with the updated asset-message items. Gets executed if isRefresh is true. + * helper for {@link Asset.postDeployTasks}. triggers a refresh of active triggerredSendDefinitions associated with the updated asset-message items. Gets executed if refresh option has been set. 
* * @private * @param {TYPE.MetadataTypeMap} metadata metadata mapped by their keyField @@ -546,7 +545,7 @@ class Asset extends MetadataType { TriggeredSend.client = this.client; try { // find refreshable TSDs - const tsdObj = (await TriggeredSend.findRefreshableItems()).metadata; + const tsdObj = (await TriggeredSend.findRefreshableItems(true)).metadata; const tsdCountInitial = Object.keys(tsdObj).length; const emailCount = legacyIdArr.length; @@ -867,7 +866,7 @@ class Asset extends MetadataType { } /** - * helper for {@link preDeployTasks} that loads extracted code content back into JSON + * helper for {@link Asset.preDeployTasks} that loads extracted code content back into JSON * * @param {TYPE.AssetItem} metadata a single asset definition * @param {string} deployDir directory of deploy files @@ -1151,7 +1150,7 @@ class Asset extends MetadataType { return fileList; } /** - * helper for {@link preDeployTasks} that loads extracted code content back into JSON + * helper for {@link Asset.preDeployTasks} that loads extracted code content back into JSON * * @param {string} prefix usually the customerkey * @param {object} metadataSlots metadata.views.html.slots or deeper slots.<>.blocks.<>.slots @@ -1231,7 +1230,7 @@ class Asset extends MetadataType { } } /** - * helper for {@link postRetrieveTasks} that finds code content in JSON and extracts it + * helper for {@link Asset.postRetrieveTasks} that finds code content in JSON and extracts it * to allow saving that separately and formatted * * @param {TYPE.AssetItem} metadata a single asset definition diff --git a/lib/metadataTypes/Automation.js b/lib/metadataTypes/Automation.js index 560ed94e3..1f6dd6d19 100644 --- a/lib/metadataTypes/Automation.js +++ b/lib/metadataTypes/Automation.js @@ -318,6 +318,29 @@ class Automation extends MetadataType { throw new Error(JSON.stringify(results)); } } + /** + * helper for {@link Automation.postRetrieveTasks} and {@link Automation.execute} + * + * @param {TYPE.AutomationItem} metadata a single automation + * @returns {boolean} true if the automation schedule is valid + */ + static #isValidSchedule(metadata) { + if (metadata.type === 'scheduled' && metadata.schedule?.startDate) { + try { + if (this.definition.timeZoneMapping[metadata.schedule.timezoneName]) { + // if we found the id in our list, remove the redundant data + delete metadata.schedule.timezoneId; + } + } catch { + Util.logger.debug( + `- Schedule name '${metadata.schedule.timezoneName}' not found in definition.timeZoneMapping` + ); + } + return true; + } else { + return false; + } + } /** * manages post retrieve steps * @@ -332,27 +355,13 @@ class Automation extends MetadataType { if (metadata.type === 'scheduled' && metadata.schedule?.startDate) { // Starting Source == 'Schedule' - try { - if (this.definition.timeZoneMapping[metadata.schedule.timezoneName]) { - // if we found the id in our list, remove the redundant data - delete metadata.schedule.timezoneId; - } - } catch { - Util.logger.debug( - `- Schedule name '${metadata.schedule.timezoneName}' not found in definition.timeZoneMapping` - ); - } - try { - // type 'Running' is temporary status only, overwrite with Scheduled for storage. 
- if (metadata.type === 'scheduled' && metadata.status === 'Running') { - metadata.status = 'Scheduled'; - } - } catch { - Util.logger.error( - `- ${this.definition.type} ${metadata.name} does not have a valid schedule setting.` - ); + if (!this.#isValidSchedule(metadata)) { return; } + // type 'Running' is temporary status only, overwrite with Scheduled for storage. + if (metadata.type === 'scheduled' && metadata.status === 'Running') { + metadata.status = 'Scheduled'; + } } else if (metadata.type === 'triggered' && metadata.fileTrigger) { // Starting Source == 'File Drop' // Do nothing for now @@ -445,6 +454,147 @@ class Automation extends MetadataType { return null; } } + /** + * a function to start query execution via API + * + * @param {string[]} keyArr customerkey of the metadata + * @returns {Promise.} Returns true if all items were executed successfully, otherwise false + */ + static async execute(keyArr) { + const metadataMap = {}; + for (const key of keyArr) { + if (key) { + const results = await this.retrieve(undefined, undefined, undefined, key); + if (Object.keys(results.metadata).length) { + for (const key of Object.keys(results.metadata)) { + if (this.#isValidSchedule(results.metadata[key])) { + metadataMap[key] = results.metadata[key]; + } else { + Util.logger.error( + ` - skipping ${this.definition.type} ${results.metadata[key].name}: no valid schedule settings found.` + ); + } + } + } + } + } + + Util.logger.info( + `Starting automations according to schedule: ${Object.keys(metadataMap).length}` + ); + const promiseResults = []; + + for (const key of Object.keys(metadataMap)) { + if (metadataMap[key].status === 'Scheduled') { + Util.logger.info( + ` - skipping ${this.definition.type} ${metadataMap[key].name}: already scheduled.` + ); + } else { + promiseResults.push(this.#executeItem(metadataMap, key)); + } + } + const results = await Promise.all(promiseResults); + const successCounter = results + .filter(Boolean) + .filter((r) => r.OverallStatus === 'OK').length; + Util.logger.info(`Executed ${successCounter} of ${keyArr.length} items`); + return successCounter === keyArr.length; + } + /** + * helper for {@link Automation.execute} + * + * @param {TYPE.AutomationMap} metadataMap map of metadata + * @param {string} key key of the metadata + * @returns {Promise.} Returns the result of the API call + */ + static async #executeItem(metadataMap, key) { + this.#preDeploySchedule(metadataMap[key]); + metadataMap[key].status = 'Scheduled'; + return this.#scheduleAutomation(metadataMap, metadataMap, key); + } + /** + * a function to start query execution via API + * + * @param {string[]} keyArr customerkey of the metadata + * @returns {Promise.} Returns true if all items were executed successfully, otherwise false + */ + static async pause(keyArr) { + const metadataMap = {}; + for (const key of keyArr) { + if (key) { + const results = await this.retrieve(undefined, undefined, undefined, key); + if (Object.keys(results.metadata).length) { + for (const key of Object.keys(results.metadata)) { + if (this.#isValidSchedule(results.metadata[key])) { + metadataMap[key] = results.metadata[key]; + } else { + Util.logger.error( + ` - skipping ${this.definition.type} ${results.metadata[key].name}: no valid schedule settings found.` + ); + } + } + } + } + } + + Util.logger.info(`Pausing automations: ${Object.keys(metadataMap).length}`); + const promiseResults = []; + for (const key of Object.keys(metadataMap)) { + if (metadataMap[key].status === 'Scheduled') { + 
promiseResults.push(this.#pauseItem(metadataMap[key])); + } else if (metadataMap[key].status === 'PausedSchedule') { + Util.logger.info( + ` - skipping ${this.definition.type} ${metadataMap[key].name}: already paused.` + ); + } else { + Util.logger.error( + ` - skipping ${this.definition.type} ${ + metadataMap[key].name + }: currently ${metadataMap[ + key + ].status.toLowerCase()}. Please try again in a few minutes.` + ); + } + } + const successCounter = (await Promise.all(promiseResults)) + .filter(Boolean) + .filter((r) => r.OverallStatus === 'OK').length; + + Util.logger.info(`Paused ${successCounter} of ${keyArr.length} items`); + return successCounter === keyArr.length; + } + + /** + * helper for {@link Automation.pause} + * + * @param {TYPE.AutomationItem} metadata automation metadata + * @returns {Promise.} schedule reponse + */ + static async #pauseItem(metadata) { + const schedule = {}; + try { + const response = await this.client.soap.schedule( + 'Automation', + schedule, + { + Interaction: { + ObjectID: metadata[this.definition.idField], + }, + }, + 'pause', + {} + ); + Util.logger.info( + ` - paused ${this.definition.type}: ${metadata[this.definition.keyField]} / ${ + metadata[this.definition.nameField] + }` + ); + return response; + } catch (ex) { + this._handleSOAPErrors(ex, 'pausing', metadata, false); + return null; + } + } /** * Deploys automation - the saved file is the original one due to large differences required for deployment @@ -452,11 +602,10 @@ class Automation extends MetadataType { * @param {TYPE.AutomationMap} metadata metadata mapped by their keyField * @param {string} targetBU name/shorthand of target businessUnit for mapping * @param {string} retrieveDir directory where metadata after deploy should be saved - * @param {boolean} [isRefresh] optional flag - so far not used by automation * @returns {Promise.} Promise */ - static async deploy(metadata, targetBU, retrieveDir, isRefresh) { - const upsertResults = await this.upsert(metadata, targetBU, isRefresh); + static async deploy(metadata, targetBU, retrieveDir) { + const upsertResults = await this.upsert(metadata, targetBU); const savedMetadata = await this.saveResults(upsertResults, retrieveDir, null); if ( this.properties.metaDataTypes.documentOnRetrieve.includes(this.definition.type) && @@ -493,6 +642,33 @@ class Automation extends MetadataType { return super.updateREST(metadata, uri); } + /** + * helper for {@link Automation.preDeployTasks} and {@link Automation.execute} + * + * @param {TYPE.AutomationItem} metadata metadata mapped by their keyField + */ + static #preDeploySchedule(metadata) { + delete metadata.schedule.rangeTypeId; + delete metadata.schedule.pattern; + delete metadata.schedule.scheduledTime; + delete metadata.schedule.scheduledStatus; + if (this.definition.timeZoneMapping[metadata.schedule.timezoneName]) { + metadata.schedule.timezoneId = + this.definition.timeZoneMapping[metadata.schedule.timezoneName]; + } else { + Util.logger.error( + `Could not find timezone ${metadata.schedule.timezoneName} in definition.timeZoneMapping` + ); + } + + // the upsert API needs this to be named scheduleTypeId; the retrieve API returns it as typeId + metadata.schedule.scheduleTypeId = metadata.schedule.typeId; + delete metadata.schedule.typeId; + + // prep startSource + metadata.startSource = { schedule: metadata.schedule, typeId: 1 }; + } + /** * Gets executed before deploying metadata * @@ -516,25 +692,12 @@ class Automation extends MetadataType { if (metadata.type === 'scheduled' && 
metadata?.schedule?.startDate) { // Starting Source == 'Schedule' - delete metadata.schedule.rangeTypeId; - delete metadata.schedule.pattern; - delete metadata.schedule.scheduledTime; - delete metadata.schedule.scheduledStatus; - if (this.definition.timeZoneMapping[metadata.schedule.timezoneName]) { - metadata.schedule.timezoneId = - this.definition.timeZoneMapping[metadata.schedule.timezoneName]; - } else { - Util.logger.error( - `Could not find timezone ${metadata.schedule.timezoneName} in definition.timeZoneMapping` - ); - } - delete metadata.schedule.timezoneName; - // the upsert API needs this to be named scheduleTypeId; the retrieve API returns it as typeId - metadata.schedule.scheduleTypeId = metadata.schedule.typeId; - delete metadata.schedule.typeId; + this.#preDeploySchedule(metadata); + // * run _buildSchedule here but only to check if things look ok - do not use the returned schedule object for deploy + this._buildSchedule(metadata.schedule); - // prep startSource - metadata.startSource = { schedule: metadata.schedule, typeId: 1 }; + delete metadata.schedule.timezoneName; + delete metadata.startSource.schedule.timezoneName; } else if (metadata.type === 'triggered' && metadata.fileTrigger) { // Starting Source == 'File Drop' @@ -642,6 +805,30 @@ class Automation extends MetadataType { */ static async postDeployTasks(metadataMap, originalMetadataMap) { for (const key in metadataMap) { + if (!metadataMap[key].type) { + // create response does not return the type attribute + + // el.schedule.timezoneName + const scheduleHelper = + metadataMap[key].schedule || metadataMap[key].startSource.schedule; + scheduleHelper.timezoneName ||= Util.inverseGet( + this.definition.timeZoneMapping, + scheduleHelper.timezoneId + ); + + // el.type + metadataMap[key].type = scheduleHelper + ? 'scheduled' + : metadataMap[key].fileTrigger + ? 
'triggered' + : undefined; + + // el.status + metadataMap[key].status ||= Util.inverseGet( + this.definition.statusMapping, + metadataMap[key].statusId + ); + } // need to put schedule on here if status is scheduled await Automation.#scheduleAutomation(metadataMap, originalMetadataMap, key); @@ -657,6 +844,10 @@ class Automation extends MetadataType { } } } + if (Util.OPTIONS.execute) { + Util.logger.info(`Executing: ${this.definition.type}`); + await this.execute(Object.keys(metadataMap)); + } } /** * helper for {@link Automation.postDeployTasks} @@ -706,13 +897,15 @@ class Automation extends MetadataType { } /** - * helper for {@link postDeployTasks} + * helper for {@link Automation.postDeployTasks} * * @param {TYPE.AutomationMap} metadataMap metadata mapped by their keyField * @param {TYPE.AutomationMap} originalMetadataMap metadata to be updated (contains additioanl fields) * @param {string} key current customer key + * @returns {Promise.} - */ static async #scheduleAutomation(metadataMap, originalMetadataMap, key) { + let response = null; if (originalMetadataMap[key]?.type === 'scheduled') { // Starting Source == 'Schedule': Try starting the automation if (originalMetadataMap[key].status === 'Scheduled') { @@ -734,7 +927,7 @@ class Automation extends MetadataType { const schedule_timezoneString = schedule._timezoneString; delete schedule._timezoneString; // start the automation - await this.client.soap.schedule( + response = await this.client.soap.schedule( 'Automation', schedule, { @@ -758,9 +951,10 @@ class Automation extends MetadataType { schedule_StartDateTime.split('T').join(' ').split('.')[0] } ${schedule_timezoneString}` ); - } catch (ex) { + } catch { + // API does not return anything usefull here. We have to know the rules instead Util.logger.error( - `- Could not start scheduled automation '${originalMetadataMap[key].name}': ${ex.message}` + ` ☇ error starting scheduled ${this.definition.type}${key}: Please check schedule settings` ); } } @@ -781,6 +975,7 @@ class Automation extends MetadataType { metadataMap[key].schedule.typeId = metadataMap[key].schedule.scheduleTypeId; delete metadataMap[key].schedule.scheduleTypeId; } + return response; } /** @@ -867,6 +1062,9 @@ class Automation extends MetadataType { const a = obj.split('='); recurHelper[a[0]] = a[1]; } + if (recurHelper.INTERVAL) { + recurHelper.INTERVAL = Number.parseInt(recurHelper.INTERVAL); + } // the ical schedule is all in caps but soap objects require Title Case. const keyStem = recurHelper.FREQ.charAt(0) + recurHelper.FREQ.slice(1, -2).toLowerCase(); @@ -895,13 +1093,18 @@ class Automation extends MetadataType { 'Scheduling automatically not supported for Weekly, Monthly and Yearly, please configure manually.' ); } + if (recurHelper.FREQ === 'MINUTELY' && recurHelper.INTERVAL && recurHelper.INTERVAL < 5) { + throw new Error( + 'The smallest interval you can configure is 5 minutes. Please adjust your schedule.' 
+ ); + } if (this.definition.timeZoneMapping[scheduleObject.timezoneName]) { scheduleObject.timezoneId = this.definition.timeZoneMapping[scheduleObject.timezoneName]; } else { - Util.logger.error( - `- Could not find timezone ${scheduleObject.timezoneName} in definition.timeZoneMapping` + throw new Error( + `Could not find timezone ${scheduleObject.timezoneName} in definition.timeZoneMapping` ); } schedule.TimeZone.ID = scheduleObject.timezoneId; @@ -995,8 +1198,8 @@ class Automation extends MetadataType { // create new Date object reflecting SFMC's servertime const dateServer = new Date(utc + 3600000 * offsetServer); - // return time as a string without trailing "Z" - return dateServer.toISOString().slice(0, -1); + // return time as a string without trailing "Z" and without miliseconds (separated by .) + return dateServer.toISOString().slice(0, -1).split('.')[0]; } /** * Experimental: Only working for DataExtensions: @@ -1023,7 +1226,7 @@ class Automation extends MetadataType { const automationType = { scheduled: 'Schedule', triggered: 'File Drop' }; output += `**Started by:** ${automationType[json.type] || 'Not defined'}\n\n`; output += `**Status:** ${json.status}\n\n`; - if (json.type === 'scheduled') { + if (json.type === 'scheduled' || json.schedule) { const tz = this.definition.timeZoneDifference[ this.definition.timeZoneMapping[json?.schedule?.timezoneName] @@ -1033,7 +1236,7 @@ class Automation extends MetadataType { output += `**Schedule:**\n\n`; output += `* Start: ${json.schedule.startDate.split('T').join(' ')} ${tz}\n`; output += `* End: ${json.schedule.endDate.split('T').join(' ')} ${tz}\n`; - output += `* Timezone: ${json.schedule.timezoneName}\n`; + output += `* Timezone: ${json.schedule.timezoneName}\n`; const ical = {}; for (const item of json.schedule.icalRecur.split(';')) { @@ -1044,7 +1247,7 @@ class Automation extends MetadataType { output += `* Recurrance: every ${ical.INTERVAL > 1 ? ical.INTERVAL : ''} ${ frequency === 'dai' ? 'day' : frequency - }${ical.INTERVAL > 1 ? 's' : ''} ${ical.COUNT ? `for ${ical.COUNT} times` : ''}\n`; + }${ical.INTERVAL > 1 ? 's' : ''}${ical.COUNT ? 
` for ${ical.COUNT} times` : ''}\n`; } else if (json.schedule) { output += `**Schedule:** Not defined\n`; } diff --git a/lib/metadataTypes/DataExtension.js b/lib/metadataTypes/DataExtension.js index 43e9dcfc8..7210095a6 100644 --- a/lib/metadataTypes/DataExtension.js +++ b/lib/metadataTypes/DataExtension.js @@ -203,7 +203,7 @@ class DataExtension extends MetadataType { } } /** - * helper for {@link upsert} + * helper for {@link DataExtension.upsert} * * @private * @param {object} res - @@ -607,7 +607,7 @@ class DataExtension extends MetadataType { metadata[customerKey].Fields = fieldArr; } /** - * helper for {@link super.updateREST} and {@link super.updateSOAP} that removes old files after the key was changed + * helper for {@link MetadataType.updateREST} and {@link MetadataType.updateSOAP} that removes old files after the key was changed * * @private * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry diff --git a/lib/metadataTypes/Event.js b/lib/metadataTypes/Event.js index 3062acfd4..864cd925c 100644 --- a/lib/metadataTypes/Event.js +++ b/lib/metadataTypes/Event.js @@ -136,12 +136,11 @@ class Event extends MetadataType { * @param {TYPE.MetadataTypeMap} metadata metadata mapped by their keyField * @param {string} deployDir directory where deploy metadata are saved * @param {string} retrieveDir directory where metadata after deploy should be saved - * @param {boolean} [isRefresh] optional flag - so far not used by eventDefinition * @returns {Promise.} Promise of keyField => metadata map */ - static async deploy(metadata, deployDir, retrieveDir, isRefresh) { + static async deploy(metadata, deployDir, retrieveDir) { Util.logBeta(this.definition.type); - return super.deploy(metadata, deployDir, retrieveDir, isRefresh); + return super.deploy(metadata, deployDir, retrieveDir); } /** diff --git a/lib/metadataTypes/Folder.js b/lib/metadataTypes/Folder.js index e1f691873..829800112 100644 --- a/lib/metadataTypes/Folder.js +++ b/lib/metadataTypes/Folder.js @@ -452,7 +452,7 @@ class Folder extends MetadataType { * Returns file contents mapped to their filename without '.json' ending * * @param {string} dir directory that contains '.json' files to be read - * @param {boolean} [listBadKeys=false] do not print errors, used for badKeys() + * @param {boolean} [listBadKeys] do not print errors, used for badKeys() * @returns {TYPE.MetadataTypeMap} fileName => fileContent map */ static getJsonFromFS(dir, listBadKeys) { diff --git a/lib/metadataTypes/Journey.js b/lib/metadataTypes/Journey.js index 7c20b2ee3..1719dfe47 100644 --- a/lib/metadataTypes/Journey.js +++ b/lib/metadataTypes/Journey.js @@ -216,12 +216,11 @@ class Journey extends MetadataType { * @param {TYPE.MetadataTypeMap} metadata metadata mapped by their keyField * @param {string} deployDir directory where deploy metadata are saved * @param {string} retrieveDir directory where metadata after deploy should be saved - * @param {boolean} [isRefresh] optional flag - so far not used by interaction * @returns {Promise.} Promise of keyField => metadata map */ - static async deploy(metadata, deployDir, retrieveDir, isRefresh) { + static async deploy(metadata, deployDir, retrieveDir) { Util.logBeta(this.definition.type); - return super.deploy(metadata, deployDir, retrieveDir, isRefresh); + return super.deploy(metadata, deployDir, retrieveDir); } /** @@ -265,7 +264,7 @@ class Journey extends MetadataType { } /** - * helper for Journey's {@link saveResults}. 
Gets executed after retreive of metadata type and + * helper for Journey's {@link Journey.saveResults}. Gets executed after retreive of metadata type and * * @param {TYPE.MetadataTypeMap} metadataMap key=customer key, value=metadata */ @@ -478,7 +477,7 @@ class Journey extends MetadataType { return metadata; } /** - * helper for {@link postRetrieveTasks} + * helper for {@link Journey.postRetrieveTasks} * * @private * @param {TYPE.MetadataTypeItem} metadata a single item @@ -800,7 +799,7 @@ class Journey extends MetadataType { } /** - * helper for {@link preDeployTasks} + * helper for {@link Journey.preDeployTasks} * * @private * @param {TYPE.MetadataTypeItem} metadata a single item diff --git a/lib/metadataTypes/MetadataType.js b/lib/metadataTypes/MetadataType.js index 92dbdc169..60d273fce 100644 --- a/lib/metadataTypes/MetadataType.js +++ b/lib/metadataTypes/MetadataType.js @@ -35,7 +35,7 @@ class MetadataType { * Returns file contents mapped to their filename without '.json' ending * * @param {string} dir directory that contains '.json' files to be read - * @param {boolean} [listBadKeys=false] do not print errors, used for badKeys() + * @param {boolean} [listBadKeys] do not print errors, used for badKeys() * @returns {TYPE.MetadataTypeMap} fileName => fileContent map */ static getJsonFromFS(dir, listBadKeys) { @@ -108,11 +108,10 @@ class MetadataType { * @param {TYPE.MetadataTypeMap} metadata metadata mapped by their keyField * @param {string} deployDir directory where deploy metadata are saved * @param {string} retrieveDir directory where metadata after deploy should be saved - * @param {boolean} [isRefresh] optional flag to indicate that triggeredSend should be refreshed after deployment of assets * @returns {Promise.} Promise of keyField => metadata map */ - static async deploy(metadata, deployDir, retrieveDir, isRefresh) { - const upsertResults = await this.upsert(metadata, deployDir, isRefresh); + static async deploy(metadata, deployDir, retrieveDir) { + const upsertResults = await this.upsert(metadata, deployDir); const savedMetadata = await this.saveResults(upsertResults, retrieveDir, null); if ( this.properties.metaDataTypes.documentOnRetrieve.includes(this.definition.type) && @@ -131,13 +130,12 @@ class MetadataType { * @param {TYPE.MetadataTypeMap} upsertResults metadata mapped by their keyField as returned by update/create * @param {TYPE.MetadataTypeMap} originalMetadata metadata to be updated (contains additioanl fields) * @param {{created: number, updated: number}} createdUpdated counter representing successful creates/updates - * @param {boolean} [isRefresh] optional flag to indicate that triggeredSend should be refreshed after deployment of assets * @returns {void} */ - static postDeployTasks(upsertResults, originalMetadata, createdUpdated, isRefresh) {} + static postDeployTasks(upsertResults, originalMetadata, createdUpdated) {} /** - * helper for {@link createREST} + * helper for {@link MetadataType.createREST} * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -146,7 +144,7 @@ class MetadataType { static postCreateTasks(metadataEntry, apiResponse) {} /** - * helper for {@link updateREST} + * helper for {@link MetadataType.updateREST} * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -155,7 +153,7 @@ class MetadataType { static postUpdateTasks(metadataEntry, apiResponse) {} /** - * helper for 
{@link createREST} when legacy API endpoints as these do not return the created item but only their new id + * helper for {@link MetadataType.createREST} when legacy API endpoints as these do not return the created item but only their new id * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -465,6 +463,17 @@ class MetadataType { ); return; } + /** + * Abstract pause method that needs to be implemented in child metadata type + * + * @returns {void} + */ + static pause() { + Util.logger.error( + ` ☇ skipping ${this.definition.type}: pause is not supported yet for ${this.definition.type}` + ); + return; + } /** * test if metadata was actually changed or not to potentially skip it during deployment @@ -537,10 +546,9 @@ class MetadataType { * * @param {TYPE.MetadataTypeMap} metadataMap metadata mapped by their keyField * @param {string} deployDir directory where deploy metadata are saved - * @param {boolean} [isRefresh] optional flag to indicate that triggeredSend should be refreshed after deployment of assets * @returns {Promise.} keyField => metadata map */ - static async upsert(metadataMap, deployDir, isRefresh) { + static async upsert(metadataMap, deployDir) { const orignalMetadataMap = JSON.parse(JSON.stringify(metadataMap)); const metadataToUpdate = []; const metadataToCreate = []; @@ -632,12 +640,10 @@ class MetadataType { const metadataResults = createResults.concat(updateResults).filter(Boolean); upsertResults = this.parseResponseBody(metadataResults); } - await this.postDeployTasks( - upsertResults, - orignalMetadataMap, - { created: createResults.length, updated: updateResults.length }, - isRefresh - ); + await this.postDeployTasks(upsertResults, orignalMetadataMap, { + created: createResults.length, + updated: updateResults.length, + }); return upsertResults; } @@ -869,7 +875,7 @@ class MetadataType { * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {string} uri rest endpoint for PATCH - * @param {'patch'|'post'|'put'} [httpMethod='patch'] defaults to 'patch'; some update requests require PUT instead of PATCH + * @param {'patch'|'post'|'put'} [httpMethod] defaults to 'patch'; some update requests require PUT instead of PATCH * @returns {Promise. 
| null} Promise of API response or null in case of an error */ static async updateREST(metadataEntry, uri, httpMethod = 'patch') { @@ -904,7 +910,7 @@ class MetadataType { } /** - * helper for {@link updateREST} and {@link updateSOAP} that removes old files after the key was changed + * helper for {@link MetadataType.updateREST} and {@link MetadataType.updateSOAP} that removes old files after the key was changed * * @private * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry @@ -985,7 +991,7 @@ class MetadataType { } } /** - * helper for {@link _handleSOAPErrors} + * helper for {@link MetadataType._handleSOAPErrors} * * @param {Error} ex error that occured * @returns {string} error message @@ -1084,7 +1090,7 @@ class MetadataType { try { const response = await this.client.rest.post(uri, {}); // payload is empty for this request if (response === 'OK') { - Util.logger.info(`Executed ${this.definition.type}: ${key}`); + Util.logger.info(` - executed ${this.definition.type}: ${key}`); } else { throw new Error(response); } @@ -1095,7 +1101,7 @@ class MetadataType { } /** - * helper for {@link retrieveREST} and {@link retrieveSOAP} + * helper for {@link MetadataType.retrieveREST} and {@link MetadataType.retrieveSOAP} * * @param {string|number} [singleRetrieve] key of single item to filter by * @param {TYPE.MetadataTypeMap} metadataMap saved metadata @@ -1298,7 +1304,7 @@ class MetadataType { * * @static * @param {TYPE.MetadataTypeItem} metadataEntry metadata entry - * @param {boolean} [include=false] true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude + * @param {boolean} [include] true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude * @returns {boolean} true: skip saving == filtered; false: continue with saving * @memberof MetadataType */ @@ -1343,7 +1349,7 @@ class MetadataType { * * @static * @param {object} metadataEntry metadata entry - * @param {boolean} [include=false] true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude + * @param {boolean} [include] true: use definition.include / options.include; false=exclude: use definition.filter / options.exclude * @returns {boolean} true: filtered == do NOT save; false: not filtered == do save * @memberof MetadataType */ @@ -1575,6 +1581,10 @@ class MetadataType { } } + if (Util.OPTIONS.like && !Util.fieldsLike(results[originalKey])) { + Util.logger.debug(`Filtered ${originalKey} because of --like option`); + continue; + } // we dont store Id on local disk, but we need it for caching logic, // so its in retrieve but not in save. Here we put into the clone so that the original // object used for caching doesnt have the Id removed. 
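
The actual implementation of Util.fieldsLike used in the new saveResults filter above is not part of this diff. Based on the CLI help text for --like ("% as wildcard, _ for a single character") and the Object.keys(Util.OPTIONS.like) checks in lib/index.js, a matcher with equivalent behaviour could look like the following minimal sketch; it is illustrative only, and the likeOptions shape (field name mapped to pattern) is an assumption, not the shipped API:

    // illustrative sketch, NOT the shipped Util.fieldsLike implementation
    function fieldsLike(item, likeOptions) {
        // likeOptions is assumed to map field names to like-patterns, e.g. { name: 'dataFlow%' }
        return Object.entries(likeOptions).every(([field, pattern]) => {
            if (item[field] == null) {
                return false;
            }
            // translate SQL-LIKE wildcards: % = any sequence, _ = exactly one character
            const regex = new RegExp(
                '^' +
                    String(pattern)
                        .replace(/[.*+?^${}()|[\]\\]/g, '\\$&') // escape regex special characters first
                        .replace(/%/g, '.*')
                        .replace(/_/g, '.') +
                    '$',
                'i'
            );
            return regex.test(String(item[field]));
        });
    }

Under this sketch, fieldsLike({ name: 'dataFlow_query' }, { name: 'dataFlow%' }) returns true, while an item that matches none of the given patterns is skipped by the continue statement in saveResults.
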
@@ -1621,7 +1631,7 @@ class MetadataType { return savedResults; } /** - * helper for {@link buildDefinitionForNested} + * helper for {@link MetadataType.buildDefinitionForNested} * searches extracted file for template variable names and applies the market values * * @param {string} code code from extracted code @@ -1633,7 +1643,7 @@ class MetadataType { return Mustache.render(code, templateVariables, {}, ['{{{', '}}}']); } /** - * helper for {@link buildTemplateForNested} + * helper for {@link MetadataType.buildTemplateForNested} * searches extracted file for template variable values and applies the market variable names * * @param {string} code code from extracted code @@ -1645,7 +1655,7 @@ class MetadataType { return Util.replaceByObject(code, templateVariables); } /** - * helper for {@link buildDefinition} + * helper for {@link MetadataType.buildDefinition} * handles extracted code if any are found for complex types (e.g script, asset, query) * * @param {string} templateDir Directory where metadata templates are stored @@ -1666,7 +1676,7 @@ class MetadataType { return null; } /** - * helper for {@link buildTemplate} + * helper for {@link MetadataType.buildTemplate} * handles extracted code if any are found for complex types * * @param {string} templateDir Directory where metadata templates are stored @@ -1959,7 +1969,7 @@ class MetadataType { * Returns metadata of a business unit that is saved locally * * @param {string} readDir root directory of metadata. - * @param {boolean} [listBadKeys=false] do not print errors, used for badKeys() + * @param {boolean} [listBadKeys] do not print errors, used for badKeys() * @param {object} [buMetadata] Metadata of BU in local directory * @returns {object} Metadata of BU in local directory */ diff --git a/lib/metadataTypes/MobileKeyword.js b/lib/metadataTypes/MobileKeyword.js index 02ade31c3..12b796bc0 100644 --- a/lib/metadataTypes/MobileKeyword.js +++ b/lib/metadataTypes/MobileKeyword.js @@ -96,7 +96,7 @@ class MobileKeyword extends MetadataType { } /** - * helper for {@link parseResponseBody} that creates a custom key field for this type based on mobileCode and keyword + * helper for {@link MobileKeyword.parseResponseBody} that creates a custom key field for this type based on mobileCode and keyword * * @private * @param {TYPE.MetadataType} metadata single item @@ -106,7 +106,7 @@ class MobileKeyword extends MetadataType { } /** - * helper for {@link preDeployTasks} and {@link createOrUpdate} to ensure we have code & keyword properly set + * helper for {@link MobileKeyword.preDeployTasks} and {@link MobileKeyword.createOrUpdate} to ensure we have code & keyword properly set * * @private * @param {TYPE.MetadataType} metadata single item @@ -192,7 +192,7 @@ class MobileKeyword extends MetadataType { } /** - * helper for {@link retrieve} and {@link retrieveAsTemplate} + * helper for {@link MobileKeyword.retrieve} and {@link MobileKeyword.retrieveAsTemplate} * * @private * @param {string} key customer key of single item to retrieve / name of the metadata file @@ -281,7 +281,7 @@ class MobileKeyword extends MetadataType { } } /** - * helper for {@link parseMetadata} and {@link _buildForNested} + * helper for {@link MobileKeyword.postRetrieveTasks} and {@link MobileKeyword._buildForNested} * * @param {string} metadataScript the code of the file * @returns {{fileExt:string,code:string}} returns found extension and file content @@ -349,7 +349,7 @@ class MobileKeyword extends MetadataType { } /** - * helper for {@link buildTemplateForNested} / {@link 
buildDefinitionForNested} + * helper for {@link MobileKeyword.buildTemplateForNested} / {@link MobileKeyword.buildDefinitionForNested} * handles extracted code if any are found for complex types * * @param {string} templateDir Directory where metadata templates are stored @@ -445,7 +445,7 @@ class MobileKeyword extends MetadataType { return metadata; } /** - * helper for {@link createREST} + * helper for {@link MetadataType.createREST} * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -455,7 +455,7 @@ class MobileKeyword extends MetadataType { await super.postDeployTasks_legacyApi(metadataEntry, apiResponse); } /** - * helper for {@link updateREST} + * helper for {@link MetadataType.updateREST} * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -466,7 +466,7 @@ class MobileKeyword extends MetadataType { } /** - * helper for {@link preDeployTasks} that loads extracted code content back into JSON + * helper for {@link MobileKeyword.preDeployTasks} that loads extracted code content back into JSON * * @param {TYPE.MetadataTypeItem} metadata a single definition * @param {string} deployDir directory of deploy files diff --git a/lib/metadataTypes/MobileMessage.js b/lib/metadataTypes/MobileMessage.js index a9a1f8227..8514b5cd0 100644 --- a/lib/metadataTypes/MobileMessage.js +++ b/lib/metadataTypes/MobileMessage.js @@ -83,7 +83,7 @@ class MobileMessage extends MetadataType { return super.createREST(metadata, '/legacy/v1/beta/mobile/message/'); } /** - * helper for {@link preDeployTasks} that loads extracted code content back into JSON + * helper for {@link MobileMessage.preDeployTasks} that loads extracted code content back into JSON * * @param {TYPE.MetadataTypeItem} metadata a single definition * @param {string} deployDir directory of deploy files @@ -110,7 +110,7 @@ class MobileMessage extends MetadataType { } } /** - * helper for {@link parseMetadata} and {@link _buildForNested} + * helper for {@link MobileMessage.postRetrieveTasks} and {@link MobileMessage._buildForNested} * * @param {string} code the code of the file * @returns {{fileExt:string,code:string}} returns found extension and file content @@ -304,7 +304,7 @@ class MobileMessage extends MetadataType { return metadata; } /** - * helper for {@link createREST} + * helper for {@link MetadataType.createREST} * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -314,7 +314,7 @@ class MobileMessage extends MetadataType { await super.postDeployTasks_legacyApi(metadataEntry, apiResponse); } /** - * helper for {@link updateREST} + * helper for {@link MetadataType.updateREST} * * @param {TYPE.MetadataTypeItem} metadataEntry a single metadata Entry * @param {object} apiResponse varies depending on the API call @@ -380,7 +380,7 @@ class MobileMessage extends MetadataType { } /** - * helper for {@link buildTemplateForNested} / {@link buildDefinitionForNested} + * helper for {@link MobileMessage.buildTemplateForNested} / {@link MobileMessage.buildDefinitionForNested} * handles extracted code if any are found for complex types * * @param {string} templateDir Directory where metadata templates are stored diff --git a/lib/metadataTypes/Query.js b/lib/metadataTypes/Query.js index e91496131..4fe76d256 100644 --- a/lib/metadataTypes/Query.js +++ b/lib/metadataTypes/Query.js @@ -65,7 +65,7 @@ 
class Query extends MetadataType { objectId = await this._getObjectIdForSingleRetrieve(key); if (!objectId) { Util.logger.info(`Skipping ${key} - did not find an item with such key`); - break; + continue; } } results.push( @@ -250,7 +250,7 @@ class Query extends MetadataType { return metadata; } /** - * helper for {@link buildDefinitionForNested} + * helper for {@link Query.buildDefinitionForNested} * searches extracted SQL file for template variables and applies the market values * * @param {string} code code from extracted code @@ -328,7 +328,7 @@ class Query extends MetadataType { ); } /** - * helper for {@link buildTemplateForNested} / {@link buildDefinitionForNested} + * helper for {@link Query.buildTemplateForNested} / {@link Query.buildDefinitionForNested} * handles extracted code if any are found for complex types * * @private @@ -452,6 +452,17 @@ class Query extends MetadataType { // delete local copy: retrieve/cred/bu/.../...-meta.sql await super.postDeleteTasks(customerKey, [`${this.definition.type}-meta.sql`]); } + /** + * Gets executed after deployment of metadata type + * + * @param {TYPE.MetadataTypeMap} upsertResults metadata mapped by their keyField as returned by update/create + */ + static async postDeployTasks(upsertResults) { + if (Util.OPTIONS.execute) { + Util.logger.info(`Executing: ${this.definition.type}`); + await this.execute(Object.keys(upsertResults)); + } + } } // Assign definition & cache to static attributes diff --git a/lib/metadataTypes/Script.js b/lib/metadataTypes/Script.js index aec027bfb..1f33b1c1f 100644 --- a/lib/metadataTypes/Script.js +++ b/lib/metadataTypes/Script.js @@ -82,7 +82,7 @@ class Script extends MetadataType { } /** - * helper for {@link preDeployTasks} that loads extracted code content back into JSON + * helper for {@link Script.preDeployTasks} that loads extracted code content back into JSON * * @param {TYPE.ScriptItem} metadata a single asset definition * @param {string} deployDir directory of deploy files @@ -189,7 +189,7 @@ class Script extends MetadataType { } /** - * helper for {@link buildTemplateForNested} / {@link buildDefinitionForNested} + * helper for {@link Script.buildTemplateForNested} / {@link Script.buildDefinitionForNested} * handles extracted code if any are found for complex types * * @param {string} templateDir Directory where metadata templates are stored @@ -279,7 +279,7 @@ class Script extends MetadataType { return { json: metadata, codeArr: codeArr, subFolder: null }; } /** - * helper for {@link parseMetadata} and {@link _buildForNested} + * helper for {@link Script.parseMetadata} and {@link Script._buildForNested} * * @param {string} metadataScript the code of the file * @param {string} metadataName the name of the metadata diff --git a/lib/metadataTypes/TransactionalSMS.js b/lib/metadataTypes/TransactionalSMS.js index 4e9bfb3db..dd568c10c 100644 --- a/lib/metadataTypes/TransactionalSMS.js +++ b/lib/metadataTypes/TransactionalSMS.js @@ -66,7 +66,7 @@ class TransactionalSMS extends TransactionalMessage { return metadata; } /** - * helper for {@link preDeployTasks} that loads extracted code content back into JSON + * helper for {@link TransactionalSMS.preDeployTasks} that loads extracted code content back into JSON * * @param {TYPE.MetadataTypeItem} metadata a single definition * @param {string} deployDir directory of deploy files @@ -158,7 +158,7 @@ class TransactionalSMS extends TransactionalMessage { return { json: metadata, codeArr: codeArr, subFolder: null }; } /** - * helper for {@link parseMetadata} and 
{@link _buildForNested} + * helper for {@link TransactionalSMS.postRetrieveTasks} and {@link TransactionalSMS._buildForNested} * * @param {string} metadataScript the code of the file * @returns {{fileExt:string,code:string}} returns found extension and file content @@ -189,7 +189,7 @@ class TransactionalSMS extends TransactionalMessage { return { fileExt, code }; } /** - * helper for {@link MetadataType.buildDefinition} + * helper for {@link TransactionalMessage.buildDefinition} * handles extracted code if any are found for complex types * * @param {string} templateDir Directory where metadata templates are stored @@ -216,7 +216,7 @@ class TransactionalSMS extends TransactionalMessage { ); } /** - * helper for {@link MetadataType.buildTemplate} + * helper for {@link TransactionalMessage.buildTemplate} * handles extracted code if any are found for complex types * * @example scripts are saved as 1 json and 1 ssjs file. both files need to be run through templating @@ -245,7 +245,7 @@ class TransactionalSMS extends TransactionalMessage { } /** - * helper for {@link buildTemplateForNested} / {@link buildDefinitionForNested} + * helper for {@link TransactionalSMS.buildTemplateForNested} / {@link TransactionalSMS.buildDefinitionForNested} * handles extracted code if any are found for complex types * * @param {string} templateDir Directory where metadata templates are stored diff --git a/lib/metadataTypes/TriggeredSend.js b/lib/metadataTypes/TriggeredSend.js index 26b3ca33d..e0df317af 100644 --- a/lib/metadataTypes/TriggeredSend.js +++ b/lib/metadataTypes/TriggeredSend.js @@ -85,51 +85,19 @@ class TriggeredSend extends MetadataType { return super.deleteByKeySOAP(customerKey); } - /** - * manages post retrieve steps - * - * @param {TYPE.MetadataTypeItem} metadata a single query - * @returns {TYPE.MetadataTypeItem} Array with one metadata object and one query string - */ - static postRetrieveTasks(metadata) { - return this.parseMetadata(metadata); - } - /** - * generic script that retrieves the folder path from cache and updates the given metadata with it after retrieve - * - * @param {TYPE.MetadataTypeItem} metadata a single script activity definition - */ - static setFolderPath(metadata) { - try { - metadata.r__folder_Path = cache.searchForField( - 'folder', - metadata[this.definition.folderIdField], - 'ID', - 'Path' - ); - delete metadata[this.definition.folderIdField]; - } catch (ex) { - Util.logger.verbose( - ` - skipping ${this.definition.type} '${metadata[this.definition.nameField]}' (${ - metadata[this.definition.keyField] - }): Could not find folder (${ex.message})` - ); - throw ex; - } - } /** * parses retrieved Metadata before saving * - * @param {TYPE.MetadataTypeItem} metadata a single query activity definition + * @param {TYPE.MetadataTypeItem} metadata a single item * @returns {TYPE.MetadataTypeItem | void} Array with one metadata object and one sql string */ - static parseMetadata(metadata) { + static postRetrieveTasks(metadata) { // remove IsPlatformObject, always has to be 'false' delete metadata.IsPlatformObject; + // folder - try { - this.setFolderPath(metadata); - } catch { + this.setFolderPath(metadata); + if (!metadata.r__folder_Path) { Util.logger.verbose( ` ☇ skipping ${this.definition.typeName} '${metadata.Name}'/'${metadata.CustomerKey}': Could not find folder.` ); @@ -246,7 +214,7 @@ class TriggeredSend extends MetadataType { * TSD-specific refresh method that finds active TSDs and refreshes them * * @param {string[]} [keyArr] metadata keys - * @param {boolean} 
[checkKey=true] whether to check if the key is valid + * @param {boolean} [checkKey] whether to check if the key is valid * @returns {Promise.} - */ static async refresh(keyArr, checkKey = true) { @@ -268,7 +236,7 @@ class TriggeredSend extends MetadataType { } /** - * helper for {@link refresh} that extracts the keys from the TSD item map and eli + * helper for {@link TriggeredSend.refresh} that extracts the keys from the TSD item map and eli * * @param {TYPE.MetadataTypeMapObj} metadata TSD item map * @returns {Promise.} keyArr @@ -282,11 +250,12 @@ class TriggeredSend extends MetadataType { return keyArr; } /** - * helper for {@link refresh} that finds active TSDs on the server and filters it by the same rules that {@link retrieve} is using to avoid refreshing TSDs with broken dependencies + * helper for {@link TriggeredSend.refresh} that finds active TSDs on the server and filters it by the same rules that {@link TriggeredSend.retrieve} is using to avoid refreshing TSDs with broken dependencies * + * @param {boolean} [assetLoaded] if run after Asset.deploy via --refresh option this will skip caching assets * @returns {Promise.} Promise of TSD item map */ - static async findRefreshableItems() { + static async findRefreshableItems(assetLoaded = false) { Util.logger.info('Finding refreshable items...'); // cache dependencies to test for broken links // skip deprecated classic emails here, assuming they cannot be updated and hence are not relevant for {@link refresh} @@ -305,14 +274,20 @@ class TriggeredSend extends MetadataType { list: null, }; for (const [type, subTypeArr] of Object.entries(requiredCache)) { - if (!cache.getCache()?.[type]) { - Util.logger.info(` - Caching dependent Metadata: ${type}`); - Util.logSubtypes(subTypeArr); - cacheTypes[type].client = this.client; - cacheTypes[type].buObject = this.buObject; - cacheTypes[type].properties = this.properties; + if (type === 'asset' && assetLoaded) { + continue; + } + Util.logger.info(` - Caching dependent Metadata: ${type}`); + Util.logSubtypes(subTypeArr); + cacheTypes[type].client = this.client; + cacheTypes[type].buObject = this.buObject; + cacheTypes[type].properties = this.properties; - const result = await cacheTypes[type].retrieveForCache(null, subTypeArr); + const result = await cacheTypes[type].retrieveForCache(null, subTypeArr); + if (cache.getCache()?.[type]) { + // re-run caching to merge with existing cache, assuming we might have missed subtypes + cache.mergeMetadata(type, result.metadata); + } else { cache.setMetadata(type, result.metadata); } } @@ -329,7 +304,7 @@ class TriggeredSend extends MetadataType { } /** - * helper for {@link refresh} that pauses, publishes and starts a triggered send + * helper for {@link TriggeredSend.refresh} that pauses, publishes and starts a triggered send * * @param {string} key external key of triggered send item * @param {boolean} checkKey whether to check if key exists on the server diff --git a/lib/metadataTypes/User.js b/lib/metadataTypes/User.js index 59701f082..5d86c32b4 100644 --- a/lib/metadataTypes/User.js +++ b/lib/metadataTypes/User.js @@ -404,8 +404,11 @@ class User extends MetadataType { if (!metadata.Password) { metadata.Password = this._generatePassword(); Util.logger.info( - ` - Password for ${metadata.UserID} was not given. Generated password: ${metadata.Password}` + ` - Password for ${metadata.UserID} was not given. 
Generated password:` + ); + // use console.log here to print the generated password to bypass the logfile + // eslint-disable-next-line no-console + console.log(metadata.Password); + } } @@ -510,7 +513,7 @@ * @param {string} roleName role.Name * @param {number} userId user.AccountUserID * @param {boolean} assignmentOnly if true, only assignment configuration will be returned - * @param {boolean} [isRoleRemovale=false] if true, role will be removed from user; otherwise added + * @param {boolean} [isRoleRemovale] if true, role will be removed from user; otherwise added * @returns {object} format needed by API */ static _getRoleObjectForDeploy( @@ -737,7 +740,7 @@ } /** - * helper for {@link retrieveSOAP} + * helper for {@link User.retrieveSOAP} * * @private * @param {TYPE.SoapRequestParams} [requestParams] required for the specific request (filter for example) @@ -897,7 +900,7 @@ } } /** - * helper for {@link createOrUpdate} to generate a random initial password for new users + * helper for {@link User.createOrUpdate} to generate a random initial password for new users * note: possible minimum length values in SFMC are 6, 8, 10, 15 chars. Therefore we should default here to 15 chars. * * @private diff --git a/lib/metadataTypes/definitions/Automation.definition.js b/lib/metadataTypes/definitions/Automation.definition.js index c7ab19a19..e34b258ca 100644 --- a/lib/metadataTypes/definitions/Automation.definition.js +++ b/lib/metadataTypes/definitions/Automation.definition.js @@ -42,12 +42,19 @@ module.exports = { keyField: 'key', nameField: 'name', folderIdField: 'categoryId', - createdDateField: 'createdDate', - createdNameField: 'createdByName', - lastmodDateField: 'lastSavedDate', - lastmodNameField: 'lastSavedByName', + createdDateField: 'createdDate', // only returned by upsert + createdNameField: 'createdByName', // only returned by upsert + lastmodDateField: 'lastSavedDate', // only returned by upsert + lastmodNameField: 'lastSavedByName', // only returned by upsert restPagination: true, maxKeyLength: 200, // confirmed max length + scheduleTypeMapping: { + MINUTELY: 1, + HOURLY: 2, + DAILY: 3, + WEEKLY: 4, + MONTHLY: 5, + }, statusMapping: { AwaitingTrigger: 7, Building: 1, @@ -336,31 +343,31 @@ module.exports = { lastSavedDate: { isCreateable: false, isUpdateable: false, - retrieving: true, + retrieving: false, // only returned by upsert template: false, }, lastSavedByName: { isCreateable: false, isUpdateable: false, - retrieving: true, + retrieving: false, // only returned by upsert template: false, }, createdDate: { isCreateable: false, isUpdateable: false, - retrieving: true, + retrieving: false, // only returned by upsert template: false, }, createdByName: { isCreateable: false, isUpdateable: false, - retrieving: true, + retrieving: false, // only returned by upsert template: false, }, updateInProgress: { isCreateable: false, isUpdateable: false, - retrieving: true, + retrieving: false, template: false, }, name: { @@ -474,8 +481,8 @@ 'schedule.timezoneId': { isCreateable: true, isUpdateable: true, - retrieving: true, - template: true, + retrieving: false, + template: false, }, 'schedule.timezoneName': { isCreateable: true, @@ -484,10 +491,10 @@ template: true, }, 'schedule.typeId': { - isCreateable: true, - isUpdateable: true, - retrieving: true, - template: true, + isCreateable: false, + isUpdateable: false, + retrieving: false, + template: false,
}, status: { isCreateable: true, diff --git a/lib/util/cache.js b/lib/util/cache.js index bcea3a35d..b95931e3e 100644 --- a/lib/util/cache.js +++ b/lib/util/cache.js @@ -72,10 +72,15 @@ module.exports = { * @returns {void} */ mergeMetadata: (type, metadataMap, overrideMID) => { - dataStore[overrideMID || currentMID][type] = Object.assign( - metadataMap, - dataStore[currentMID][type] - ); + // ensure cache exists for type + dataStore[currentMID][type] ||= {}; + // if overrideMID is provided, create a copy of current MID cache + if (overrideMID) { + // ! needs to be verified if this is actually needed. When discovering an issue with this method actually overwriting metadataMap, this copy-logic was present and I did not want to break things + dataStore[overrideMID][type] = Object.assign({}, dataStore[currentMID][type]); + } + // merge metadataMap into existing cache + Object.assign(dataStore[overrideMID || currentMID][type] || {}, metadataMap); }, /** * standardized method for getting data from cache. diff --git a/lib/util/file.js b/lib/util/file.js index 89206f83e..d164a943c 100644 --- a/lib/util/file.js +++ b/lib/util/file.js @@ -381,7 +381,7 @@ const File = { * @param {string | string[]} directory directory where the file is stored * @param {string} filename name of the file without '.json' ending * @param {string} filetype filetype suffix - * @param {string} [encoding='utf8'] read file with encoding (defaults to utf-8) + * @param {string} [encoding] read file with encoding (defaults to utf-8) * @returns {Promise. | void} file contents; void on error */ readFilteredFilename: function (directory, filename, filetype, encoding) { @@ -514,7 +514,7 @@ const File = { /** * Initalises Prettier formatting lib async. * - * @param {string} [filetype='html'] filetype ie. JSON or SSJS + * @param {string} [filetype] filetype ie.
JSON or SSJS * @returns {Promise.} success of config load */ async initPrettier(filetype = 'html') { diff --git a/lib/util/util.js b/lib/util/util.js index a5bfe7cf3..e552fe382 100644 --- a/lib/util/util.js +++ b/lib/util/util.js @@ -229,7 +229,7 @@ const Util = { /** * wrapper around our standard winston logging to console and logfile * - * @param {boolean} [noLogFile=false] optional flag to indicate if we should log to file; CLI logs are always on + * @param {boolean} [noLogFile] optional flag to indicate if we should log to file; CLI logs are always on * @returns {object} initiated logger for console and file */ _createNewLoggerTransport: function (noLogFile = false) { @@ -296,8 +296,8 @@ const Util = { /** * initiate winston logger * - * @param {boolean} [restart=false] if true, logger will be restarted; otherwise, an existing logger will be used - * @param {boolean} [noLogFile=false] if false, logger will log to file; otherwise, only to console + * @param {boolean} [restart] if true, logger will be restarted; otherwise, an existing logger will be used + * @param {boolean} [noLogFile] if false, logger will log to file; otherwise, only to console * @returns {void} */ startLogger: function (restart = false, noLogFile = false) { @@ -780,6 +780,60 @@ const Util = { // no script found return null; }, + /** + * allows us to filter just like with SQL's LIKE operator + * + * @param {string} testString field value to test + * @param {string} search search string in SQL LIKE format + * @returns {boolean} true if testString matches search + */ + stringLike(testString, search) { + if (typeof search !== 'string' || this === null) { + return false; + } + // Remove special chars + search = search.replaceAll( + new RegExp('([\\.\\\\\\+\\*\\?\\[\\^\\]\\$\\(\\)\\{\\}\\=\\!\\<\\>\\|\\:\\-])', 'g'), + '\\$1' + ); + // Replace % and _ with equivalent regex + search = search.replaceAll('%', '.*').replaceAll('_', '.'); + // Check matches + return new RegExp('^' + search + '$', 'gi').test(testString); + }, + /** + * returns true if no LIKE filter is defined or if all filters match + * + * @param {TYPE.MetadataTypeItem} metadata a single metadata item + * @param {object} [filters] only used in recursive calls + * @returns {boolean} true if no LIKE filter is defined or if all filters match + */ + fieldsLike(metadata, filters) { + if (metadata.json && metadata.codeArr) { + // Compensate for CodeExtractItem format + metadata = metadata.json; + } + filters ||= Util.OPTIONS.like; + if (!filters) { + return true; + } + const fields = Object.keys(filters); + return fields.every((field) => { + const filter = filters[field]; + if (Array.isArray(metadata[field])) { + return metadata[field].some((f) => Util.fieldsLike(f, filter)); + } else { + if (typeof filter === 'string') { + return Util.stringLike(metadata[field], filter); + } else if (Array.isArray(filter)) { + return filter.some((f) => Util.stringLike(metadata[field], f)); + } else if (typeof filter === 'object') { + return Util.fieldsLike(metadata[field], filter); + } + } + return false; + }); + }, }; Util.startLogger(false, true); diff --git a/package-lock.json b/package-lock.json index f9ca5851b..6bb4830b9 100644 --- a/package-lock.json +++ b/package-lock.json @@ -22,7 +22,7 @@ "p-limit": "3.1.0", "prettier": "2.8.8", "prettier-plugin-sql": "0.14.0", - "semver": "7.5.0", + "semver": "7.5.3", "sfmc-sdk": "1.0.1", "simple-git": "3.18.0", "toposort": "2.0.2", @@ -35,20 +35,20 @@ }, "devDependencies": { "assert": "2.0.0", - "axios-mock-adapter": "1.21.3", + 
"axios-mock-adapter": "1.21.5", "chai": "4.3.7", "chai-files": "1.4.0", - "eslint": "8.42.0", + "eslint": "8.44.0", "eslint-config-prettier": "8.7.0", "eslint-config-ssjs": "1.1.11", - "eslint-plugin-jsdoc": "46.2.5", + "eslint-plugin-jsdoc": "46.4.3", "eslint-plugin-mocha": "10.1.0", "eslint-plugin-prettier": "4.2.1", "eslint-plugin-unicorn": "47.0.0", - "fast-xml-parser": "4.2.4", + "fast-xml-parser": "4.2.5", "husky": "8.0.3", "jsdoc-to-markdown": "8.0.0", - "lint-staged": "13.2.2", + "lint-staged": "13.2.3", "mocha": "10.2.0", "mock-fs": "5.2.0", "npm-check": "6.0.1", @@ -67,6 +67,15 @@ "fsevents": "*" } }, + "node_modules/@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/@ampproject/remapping": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", @@ -545,14 +554,14 @@ } }, "node_modules/@eslint/eslintrc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.3.tgz", - "integrity": "sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.0.tgz", + "integrity": "sha512-Lj7DECXqIVCqnqjjHMPna4vn6GJcMgul/wuS0je9OZ9gsL0zzDpKPVtcG1HaDVc+9y+qgXneTeUMbCqXJNpH1A==", "dev": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.5.2", + "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", @@ -590,9 +599,9 @@ "dev": true }, "node_modules/@eslint/js": { - "version": "8.42.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.42.0.tgz", - "integrity": "sha512-6SWlXpWU5AvId8Ac7zjzmIOqMOba/JWY8XZ4A7q7Gn1Vlfg/SFFIlrtHXt9nPn4op9ZPAkl91Jao+QQv3r/ukw==", + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", + "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1136,9 +1145,9 @@ "dev": true }, "node_modules/acorn": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz", - "integrity": "sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==", + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", "dev": true, "bin": { "acorn": "bin/acorn" @@ -1449,9 +1458,9 @@ } }, "node_modules/axios-mock-adapter": { - "version": "1.21.3", - "resolved": "https://registry.npmjs.org/axios-mock-adapter/-/axios-mock-adapter-1.21.3.tgz", - "integrity": "sha512-cdrQs/BqiJq4qn5EvaUUKDCaSor2j0KKW5tMtq5lqfTauxLRownBlzUNoLe+WKRoDrrXXXbtXTbmKpWFdL/NJw==", + "version": "1.21.5", + "resolved": "https://registry.npmjs.org/axios-mock-adapter/-/axios-mock-adapter-1.21.5.tgz", + "integrity": "sha512-5NI1V/VK+8+JeTF8niqOowuysA4b8mGzdlMN/QnTnoXbYh4HZSNiopsDclN2g/m85+G++IrEtUdZaQ3GnaMsSA==", "dev": true, "dependencies": { "fast-deep-equal": "^3.1.3", @@ -3138,15 +3147,15 @@ } }, "node_modules/eslint": { - "version": "8.42.0", - "resolved": 
"https://registry.npmjs.org/eslint/-/eslint-8.42.0.tgz", - "integrity": "sha512-ulg9Ms6E1WPf67PHaEY4/6E2tEn5/f7FXGzr3t9cBMugOmf1INYvuUwwh1aXQN4MfJ6a5K2iNwP3w4AColvI9A==", + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", + "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.0.3", - "@eslint/js": "8.42.0", + "@eslint/eslintrc": "^2.1.0", + "@eslint/js": "8.44.0", "@humanwhocodes/config-array": "^0.11.10", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -3158,7 +3167,7 @@ "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.0", "eslint-visitor-keys": "^3.4.1", - "espree": "^9.5.2", + "espree": "^9.6.0", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -3178,7 +3187,7 @@ "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.9.1", + "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" @@ -3221,9 +3230,9 @@ } }, "node_modules/eslint-plugin-jsdoc": { - "version": "46.2.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-46.2.5.tgz", - "integrity": "sha512-Rmd0pb6S5fv9/lGbJMiVUZn56XvjKTGQoq9H5yfNjj6jcJHkTaq+Pqj2KHK/8EO01f8auFFy2kNL64cFisMEDw==", + "version": "46.4.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-46.4.3.tgz", + "integrity": "sha512-Prc7ol+vCIghPeECpwZq5+P+VZfoi87suywvbYCiCnkI1kTmVSdcOC2M8mioglWxBbd28wbb1OVjg/8OzGzatA==", "dev": true, "dependencies": { "@es-joy/jsdoccomment": "~0.39.4", @@ -3243,21 +3252,6 @@ "eslint": "^7.0.0 || ^8.0.0" } }, - "node_modules/eslint-plugin-jsdoc/node_modules/semver": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", - "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/eslint-plugin-mocha": { "version": "10.1.0", "resolved": "https://registry.npmjs.org/eslint-plugin-mocha/-/eslint-plugin-mocha-10.1.0.tgz", @@ -3433,12 +3427,12 @@ "dev": true }, "node_modules/espree": { - "version": "9.5.2", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.2.tgz", - "integrity": "sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==", + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.0.tgz", + "integrity": "sha512-1FH/IiruXZ84tpUlm0aCUEwMl2Ho5ilqVh0VvQXw+byAz/4SAciyHLlfmL5WYqsvD38oymdUwBss0LtK8m4s/A==", "dev": true, "dependencies": { - "acorn": "^8.8.0", + "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.4.1" }, @@ -3633,13 +3627,13 @@ "node_modules/fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, "node_modules/fast-xml-parser": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.4.tgz", - "integrity": 
"sha512-fbfMDvgBNIdDJLdLOwacjFAPYt67tr31H9ZhWSm45CDAxvd0I6WTlSOUo7K2P/K5sA5JgMKG64PI3DMcaFdWpQ==", + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz", + "integrity": "sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==", "dev": true, "funding": [ { @@ -5736,9 +5730,9 @@ } }, "node_modules/lint-staged": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.2.tgz", - "integrity": "sha512-71gSwXKy649VrSU09s10uAT0rWCcY3aewhMaHyl2N84oBk4Xs9HgxvUp3AYu+bNsK4NrOYYxvSgg7FyGJ+jGcA==", + "version": "13.2.3", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.3.tgz", + "integrity": "sha512-zVVEXLuQIhr1Y7R7YAWx4TZLdvuzk7DnmrsTNL0fax6Z3jrpFcas+vKbzxhhvp6TA55m1SQuWkpzI1qbfDZbAg==", "dev": true, "dependencies": { "chalk": "5.2.0", @@ -7407,17 +7401,17 @@ } }, "node_modules/optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "dev": true, "dependencies": { + "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" + "type-check": "^0.4.0" }, "engines": { "node": ">= 0.8.0" @@ -8666,9 +8660,9 @@ } }, "node_modules/semver": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz", - "integrity": "sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", + "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", "dependencies": { "lru-cache": "^6.0.0" }, @@ -9791,15 +9785,6 @@ "node": ">= 6.4.0" } }, - "node_modules/word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", @@ -9989,6 +9974,12 @@ } }, "dependencies": { + "@aashutoshrathi/word-wrap": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", + "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", + "dev": true + }, "@ampproject/remapping": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.2.0.tgz", @@ -10359,14 +10350,14 @@ "dev": true }, "@eslint/eslintrc": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.3.tgz", - "integrity": "sha512-+5gy6OQfk+xx3q0d6jGZZC3f3KzAkXc/IanVxd1is/VIIziRqqt3ongQz0FiTUXqTk0c7aDB3OaFuKnuSoJicQ==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.0.tgz", + "integrity": 
"sha512-Lj7DECXqIVCqnqjjHMPna4vn6GJcMgul/wuS0je9OZ9gsL0zzDpKPVtcG1HaDVc+9y+qgXneTeUMbCqXJNpH1A==", "dev": true, "requires": { "ajv": "^6.12.4", "debug": "^4.3.2", - "espree": "^9.5.2", + "espree": "^9.6.0", "globals": "^13.19.0", "ignore": "^5.2.0", "import-fresh": "^3.2.1", @@ -10396,9 +10387,9 @@ } }, "@eslint/js": { - "version": "8.42.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.42.0.tgz", - "integrity": "sha512-6SWlXpWU5AvId8Ac7zjzmIOqMOba/JWY8XZ4A7q7Gn1Vlfg/SFFIlrtHXt9nPn4op9ZPAkl91Jao+QQv3r/ukw==", + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.44.0.tgz", + "integrity": "sha512-Ag+9YM4ocKQx9AarydN0KY2j0ErMHNIocPDrVo8zAE44xLTjEtz81OdR68/cydGtk6m6jDb5Za3r2useMzYmSw==", "dev": true }, "@humanwhocodes/config-array": { @@ -10825,9 +10816,9 @@ "dev": true }, "acorn": { - "version": "8.8.0", - "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.8.0.tgz", - "integrity": "sha512-QOxyigPVrpZ2GXT+PFyZTl6TtOFc5egxHIP9IlQ+RbupQuX4RkT/Bee4/kQuC02Xkzg84JcT7oLYtDIQxp+v7w==", + "version": "8.10.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.10.0.tgz", + "integrity": "sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==", "dev": true }, "acorn-jsx": { @@ -11050,9 +11041,9 @@ } }, "axios-mock-adapter": { - "version": "1.21.3", - "resolved": "https://registry.npmjs.org/axios-mock-adapter/-/axios-mock-adapter-1.21.3.tgz", - "integrity": "sha512-cdrQs/BqiJq4qn5EvaUUKDCaSor2j0KKW5tMtq5lqfTauxLRownBlzUNoLe+WKRoDrrXXXbtXTbmKpWFdL/NJw==", + "version": "1.21.5", + "resolved": "https://registry.npmjs.org/axios-mock-adapter/-/axios-mock-adapter-1.21.5.tgz", + "integrity": "sha512-5NI1V/VK+8+JeTF8niqOowuysA4b8mGzdlMN/QnTnoXbYh4HZSNiopsDclN2g/m85+G++IrEtUdZaQ3GnaMsSA==", "dev": true, "requires": { "fast-deep-equal": "^3.1.3", @@ -12335,15 +12326,15 @@ "dev": true }, "eslint": { - "version": "8.42.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.42.0.tgz", - "integrity": "sha512-ulg9Ms6E1WPf67PHaEY4/6E2tEn5/f7FXGzr3t9cBMugOmf1INYvuUwwh1aXQN4MfJ6a5K2iNwP3w4AColvI9A==", + "version": "8.44.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.44.0.tgz", + "integrity": "sha512-0wpHoUbDUHgNCyvFB5aXLiQVfK9B0at6gUvzy83k4kAsQ/u769TQDX6iKC+aO4upIHO9WSaA3QoXYQDHbNwf1A==", "dev": true, "requires": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.4.0", - "@eslint/eslintrc": "^2.0.3", - "@eslint/js": "8.42.0", + "@eslint/eslintrc": "^2.1.0", + "@eslint/js": "8.44.0", "@humanwhocodes/config-array": "^0.11.10", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -12355,7 +12346,7 @@ "escape-string-regexp": "^4.0.0", "eslint-scope": "^7.2.0", "eslint-visitor-keys": "^3.4.1", - "espree": "^9.5.2", + "espree": "^9.6.0", "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", @@ -12375,7 +12366,7 @@ "lodash.merge": "^4.6.2", "minimatch": "^3.1.2", "natural-compare": "^1.4.0", - "optionator": "^0.9.1", + "optionator": "^0.9.3", "strip-ansi": "^6.0.1", "strip-json-comments": "^3.1.0", "text-table": "^0.2.0" @@ -12416,9 +12407,9 @@ "requires": {} }, "eslint-plugin-jsdoc": { - "version": "46.2.5", - "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-46.2.5.tgz", - "integrity": "sha512-Rmd0pb6S5fv9/lGbJMiVUZn56XvjKTGQoq9H5yfNjj6jcJHkTaq+Pqj2KHK/8EO01f8auFFy2kNL64cFisMEDw==", + "version": "46.4.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jsdoc/-/eslint-plugin-jsdoc-46.4.3.tgz", + 
"integrity": "sha512-Prc7ol+vCIghPeECpwZq5+P+VZfoi87suywvbYCiCnkI1kTmVSdcOC2M8mioglWxBbd28wbb1OVjg/8OzGzatA==", "dev": true, "requires": { "@es-joy/jsdoccomment": "~0.39.4", @@ -12430,17 +12421,6 @@ "is-builtin-module": "^3.2.1", "semver": "^7.5.1", "spdx-expression-parse": "^3.0.1" - }, - "dependencies": { - "semver": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.1.tgz", - "integrity": "sha512-Wvss5ivl8TMRZXXESstBA4uR5iXgEN/VC5/sOcuXdVLzcdkz4HWetIoRfG5gb5X+ij/G9rw9YoGn3QoQ8OCSpw==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - } - } } }, "eslint-plugin-mocha": { @@ -12534,12 +12514,12 @@ "dev": true }, "espree": { - "version": "9.5.2", - "resolved": "https://registry.npmjs.org/espree/-/espree-9.5.2.tgz", - "integrity": "sha512-7OASN1Wma5fum5SrNhFMAMJxOUAbhyfQ8dQ//PJaJbNw0URTPWqIghHWt1MmAANKhHZIYOHruW4Kw4ruUWOdGw==", + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.0.tgz", + "integrity": "sha512-1FH/IiruXZ84tpUlm0aCUEwMl2Ho5ilqVh0VvQXw+byAz/4SAciyHLlfmL5WYqsvD38oymdUwBss0LtK8m4s/A==", "dev": true, "requires": { - "acorn": "^8.8.0", + "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", "eslint-visitor-keys": "^3.4.1" } @@ -12680,13 +12660,13 @@ "fast-levenshtein": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", - "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", "dev": true }, "fast-xml-parser": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.4.tgz", - "integrity": "sha512-fbfMDvgBNIdDJLdLOwacjFAPYt67tr31H9ZhWSm45CDAxvd0I6WTlSOUo7K2P/K5sA5JgMKG64PI3DMcaFdWpQ==", + "version": "4.2.5", + "resolved": "https://registry.npmjs.org/fast-xml-parser/-/fast-xml-parser-4.2.5.tgz", + "integrity": "sha512-B9/wizE4WngqQftFPmdaMYlXoJlJOYxGQOanC77fq9k8+Z0v5dDSVh+3glErdIROP//s/jgb7ZuxKfB8nVyo0g==", "dev": true, "requires": { "strnum": "^1.0.5" @@ -14187,9 +14167,9 @@ } }, "lint-staged": { - "version": "13.2.2", - "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.2.tgz", - "integrity": "sha512-71gSwXKy649VrSU09s10uAT0rWCcY3aewhMaHyl2N84oBk4Xs9HgxvUp3AYu+bNsK4NrOYYxvSgg7FyGJ+jGcA==", + "version": "13.2.3", + "resolved": "https://registry.npmjs.org/lint-staged/-/lint-staged-13.2.3.tgz", + "integrity": "sha512-zVVEXLuQIhr1Y7R7YAWx4TZLdvuzk7DnmrsTNL0fax6Z3jrpFcas+vKbzxhhvp6TA55m1SQuWkpzI1qbfDZbAg==", "dev": true, "requires": { "chalk": "5.2.0", @@ -15468,17 +15448,17 @@ } }, "optionator": { - "version": "0.9.1", - "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", - "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", + "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "dev": true, "requires": { + "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", "fast-levenshtein": "^2.0.6", "levn": "^0.4.1", "prelude-ls": "^1.2.1", - "type-check": "^0.4.0", - "word-wrap": "^1.2.3" + "type-check": "^0.4.0" } }, "ora": { @@ -16386,9 +16366,9 @@ } }, "semver": { - "version": "7.5.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.0.tgz", - "integrity": 
"sha512-+XC0AD/R7Q2mPSRuy2Id0+CGTZ98+8f+KvwirxOKIEyid+XSx6HbC63p+O4IndTHuX5Z+JxQ0TghCkO5Cg/2HA==", + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.5.3.tgz", + "integrity": "sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==", "requires": { "lru-cache": "^6.0.0" } @@ -17252,12 +17232,6 @@ "triple-beam": "^1.3.0" } }, - "word-wrap": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", - "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", - "dev": true - }, "wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", diff --git a/package.json b/package.json index e9975d472..d11bea506 100644 --- a/package.json +++ b/package.json @@ -71,7 +71,7 @@ "p-limit": "3.1.0", "prettier": "2.8.8", "prettier-plugin-sql": "0.14.0", - "semver": "7.5.0", + "semver": "7.5.3", "sfmc-sdk": "1.0.1", "simple-git": "3.18.0", "toposort": "2.0.2", @@ -81,20 +81,20 @@ }, "devDependencies": { "assert": "2.0.0", - "axios-mock-adapter": "1.21.3", + "axios-mock-adapter": "1.21.5", "chai": "4.3.7", "chai-files": "1.4.0", - "eslint": "8.42.0", + "eslint": "8.44.0", "eslint-config-prettier": "8.7.0", "eslint-config-ssjs": "1.1.11", - "eslint-plugin-jsdoc": "46.2.5", + "eslint-plugin-jsdoc": "46.4.3", "eslint-plugin-mocha": "10.1.0", "eslint-plugin-prettier": "4.2.1", "eslint-plugin-unicorn": "47.0.0", - "fast-xml-parser": "4.2.4", + "fast-xml-parser": "4.2.5", "husky": "8.0.3", "jsdoc-to-markdown": "8.0.0", - "lint-staged": "13.2.2", + "lint-staged": "13.2.3", "mocha": "10.2.0", "mock-fs": "5.2.0", "npm-check": "6.0.1", diff --git a/test/mockRoot/deploy/testInstance/testBU/automation/testExisting_automation.automation-meta.json b/test/mockRoot/deploy/testInstance/testBU/automation/testExisting_automation.automation-meta.json index c0781366e..51f7c3efd 100644 --- a/test/mockRoot/deploy/testInstance/testBU/automation/testExisting_automation.automation-meta.json +++ b/test/mockRoot/deploy/testInstance/testBU/automation/testExisting_automation.automation-meta.json @@ -7,8 +7,7 @@ "endDate": "2022-07-30T00:00:00", "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "timezoneName": "W. Europe Standard Time" }, "status": "PausedSchedule", "steps": [ diff --git a/test/mockRoot/deploy/testInstance/testBU/automation/testNew_automation.automation-meta.json b/test/mockRoot/deploy/testInstance/testBU/automation/testNew_automation.automation-meta.json index e1c0c355b..850ec8a24 100644 --- a/test/mockRoot/deploy/testInstance/testBU/automation/testNew_automation.automation-meta.json +++ b/test/mockRoot/deploy/testInstance/testBU/automation/testNew_automation.automation-meta.json @@ -4,13 +4,12 @@ "name": "testNew_automation", "r__folder_Path": "my automations", "schedule": { - "endDate": "2022-07-30T00:00:00", - "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", - "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "startDate": "2020-05-14T02:30:32.11", + "endDate": "2079-06-06T21:00:00", + "icalRecur": "FREQ=MINUTELY;UNTIL=20790607T050000;INTERVAL=5", + "timezoneName": "W. 
Europe Standard Time" }, - "status": "PausedSchedule", + "status": "Scheduled", "steps": [ { "activities": [ diff --git a/test/resourceFactory.js b/test/resourceFactory.js index 5cdd55508..766eced42 100644 --- a/test/resourceFactory.js +++ b/test/resourceFactory.js @@ -10,13 +10,14 @@ const attributeParser = new XMLParser({ ignoreAttributes: false }); * @param {string} mcdevAction SOAP action * @param {string} type metadata Type * @param {string} mid of Business Unit - * @param {object} filter likely for customer key + * @param {object|string} filter likely for customer key * @returns {string} relevant metadata stringified */ exports.loadSOAPRecords = async (mcdevAction, type, mid, filter) => { type = type[0].toLowerCase() + type.slice(1); const testPath = path.join('test', 'resources', mid.toString(), type, mcdevAction); - const filterPath = this.filterToPath(filter); + const filterPath = + typeof filter === 'string' && filter ? '-' + filter : this.filterToPath(filter); if (await fs.pathExists(testPath + filterPath + '-response.xml')) { return fs.readFile(testPath + filterPath + '-response.xml', { encoding: 'utf8', @@ -42,8 +43,8 @@ exports.loadSOAPRecords = async (mcdevAction, type, mid, filter) => { /* eslint-disable no-console */ console.log( `${color.bgRed}${color.fgBlack}test-error${color.reset}: Please create file ${ - testPath + filterPath + '-response.xml' - } or ${testPath + '-response.xml'}` + filterPath ? testPath + filterPath + '-response.xml or ' : '' + }${testPath + '-response.xml'}` ); /* eslint-enable no-console */ @@ -54,11 +55,26 @@ exports.loadSOAPRecords = async (mcdevAction, type, mid, filter) => { }); }; exports.filterToPath = (filter) => { - if (filter && filter.Property && filter.SimpleOperator && filter.Value) { - return `-${filter.Property}${filter.SimpleOperator.replace('equals', '=')}${filter.Value}`; + if (filter) { + return '-' + this._filterToPath(filter); } return ''; }; +exports._filterToPath = (filter) => { + if (filter.Property && filter.SimpleOperator) { + return `${filter.Property}${filter.SimpleOperator.replace('equals', '=')}${ + filter.Value === undefined ? 
'' : filter.Value + }`; + } else if (filter.LeftOperand && filter.LogicalOperator && filter.RightOperand) { + return ( + this._filterToPath(filter.LeftOperand) + + filter.LogicalOperator + + this._filterToPath(filter.RightOperand) + ); + } else { + throw new Error('unknown filter type'); + } +}; /** * based on request, respond with different soap data * @@ -123,8 +139,20 @@ exports.handleSOAPRequest = async (config) => { break; } + case 'Schedule': { + responseXML = await this.loadSOAPRecords( + config.headers.SOAPAction.toLocaleLowerCase(), + fullObj.Envelope.Body.ScheduleRequestMsg.Interactions.Interaction['@_xsi:type'], + jObj.Envelope.Header.fueloauth, + fullObj.Envelope.Body.ScheduleRequestMsg.Interactions.Interaction.ObjectID + ); + + break; + } default: { - throw new Error('This SOAP Action is not supported by test handler'); + throw new Error( + `The SOAP Action ${config.headers.SOAPAction} is not supported by test handler` + ); } } @@ -197,7 +225,7 @@ exports.handleRESTRequest = async (config) => { return [ 404, - fs.readFile(path.join('test', 'resources', 'rest404-response.json'), { + await fs.readFile(path.join('test', 'resources', 'rest404-response.json'), { encoding: 'utf8', }), ]; diff --git a/test/resources/1111111/accountUser/retrieve-ActiveFlag=falseANDCustomerKey=testExisting_userANDEmaillike@-response.xml b/test/resources/1111111/accountUser/retrieve-ActiveFlag=falseANDCustomerKey=testExisting_userANDEmaillike@-response.xml new file mode 100644 index 000000000..af1dec2e2 --- /dev/null +++ b/test/resources/1111111/accountUser/retrieve-ActiveFlag=falseANDCustomerKey=testExisting_userANDEmaillike@-response.xml @@ -0,0 +1,27 @@ + + + + RetrieveResponse + urn:uuid:1babdae0-9282-4bba-b69f-2f1843deaf11 + urn:uuid:39d1c021-b3df-49f9-a8f7-bb444172d2d3 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-03-11T13:53:00Z + 2023-03-11T13:58:00Z + + + + + + OK + 5347bf1d-e801-486a-b4a9-c2d46a8909b6 + + + + \ No newline at end of file diff --git a/test/resources/1111111/accountUser/retrieve-ActiveFlag=falseANDEmaillike@-response.xml b/test/resources/1111111/accountUser/retrieve-ActiveFlag=falseANDEmaillike@-response.xml new file mode 100644 index 000000000..22ee9fd1e --- /dev/null +++ b/test/resources/1111111/accountUser/retrieve-ActiveFlag=falseANDEmaillike@-response.xml @@ -0,0 +1,156 @@ + + + + RetrieveResponse + urn:uuid:1babdae0-9282-4bba-b69f-2f1843deaf11 + urn:uuid:39d1c021-b3df-49f9-a8f7-bb444172d2d3 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-03-11T13:53:00Z + 2023-03-11T13:58:00Z + + + + + + OK + 5347bf1d-e801-486a-b4a9-c2d46a8909b6 + + + 1111111 + 123456 + + + 2019-09-06T01:59:07.097 + 2022-06-21T01:43:02.64 + 700301951 + + testExisting_user_inactive + 700301951 + user_test-inactive@accenture.asgr + user test-inactive + user_test-inactive@accenture.com + false + false + + + 2 + + + + + 0 + + + + 3 + + + + + 0 + + 0 + 2023-02-23T10:14:11.443 + false + user_test-inactive@accenture.com + false + 1111111 + + + 1111111 + + None + + + + + + en-GB + + + + 5 + + (GMT+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna * + + + + + 1111111 + -1000 + -1000 + 1111111 + + + 2016-07-22T17:52:36.93 + 2016-07-22T17:52:36.93 + f1cfb80f-3550-e611-96fe-38eaa7142c61 + SYS_DEF_ADMIN + Administrator + Administrator + false + true + + + + 1111111 + -1000 + -1000 + 1111111 + + + 2016-07-22T17:52:36.93 + 2016-07-22T17:52:36.93 + f4cfb80f-3550-e611-96fe-38eaa7142c61 + SYS_DEF_CONTENT + Content Creator + Content Creator + false + true + + + 
+ 1111111 + -1000 + -1000 + 1111111 + + + 2019-09-06T07:59:07.8 + 2019-09-06T07:59:07.8 + bd251431-7cd0-e911-a2d3-1402ec936979 + Individual role for 700301951 + Individual role for 700301951 + Individual role for 700301951 + true + false + + + + 0 + 0 + 0 + 0 + + + 2012-02-21T02:09:19.983 + 2013-12-23T16:48:50.533 + 63a50610-315c-e111-beee-8e001800001f + SYS_DEF_IMHADMIN + Marketing Cloud Administrator + Assign Marketing Cloud roles to users and manage Mobile, Social and Sites Channels, Hub Apps and Marketing Cloud Tools + false + true + + + + + + \ No newline at end of file diff --git a/test/resources/1111111/accountUser/retrieve-response.xml b/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDCustomerKey=testExisting_userANDEmaillike@-response.xml similarity index 100% rename from test/resources/1111111/accountUser/retrieve-response.xml rename to test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDCustomerKey=testExisting_userANDEmaillike@-response.xml diff --git a/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDEmailisNullORNamelikeapp userANDMustChangePassword=false-response.xml b/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDEmailisNullORNamelikeapp userANDMustChangePassword=false-response.xml new file mode 100644 index 000000000..5ac4001a5 --- /dev/null +++ b/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDEmailisNullORNamelikeapp userANDMustChangePassword=false-response.xml @@ -0,0 +1,87 @@ + + + + RetrieveResponse + urn:uuid:1babdae0-9282-4bba-b69f-2f1843deaf11 + urn:uuid:39d1c021-b3df-49f9-a8f7-bb444172d2d3 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-03-11T13:53:00Z + 2023-03-11T13:58:00Z + + + + + + OK + 5347bf1d-e801-486a-b4a9-c2d46a8909b6 + + + 1111111 + 0 + + + 2016-07-22T11:52:37.42 + 2023-05-27T07:05:55.113 + 7471228 + + 45372cbb-06e0-438e-88d8-008981f7a18b + 7471228 + 20f2d94a-9a7d-4580-9fb6-c36a1ce32fb9 + igopredictiveemail app user + + false + true + 0 + + true + + false + 1111111 + + + + en-GB + + + + 5 + + (GMT+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna + + + + 1111111 + + None + + + + + + 1111111 + 0 + 0 + 1111111 + + + 2016-07-22T17:52:37.88 + 2016-07-22T17:52:37.88 + 44d0b80f-3550-e611-96fe-38eaa7142c61 + Individual role for 7471228 + Individual role for 7471228 + Individual role for 7471228 + true + false + + + + + + \ No newline at end of file diff --git a/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDEmaillike@-response.xml b/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDEmaillike@-response.xml new file mode 100644 index 000000000..e16046c89 --- /dev/null +++ b/test/resources/1111111/accountUser/retrieve-ActiveFlag=trueANDEmaillike@-response.xml @@ -0,0 +1,156 @@ + + + + RetrieveResponse + urn:uuid:1babdae0-9282-4bba-b69f-2f1843deaf11 + urn:uuid:39d1c021-b3df-49f9-a8f7-bb444172d2d3 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-03-11T13:53:00Z + 2023-03-11T13:58:00Z + + + + + + OK + 5347bf1d-e801-486a-b4a9-c2d46a8909b6 + + + 1111111 + 123456 + + + 2019-09-06T01:59:07.097 + 2022-06-21T01:43:02.64 + 700301950 + + testExisting_user + 700301950 + user_test@accenture.asgr + user test + user_test@accenture.com + false + true + + + 2 + + + + + 0 + + + + 3 + + + + + 0 + + 0 + 2023-02-23T10:14:11.443 + false + user_test@accenture.com + false + 1111111 + + + 1111111 + + None + + + + + + en-GB + + + + 5 + + (GMT+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna * + + + + + 1111111 + -1000 + -1000 + 
1111111 + + + 2016-07-22T17:52:36.93 + 2016-07-22T17:52:36.93 + f1cfb80f-3550-e611-96fe-38eaa7142c61 + SYS_DEF_ADMIN + Administrator + Administrator + false + true + + + + 1111111 + -1000 + -1000 + 1111111 + + + 2016-07-22T17:52:36.93 + 2016-07-22T17:52:36.93 + f4cfb80f-3550-e611-96fe-38eaa7142c61 + SYS_DEF_CONTENT + Content Creator + Content Creator + false + true + + + + 1111111 + -1000 + -1000 + 1111111 + + + 2019-09-06T07:59:07.8 + 2019-09-06T07:59:07.8 + bd251431-7cd0-e911-a2d3-1402ec936979 + Individual role for 700301950 + Individual role for 700301950 + Individual role for 700301950 + true + false + + + + 0 + 0 + 0 + 0 + + + 2012-02-21T02:09:19.983 + 2013-12-23T16:48:50.533 + 63a50610-315c-e111-beee-8e001800001f + SYS_DEF_IMHADMIN + Marketing Cloud Administrator + Assign Marketing Cloud roles to users and manage Mobile, Social and Sites Channels, Hub Apps and Marketing Cloud Tools + false + true + + + + + + \ No newline at end of file diff --git a/test/resources/1111111/accountUser/retrieve-CustomerKey=testExisting_userANDActiveFlag=trueANDEmailisNullORNamelikeapp userANDMustChangePassword=false-response.xml b/test/resources/1111111/accountUser/retrieve-CustomerKey=testExisting_userANDActiveFlag=trueANDEmailisNullORNamelikeapp userANDMustChangePassword=false-response.xml new file mode 100644 index 000000000..af1dec2e2 --- /dev/null +++ b/test/resources/1111111/accountUser/retrieve-CustomerKey=testExisting_userANDActiveFlag=trueANDEmailisNullORNamelikeapp userANDMustChangePassword=false-response.xml @@ -0,0 +1,27 @@ + + + + RetrieveResponse + urn:uuid:1babdae0-9282-4bba-b69f-2f1843deaf11 + urn:uuid:39d1c021-b3df-49f9-a8f7-bb444172d2d3 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-03-11T13:53:00Z + 2023-03-11T13:58:00Z + + + + + + OK + 5347bf1d-e801-486a-b4a9-c2d46a8909b6 + + + + \ No newline at end of file diff --git a/test/resources/1111111/accountUserAccount/retrieve-AccountUser.AccountUserID=700301950-response.xml b/test/resources/1111111/accountUserAccount/retrieve-AccountUser.AccountUserID=700301950-response.xml new file mode 100644 index 000000000..29e5e4173 --- /dev/null +++ b/test/resources/1111111/accountUserAccount/retrieve-AccountUser.AccountUserID=700301950-response.xml @@ -0,0 +1,60 @@ + + + + RetrieveResponse + urn:uuid:b9fbc10c-2775-48d0-b9bb-fddf1ed90616 + urn:uuid:11d44cfe-e1d7-4fc2-b6ad-a2c806573859 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-03-11T13:52:52Z + 2023-03-11T13:57:52Z + + + + + + OK + f4057c93-40e3-47d6-a953-06839e221d40 + + + + + + + 700301950 + 0 + + + + 1111111 + + None + + + + + + + + + + 700301950 + 0 + + + + 9999999 + + None + + + + + + \ No newline at end of file diff --git a/test/resources/1111111/accountUserAccount/retrieve-response.xml b/test/resources/1111111/accountUserAccount/retrieve-AccountUser.AccountUserIDIN700301950,700301951,7471228-response.xml similarity index 100% rename from test/resources/1111111/accountUserAccount/retrieve-response.xml rename to test/resources/1111111/accountUserAccount/retrieve-AccountUser.AccountUserIDIN700301950,700301951,7471228-response.xml diff --git a/test/resources/1111111/businessUnit/retrieve-response.xml b/test/resources/1111111/businessUnit/retrieve-ID=1111111-response.xml similarity index 100% rename from test/resources/1111111/businessUnit/retrieve-response.xml rename to test/resources/1111111/businessUnit/retrieve-ID=1111111-response.xml diff --git a/test/resources/1111111/list/retrieve-response.xml 
b/test/resources/1111111/list/retrieve-CustomerKey=All SubscribersORListName=All Subscribers-response.xml similarity index 100% rename from test/resources/1111111/list/retrieve-response.xml rename to test/resources/1111111/list/retrieve-CustomerKey=All SubscribersORListName=All Subscribers-response.xml diff --git a/test/resources/1111111/role/retrieve-response.xml b/test/resources/1111111/role/retrieve-IsPrivate=false-response.xml similarity index 100% rename from test/resources/1111111/role/retrieve-response.xml rename to test/resources/1111111/role/retrieve-IsPrivate=false-response.xml diff --git a/test/resources/1111111/user/retrieve-expected.md b/test/resources/1111111/user/retrieve-expected.md index 50eb1b2ec..acc1becbd 100644 --- a/test/resources/1111111/user/retrieve-expected.md +++ b/test/resources/1111111/user/retrieve-expected.md @@ -7,13 +7,15 @@ | user test | 96 days | ✓ | - | - | - | _ParentBU_ (1111111) | _ParentBU_ (1111111),
testBU (9999999) | Administrator,
Content Creator,
Marketing Cloud Administrator | user_test@accenture.asgr | 700301950 | testExisting_user | user_test@accenture.com | user_test@accenture.com | GMT+01:00 | en-GB | 2022-06-21 01:43:02.64 | 123456 | 2019-09-06 01:59:07.097 | -## Inactivated Users (0) +## Inactivated Users (1) | Name | Last successful Login | Active | Access Locked out | API User | Must change PW | Default BU | BU Access | Roles | Login | ID | Key | E-Mail | Notification E-Mail | Timezone | SFMC Locale | Modified Date | Modified By | Created Date | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | +| user test-inactive | 96 days | - | - | - | - | _ParentBU_ (1111111) | _ParentBU_ (1111111) | Administrator,
Content Creator,
Marketing Cloud Administrator | user_test-inactive@accenture.asgr | 700301951 | testExisting_user_inactive | user_test-inactive@accenture.com | user_test-inactive@accenture.com | GMT+01:00 | en-GB | 2022-06-21 01:43:02.64 | 123456 | 2019-09-06 01:59:07.097 | -## Installed Packages (0) +## Installed Packages (1) | Name | Last successful Login | Active | Access Locked out | API User | Must change PW | Default BU | BU Access | Roles | Login | ID | Key | E-Mail | Notification E-Mail | Timezone | SFMC Locale | Modified Date | Modified By | Created Date | | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | --- | +| igopredictiveemail app user | never | ✓ | - | ✓ | - | _ParentBU_ (1111111) | _ParentBU_ (1111111) | | 20f2d94a-9a7d-4580-9fb6-c36a1ce32fb9 | 7471228 | 45372cbb-06e0-438e-88d8-008981f7a18b | | | GMT+01:00 | en-GB | 2023-05-27 07:05:55.113 | n/a | 2016-07-22 11:52:37.42 | diff --git a/test/resources/9999999/automation/build-expected.json b/test/resources/9999999/automation/build-expected.json index 8422713d0..d8c32fd40 100644 --- a/test/resources/9999999/automation/build-expected.json +++ b/test/resources/9999999/automation/build-expected.json @@ -7,8 +7,7 @@ "endDate": "2022-07-30T00:00:00", "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "timezoneName": "W. Europe Standard Time" }, "status": "PausedSchedule", "steps": [ diff --git a/test/resources/9999999/automation/create-expected.json b/test/resources/9999999/automation/create-expected.json index e1c0c355b..9e40f520d 100644 --- a/test/resources/9999999/automation/create-expected.json +++ b/test/resources/9999999/automation/create-expected.json @@ -3,14 +3,14 @@ "key": "testNew_automation", "name": "testNew_automation", "r__folder_Path": "my automations", + "type": "scheduled", + "status": "PausedSchedule", "schedule": { - "endDate": "2022-07-30T00:00:00", - "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", - "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "endDate": "2079-06-06T21:00:00-06:00", + "icalRecur": "FREQ=MINUTELY;UNTIL=20790607T050000;INTERVAL=5", + "startDate": "2020-05-13T18:30:32.11-06:00", + "timezoneName": "W. Europe Standard Time" }, - "status": "PausedSchedule", "steps": [ { "activities": [ @@ -41,6 +41,5 @@ ], "name": "" } - ], - "type": "scheduled" + ] } diff --git a/test/resources/9999999/automation/create-testNew_automation-expected.md b/test/resources/9999999/automation/create-testNew_automation-expected.md index d596cc549..c607175cc 100644 --- a/test/resources/9999999/automation/create-testNew_automation-expected.md +++ b/test/resources/9999999/automation/create-testNew_automation-expected.md @@ -10,10 +10,10 @@ **Schedule:** -* Start: 2022-07-30 00:00:00 +01:00 -* End: 2022-07-30 00:00:00 +01:00 -* Timezone: W. Europe Standard Time -* Recurrance: every day for 1 times +* Start: 2020-05-13 18:30:32.11-06:00 +01:00 +* End: 2079-06-06 21:00:00-06:00 +01:00 +* Timezone: W. 
Europe Standard Time +* Recurrance: every 5 minutes **Notifications:** _none_ diff --git a/test/resources/9999999/automation/retrieve-expected.json b/test/resources/9999999/automation/retrieve-expected.json index 16ad1ec5a..55a7c1767 100644 --- a/test/resources/9999999/automation/retrieve-expected.json +++ b/test/resources/9999999/automation/retrieve-expected.json @@ -7,8 +7,7 @@ "endDate": "2022-07-30T00:00:00", "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "timezoneName": "W. Europe Standard Time" }, "status": "PausedSchedule", "steps": [ diff --git a/test/resources/9999999/automation/schedule-08afb0e2-b00a-4c88-ad2e-1f7f8788c560-response.xml b/test/resources/9999999/automation/schedule-08afb0e2-b00a-4c88-ad2e-1f7f8788c560-response.xml new file mode 100644 index 000000000..55576611b --- /dev/null +++ b/test/resources/9999999/automation/schedule-08afb0e2-b00a-4c88-ad2e-1f7f8788c560-response.xml @@ -0,0 +1,52 @@ + + + + ScheduleResponse + urn:uuid:b58a82d9-a251-4be4-b50c-bd300d71601d + urn:uuid:9d3dbeb3-68b7-4f0a-b0af-04855dc0fd1e + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-07-05T10:29:58Z + 2023-07-05T10:34:58Z + + + + + + + + OK + Program scheduled. + 3a677ca8-5423-4d59-a1bb-c5fbe1c87197 + + + + + Interval + 1 + + Daily + EndAfter + 2023-07-05T16:00:57 + 1 + + + 5 + + + + + + + OK + + cfe97488-3c5e-49a9-b338-c0e9f6b6f684 + + + \ No newline at end of file diff --git a/test/resources/9999999/automation/schedule-08afb0e2-b00a-4c88-ad2e-pause-response.xml b/test/resources/9999999/automation/schedule-08afb0e2-b00a-4c88-ad2e-pause-response.xml new file mode 100644 index 000000000..072b427e5 --- /dev/null +++ b/test/resources/9999999/automation/schedule-08afb0e2-b00a-4c88-ad2e-pause-response.xml @@ -0,0 +1,38 @@ + + + + ScheduleResponse + urn:uuid:b58a82d9-a251-4be4-b50c-bd300d71601d + urn:uuid:9d3dbeb3-68b7-4f0a-b0af-04855dc0fd1e + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-07-05T10:29:58Z + 2023-07-05T10:34:58Z + + + + + + + + OK + Schedule Paused. To reactivate the schedule use proxy.Schedule with the action 'start'. + + + + + + + + OK + + cfe97488-3c5e-49a9-b338-c0e9f6b6f684 + + + diff --git a/test/resources/9999999/automation/schedule-a8afb0e2-b00a-4c88-ad2e-1f7f8788c560-response.xml b/test/resources/9999999/automation/schedule-a8afb0e2-b00a-4c88-ad2e-1f7f8788c560-response.xml new file mode 100644 index 000000000..c0876dbef --- /dev/null +++ b/test/resources/9999999/automation/schedule-a8afb0e2-b00a-4c88-ad2e-1f7f8788c560-response.xml @@ -0,0 +1,52 @@ + + + + ScheduleResponse + urn:uuid:6c743640-b80f-456a-abed-b542247f2414 + urn:uuid:29fc6536-3c75-4e07-a05d-e63f7ee520d3 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-07-04T13:13:15Z + 2023-07-04T13:18:15Z + + + + + + + + OK + Program scheduled. 
+ 89cd977a-3927-49fa-b8e5-47c7e6093bef + + + + + Interval + 5 + + Minutely + EndOn + 2023-07-04T06:21:14.408 + 2079-06-06T20:00:00 + + + 5 + + + + + + + OK + + 3b03417b-649e-4a1d-963d-05f42fc8f1b5 + + + diff --git a/test/resources/9999999/automation/template-expected.json b/test/resources/9999999/automation/template-expected.json index 83e7df307..c82ba388b 100644 --- a/test/resources/9999999/automation/template-expected.json +++ b/test/resources/9999999/automation/template-expected.json @@ -7,8 +7,7 @@ "endDate": "2022-07-30T00:00:00", "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "timezoneName": "W. Europe Standard Time" }, "status": "PausedSchedule", "steps": [ diff --git a/test/resources/9999999/automation/update-expected.json b/test/resources/9999999/automation/update-expected.json index cd5ecc8c5..6ba9e73b8 100644 --- a/test/resources/9999999/automation/update-expected.json +++ b/test/resources/9999999/automation/update-expected.json @@ -7,8 +7,7 @@ "endDate": "2022-07-30T00:00:00", "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", "startDate": "2022-07-30T00:00:00", - "timezoneName": "W. Europe Standard Time", - "typeId": 3 + "timezoneName": "W. Europe Standard Time" }, "status": "PausedSchedule", "steps": [ diff --git a/test/resources/9999999/automation/v1/automations/08afb0e2-b00a-4c88-ad2e-pause/get-response.json b/test/resources/9999999/automation/v1/automations/08afb0e2-b00a-4c88-ad2e-pause/get-response.json new file mode 100644 index 000000000..58ec6b51c --- /dev/null +++ b/test/resources/9999999/automation/v1/automations/08afb0e2-b00a-4c88-ad2e-pause/get-response.json @@ -0,0 +1,85 @@ +{ + "id": "08afb0e2-b00a-4c88-ad2e-pause", + "name": "testExisting_automation_pause", + "description": "bla bla", + "key": "testExisting_automation_pause", + "typeId": 1, + "type": "scheduled", + "statusId": 4, + "status": "Scheduled", + "categoryId": 290937, + "schedule": { + "id": "b393aa6c-a4a8-4c0f-a148-9250258a7339", + "typeId": 3, + "startDate": "2022-07-30T00:00:00", + "endDate": "2022-07-30T00:00:00", + "scheduledTime": "0001-01-01T07:00:00", + "rangeTypeId": 0, + "occurrences": 1, + "pattern": "01", + "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", + "timezoneName": "W. 
Europe Standard Time", + "scheduleStatus": "scheduled", + "timezoneId": 5 + }, + "steps": [ + { + "id": "13fda077-0e82-4936-b936-a36b0997fc44", + "name": "", + "step": 1, + "activities": [ + { + "id": "8081a992-a27d-4a43-984a-d60114ea1025", + "name": "testExisting_dataExtract", + "activityObjectId": "56c5370a-f988-4f36-b0ee-0f876573f6d7", + "objectTypeId": 73, + "displayOrder": 1 + }, + { + "id": "d3774dc2-a271-4a44-8cbe-f630a6d6545e", + "name": "testExisting_emailSend", + "activityObjectId": "9b1c7bf9-4964-ed11-b849-48df37d1de8b", + "objectTypeId": 42, + "displayOrder": 2 + }, + { + "id": "2c77fc42-85eb-4611-98f9-223d29d89d72", + "name": "testExisting_fileTransfer", + "activityObjectId": "72c328ac-f5b0-4e37-91d3-a775666f15a6", + "objectTypeId": 53, + "displayOrder": 3 + }, + { + "id": "298b2794-28cb-4c70-b7ad-58b2c8cf48f7", + "name": "testExisting_importFile", + "activityObjectId": "9d16f42c-2260-ed11-b849-48df37d1de8b", + "objectTypeId": 43, + "displayOrder": 4, + "targetDataExtensions": [ + { + "id": "21711373-72c1-ec11-b83b-48df37d1deb7", + "name": "testExisting_dataExtension", + "key": "testExisting_dataExtension", + "description": "bla bla", + "rowCount": 0 + } + ] + }, + { + "id": "e3774dc2-a271-4a44-8cbe-f630a6d6545e", + "name": "testExisting_query_WRONG_NAME", + "activityObjectId": "549f0568-607c-4940-afef-437965094dat", + "objectTypeId": 300, + "displayOrder": 5 + }, + { + "id": "g3774dc2-a271-4a44-8cbe-f630a6d6545e", + "name": "testExisting_script", + "activityObjectId": "39f6a488-20eb-4ba0-b0b9-023725b574e4", + "objectTypeId": 423, + "displayOrder": 6 + } + ] + } + ] +} diff --git a/test/resources/9999999/automation/v1/automations/a8afb0e2-b00a-4c88-ad2e-1f7f8788c560/get-response.json b/test/resources/9999999/automation/v1/automations/a8afb0e2-b00a-4c88-ad2e-1f7f8788c560/get-response.json index b58d358fb..0f20856b9 100644 --- a/test/resources/9999999/automation/v1/automations/a8afb0e2-b00a-4c88-ad2e-1f7f8788c560/get-response.json +++ b/test/resources/9999999/automation/v1/automations/a8afb0e2-b00a-4c88-ad2e-1f7f8788c560/get-response.json @@ -6,7 +6,7 @@ "typeId": 1, "type": "scheduled", "statusId": 4, - "status": "PausedSchedule", + "status": "Scheduled", "categoryId": 290937, "schedule": { "id": "b393aa6c-a4a8-4c0f-a148-9250258a7339", diff --git a/test/resources/9999999/automation/v1/automations/post-response.json b/test/resources/9999999/automation/v1/automations/post-response.json index 6f82ef7c9..ba11ebf1e 100644 --- a/test/resources/9999999/automation/v1/automations/post-response.json +++ b/test/resources/9999999/automation/v1/automations/post-response.json @@ -1,25 +1,28 @@ { + "id": "a8afb0e2-b00a-4c88-ad2e-1f7f8788c560", "legacyId": "NewRkpOcE9qSVh2VUdnYTVJbWFfWW14dzoyNTow", "name": "testNew_automation", "description": "created on deploy", "key": "testNew_automation", - "typeId": 1, - "type": "scheduled", + "categoryId": 290937, "statusId": 4, - "status": "PausedSchedule", - "schedule": { - "id": "b393aa6c-a4a8-4c0f-a148-9250258a7339", - "typeId": 3, - "startDate": "2022-07-30T00:00:00", - "endDate": "2022-07-30T00:00:00", - "scheduledTime": "0001-01-01T07:00:00", - "rangeTypeId": 0, - "occurrences": 1, - "pattern": "01", - "icalRecur": "FREQ=DAILY;COUNT=1;INTERVAL=1", - "timezoneName": "W. 
Europe Standard Time", - "scheduleStatus": "paused", - "timezoneId": 5 + "lastSavedDate": "2023-07-04T07:49:08.577", + "lastSavedByName": "SFMC DevTools app user", + "createdDate": "2023-07-04T07:49:08.297", + "createdByName": "SFMC DevTools app user", + "updateInProgress": false, + "startSource": { + "typeId": 1, + "schedule": { + "scheduleTypeId": 1, + "startDate": "2020-05-13T18:30:32.11-06:00", + "endDate": "2079-06-06T21:00:00-06:00", + "rangeTypeId": 1, + "occurrences": 6213054, + "icalRecur": "FREQ=MINUTELY;UNTIL=20790607T050000;INTERVAL=5", + "timezoneId": 5, + "statusId": 0 + } }, "steps": [ { @@ -79,7 +82,5 @@ "annotation": "", "stepNumber": 0 } - ], - "categoryId": 290937, - "id": "a8afb0e2-b00a-4c88-ad2e-1f7f8788c560" + ] } diff --git a/test/resources/9999999/automation/v1/queries/549f0568-607c-4940-afef-437965094dae/actions/start/post-response.txt b/test/resources/9999999/automation/v1/queries/549f0568-607c-4940-afef-437965094dae/actions/start/post-response.txt new file mode 100644 index 000000000..a0aba9318 --- /dev/null +++ b/test/resources/9999999/automation/v1/queries/549f0568-607c-4940-afef-437965094dae/actions/start/post-response.txt @@ -0,0 +1 @@ +OK \ No newline at end of file diff --git a/test/resources/9999999/dataExtension/retrieve-Name=testExisting_dataExtension-response.xml b/test/resources/9999999/dataExtension/retrieve-Name=testExisting_dataExtension-response.xml new file mode 100644 index 000000000..e7cf0d2ef --- /dev/null +++ b/test/resources/9999999/dataExtension/retrieve-Name=testExisting_dataExtension-response.xml @@ -0,0 +1,52 @@ + + + + RetrieveResponse + urn:uuid:c198cc12-c34c-4d1d-90b0-5b785a342efc + urn:uuid:a0506b59-1847-4405-8231-6a15e26bbcc9 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2022-04-21T19:21:50Z + 2022-04-21T19:26:50Z + + + + + + OK + d175de6e-c8e4-4f5d-9c1d-ad64426ff4b7 + + + 2022-04-21T06:56:27.927 + 2022-04-21T06:56:27.927 + 21711373-72c1-ec11-b83b-48df37d1deb7 + testExisting_dataExtension + testExisting_dataExtension + bla bla + true + true + + + + ContactKey + + + _SubscriberKey + + 6 + 5 + true + false + false + + 2 + + + + \ No newline at end of file diff --git a/test/resources/9999999/dataExtensionField/retrieve-DataExtension.CustomerKey=testExisting_dataExtension-response.xml b/test/resources/9999999/dataExtensionField/retrieve-DataExtension.CustomerKey=testExisting_dataExtension-response.xml new file mode 100644 index 000000000..3ffc1b3eb --- /dev/null +++ b/test/resources/9999999/dataExtensionField/retrieve-DataExtension.CustomerKey=testExisting_dataExtension-response.xml @@ -0,0 +1,98 @@ + + + + RetrieveResponse + urn:uuid:6d2f81c8-80ab-44d5-8664-206753e7ac8d + urn:uuid:a7354389-079e-4844-93b5-af6b7bf1d535 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2022-04-21T19:21:52Z + 2022-04-21T19:26:52Z + + + + + + OK + cfe51e71-6a2b-4cb4-aecd-3777076d63bc + + + 8018397d-880d-4f88-940e-3b4eca098a0c + [testExisting_dataExtension].[ContactKey] + ContactKey + 0 + + 50 + true + 3 + true + Text + + + + testExisting_dataExtension + + + + + bea0e308-5d45-4181-a673-da9972a7c674 + [testExisting_dataExtension].[LastName] + LastName + 0 + + 50 + false + 1 + false + Text + + + + testExisting_dataExtension + + + + + 2557b461-a699-4744-950d-e80a19afc2dc + [testExisting_dataExtension].[EmailAddress] + EmailAddress + 0 + + 254 + true + 2 + false + EmailAddress + + + + testExisting_dataExtension + + + + + 42760528-a8c5-44dd-8c1d-ff34e5daee54 + [testExisting_dataExtension].[FirstName] + FirstName + 0 
+ + 50 + false + 0 + false + Text + + + + testExisting_dataExtension + + + + + \ No newline at end of file diff --git a/test/resources/9999999/dataExtensionField/retrieve-DataExtension.CustomerKey=testNew_dataExtensionORDataExtension.CustomerKey=testExisting_dataExtension-response.xml b/test/resources/9999999/dataExtensionField/retrieve-DataExtension.CustomerKey=testNew_dataExtensionORDataExtension.CustomerKey=testExisting_dataExtension-response.xml new file mode 100644 index 000000000..8018e9949 --- /dev/null +++ b/test/resources/9999999/dataExtensionField/retrieve-DataExtension.CustomerKey=testNew_dataExtensionORDataExtension.CustomerKey=testExisting_dataExtension-response.xml @@ -0,0 +1,99 @@ + + + + + RetrieveResponse + urn:uuid:6d2f81c8-80ab-44d5-8664-206753e7ac8d + urn:uuid:a7354389-079e-4844-93b5-af6b7bf1d535 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2022-04-21T19:21:52Z + 2022-04-21T19:26:52Z + + + + + + OK + cfe51e71-6a2b-4cb4-aecd-3777076d63bc + + + 8018397d-880d-4f88-940e-3b4eca098a0c + [testExisting_dataExtension].[ContactKey] + ContactKey + 0 + + 50 + true + 3 + true + Text + + + + testExisting_dataExtension + + + + + bea0e308-5d45-4181-a673-da9972a7c674 + [testExisting_dataExtension].[LastName] + LastName + 0 + + 50 + false + 1 + false + Text + + + + testExisting_dataExtension + + + + + 2557b461-a699-4744-950d-e80a19afc2dc + [testExisting_dataExtension].[EmailAddress] + EmailAddress + 0 + + 254 + true + 2 + false + EmailAddress + + + + testExisting_dataExtension + + + + + 42760528-a8c5-44dd-8c1d-ff34e5daee54 + [testExisting_dataExtension].[FirstName] + FirstName + 0 + + 50 + false + 0 + false + Text + + + + testExisting_dataExtension + + + + + \ No newline at end of file diff --git a/test/resources/9999999/emailSendDefinition/retrieve-response.xml b/test/resources/9999999/emailSendDefinition/retrieve-IsPlatformObject=falseANDDescriptionnotEqualsSFSendDefinition-response.xml similarity index 100% rename from test/resources/9999999/emailSendDefinition/retrieve-response.xml rename to test/resources/9999999/emailSendDefinition/retrieve-IsPlatformObject=falseANDDescriptionnotEqualsSFSendDefinition-response.xml diff --git a/test/resources/9999999/program/retrieve-CustomerKey=testExisting_automation_pause-response.xml b/test/resources/9999999/program/retrieve-CustomerKey=testExisting_automation_pause-response.xml new file mode 100644 index 000000000..6d33a7bfc --- /dev/null +++ b/test/resources/9999999/program/retrieve-CustomerKey=testExisting_automation_pause-response.xml @@ -0,0 +1,30 @@ + + + + RetrieveResponse + urn:uuid:60a72d4a-847e-4d9b-a4eb-a42951078298 + urn:uuid:0b59ed53-72ec-4481-ae06-4ee78912aef2 + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-06-01T12:04:20Z + 2023-06-01T12:09:20Z + + + + + + OK + 3b1c8cee-b270-49cb-b77b-e7b33934d1b6 + + + 08afb0e2-b00a-4c88-ad2e-pause + + + + diff --git a/test/resources/9999999/program/retrieve-response.xml b/test/resources/9999999/program/retrieve-response.xml index 67ede7785..bcb9045c8 100644 --- a/test/resources/9999999/program/retrieve-response.xml +++ b/test/resources/9999999/program/retrieve-response.xml @@ -24,9 +24,15 @@ 08afb0e2-b00a-4c88-ad2e-1f7f8788c560 - testExisting_automation - testExisting_automation + testExisting_automation + testExisting_automation + + + + 08afb0e2-b00a-4c88-ad2e-pause + testExisting_automation_pause + testExisting_automation_pause - \ No newline at end of file + diff --git a/test/resources/9999999/queryDefinition/retrieve-response.xml 
b/test/resources/9999999/queryDefinition/retrieve-CustomerKey=testExisting_queryANDStatus=Active-response.xml similarity index 100% rename from test/resources/9999999/queryDefinition/retrieve-response.xml rename to test/resources/9999999/queryDefinition/retrieve-CustomerKey=testExisting_queryANDStatus=Active-response.xml diff --git a/test/resources/9999999/queryDefinition/retrieve-CustomerKey=testNew_queryANDStatus=Active-response.xml b/test/resources/9999999/queryDefinition/retrieve-CustomerKey=testNew_queryANDStatus=Active-response.xml new file mode 100644 index 000000000..ad6aad842 --- /dev/null +++ b/test/resources/9999999/queryDefinition/retrieve-CustomerKey=testNew_queryANDStatus=Active-response.xml @@ -0,0 +1,30 @@ + + + + RetrieveResponse + urn:uuid:7ef0345e-b559-4fc4-8986-47e54e1a8a58 + urn:uuid:b2e814a6-517c-4882-9bbb-238bfce951ce + http://schemas.xmlsoap.org/ws/2004/08/addressing/role/anonymous + + + 2023-04-11T16:33:48Z + 2023-04-11T16:38:48Z + + + + + + OK + e8eb2988-2f43-4243-a6b0-6ab6b841a6ab + + + 549f0568-607c-4940-afef-437965094dae + + + + \ No newline at end of file diff --git a/test/resources/9999999/triggeredSendDefinition/retrieve-response.xml b/test/resources/9999999/triggeredSendDefinition/retrieve-TriggeredSendStatusINNew,Active,Inactive,Moved,Canceled-response.xml similarity index 100% rename from test/resources/9999999/triggeredSendDefinition/retrieve-response.xml rename to test/resources/9999999/triggeredSendDefinition/retrieve-TriggeredSendStatusINNew,Active,Inactive,Moved,Canceled-response.xml diff --git a/test/type.automation.test.js b/test/type.automation.test.js index 5f9dd60bb..531b6b802 100644 --- a/test/type.automation.test.js +++ b/test/type.automation.test.js @@ -27,8 +27,8 @@ describe('type: automation', () => { const result = cache.getCache(); assert.equal( result.automation ? Object.keys(result.automation).length : 0, - 1, - 'only one automation expected' + 2, + 'only two automations expected' ); assert.deepEqual( await testUtils.getActualJson('testExisting_automation', 'automation'), @@ -50,7 +50,7 @@ describe('type: automation', () => { assert.equal( testUtils.getAPIHistoryLength(), - 14, + 15, 'Unexpected number of requests made. Run testUtils.logAPIHistoryDebug() to see the requests' ); return; @@ -70,8 +70,8 @@ describe('type: automation', () => { const result = cache.getCache(); assert.equal( result.automation ? Object.keys(result.automation).length : 0, - 2, - 'two automations expected' + 3, + 'three automations expected' ); // insert assert.deepEqual( @@ -111,7 +111,69 @@ describe('type: automation', () => { assert.equal( testUtils.getAPIHistoryLength(), - 15, + 16, + 'Unexpected number of requests made. Run testUtils.logAPIHistoryDebug() to see the requests' + ); + return; + }); + it('Should update & start an automation with --execute option', async () => { + // WHEN + handler.setOptions({ execute: true }); + const deployed = await handler.deploy( + 'testInstance/testBU', + ['automation'], + ['testExisting_automation'] + ); + // THEN + assert.equal( + process.exitCode, + false, + 'deploy with --execute should not have thrown an error' + ); + + // get results from cache + const cached = cache.getCache(); + assert.equal( + cached.automation ? Object.keys(cached.automation).length : 0, + 2, + 'two cached automation expected' + ); + assert.equal( + deployed['testInstance/testBU'].automation + ? 
Object.keys(deployed['testInstance/testBU'].automation).length
+                    : 0,
+                1,
+                'one deployed automation expected'
+            );
+            assert.equal(
+                deployed['testInstance/testBU'].automation
+                    ? Object.keys(deployed['testInstance/testBU'].automation)[0]
+                    : null,
+                'testExisting_automation',
+                'expected specific automation to have been deployed'
+            );
+
+            // update
+            assert.deepEqual(
+                await testUtils.getActualJson('testExisting_automation', 'automation'),
+                await testUtils.getExpectedJson('9999999', 'automation', 'update'),
+                'returned metadata was not equal expected for update'
+            );
+            // check if MD file was created and equals expectations
+            expect(file(testUtils.getActualDoc('testExisting_automation', 'automation'))).to.equal(
+                file(
+                    testUtils.getExpectedFile(
+                        '9999999',
+                        'automation',
+                        'update-testExisting_automation',
+                        'md'
+                    )
+                )
+            );
+
+            assert.equal(
+                testUtils.getAPIHistoryLength(),
+                19,
                 'Unexpected number of requests made. Run testUtils.logAPIHistoryDebug() to see the requests'
             );
             return;
         });
@@ -210,7 +272,7 @@ describe('type: automation', () => {
             );
             assert.equal(
                 testUtils.getAPIHistoryLength(),
-                14,
+                15,
                 'Unexpected number of requests made. Run testUtils.logAPIHistoryDebug() to see the requests'
             );
             return;
@@ -256,4 +318,56 @@ describe('type: automation', () => {
             return;
         });
     });
+    describe('Execute ================', () => {
+        it('Should start an automation by key', async () => {
+            const execute = await handler.execute('testInstance/testBU', 'automation', [
+                'testExisting_automation',
+            ]);
+            assert.equal(process.exitCode, false, 'execute should not have thrown an error');
+            assert.equal(execute, true, 'automation was supposed to be executed');
+            return;
+        });
+        it('Should start an automation selected via --like', async () => {
+            handler.setOptions({ like: { key: 'testExist%automation' } });
+            const execute = await handler.execute('testInstance/testBU', 'automation');
+            assert.equal(process.exitCode, false, 'execute should not have thrown an error');
+            assert.equal(execute, true, 'automation was supposed to be executed');
+            return;
+        });
+        it('Should not start executing an automation because key and --like were specified', async () => {
+            handler.setOptions({ like: { key: 'testExisting%' } });
+            const execute = await handler.execute('testInstance/testBU', 'automation', [
+                'testExisting_automation',
+            ]);
+            assert.equal(process.exitCode, true, 'execute should have thrown an error');
+            assert.equal(execute, false, 'automation was not supposed to be executed');
+            return;
+        });
+    });
+    describe('Pause ================', () => {
+        it('Should pause an automation by key', async () => {
+            const pause = await handler.pause('testInstance/testBU', 'automation', [
+                'testExisting_automation_pause',
+            ]);
+            assert.equal(process.exitCode, false, 'pause should not have thrown an error');
+            assert.equal(pause, true, 'automation was supposed to be paused');
+            return;
+        });
+        it('Should pause an automation selected via --like', async () => {
+            handler.setOptions({ like: { key: 'testExisting_a%n_pause' } });
+            const pause = await handler.pause('testInstance/testBU', 'automation');
+            assert.equal(process.exitCode, false, 'pause should not have thrown an error');
+            assert.equal(pause, true, 'automation was supposed to be paused');
+            return;
+        });
+        it('Should not pause an automation because key and --like were specified', async () => {
+            handler.setOptions({ like: { key: 'testExisting_a%n_pause' } });
+            const pause = await handler.pause('testInstance/testBU', 'automation', [
+ 
'testExisting_automation_pause',
+            ]);
+            assert.equal(process.exitCode, true, 'pause should have thrown an error');
+            assert.equal(pause, false, 'automation was not supposed to be paused');
+            return;
+        });
+    });
 });
diff --git a/test/type.query.test.js b/test/type.query.test.js
index 69bbab6bd..ac5746251 100644
--- a/test/type.query.test.js
+++ b/test/type.query.test.js
@@ -53,7 +53,7 @@ describe('type: query', () => {
             );
             return;
         });
-        it('Should retrieve one specific query', async () => {
+        it('Should retrieve one specific query by key', async () => {
             // WHEN
             await handler.retrieve('testInstance/testBU', ['query'], ['testExisting_query']);
             // THEN
@@ -80,6 +80,61 @@ describe('type: query', () => {
             );
             return;
         });
+        it('Should retrieve one specific query via --like', async () => {
+            // WHEN
+            handler.setOptions({ like: { key: '%Existing_query' } });
+            await handler.retrieve('testInstance/testBU', ['query']);
+            // THEN
+            assert.equal(process.exitCode, false, 'retrieve should not have thrown an error');
+            // get results from cache
+            const result = cache.getCache();
+            assert.equal(
+                result.query ? Object.keys(result.query).length : 0,
+                2,
+                'two queries in cache expected'
+            );
+            assert.deepEqual(
+                await testUtils.getActualJson('testExisting_query', 'query'),
+                await testUtils.getExpectedJson('9999999', 'query', 'get'),
+                'returned metadata was not equal expected'
+            );
+            expect(file(testUtils.getActualFile('testExisting_query', 'query', 'sql'))).to.equal(
+                file(testUtils.getExpectedFile('9999999', 'query', 'get', 'sql'))
+            );
+            expect(file(testUtils.getActualFile('testExisting_query2', 'query', 'sql'))).to.not
+                .exist;
+            assert.equal(
+                testUtils.getAPIHistoryLength(),
+                6,
+                'Unexpected number of requests made. Run testUtils.logAPIHistoryDebug() to see the requests'
+            );
+            return;
+        });
+        it('Should not retrieve any query via --like and key due to a mismatching filter', async () => {
+            // WHEN
+            handler.setOptions({ like: { key: 'NotExisting_query' } });
+            await handler.retrieve('testInstance/testBU', ['query']);
+            // THEN
+            assert.equal(process.exitCode, false, 'retrieve should not have thrown an error');
+            // get results from cache
+            const result = cache.getCache();
+            assert.equal(
+                result.query ? Object.keys(result.query).length : 0,
+                2,
+                'two queries in cache expected'
+            );
+
+            expect(file(testUtils.getActualFile('testExisting_query', 'query', 'sql'))).to.not
+                .exist;
+            expect(file(testUtils.getActualFile('testExisting_query2', 'query', 'sql'))).to.not
+                .exist;
+            assert.equal(
+                testUtils.getAPIHistoryLength(),
+                6,
+                'Unexpected number of requests made. 
Run testUtils.logAPIHistoryDebug() to see the requests'
+            );
+            return;
+        });
     });
     describe('Deploy ================', () => {
         beforeEach(() => {
@@ -124,6 +179,33 @@ describe('type: query', () => {
             return;
         });
         it('Should change the key during update with --changeKeyValue');
+        it('Should deploy and execute with --execute', async () => {
+            handler.setOptions({ execute: true });
+            // WHEN
+            await handler.deploy('testInstance/testBU', ['query']);
+            // THEN
+            assert.equal(
+                process.exitCode,
+                false,
+                'deploy with --execute should not have thrown an error'
+            );
+            // confirm updated item
+            assert.deepEqual(
+                await testUtils.getActualJson('testExisting_query', 'query'),
+                await testUtils.getExpectedJson('9999999', 'query', 'patch'),
+                'returned metadata was not equal expected for insert query'
+            );
+            expect(file(testUtils.getActualFile('testExisting_query', 'query', 'sql'))).to.equal(
+                file(testUtils.getExpectedFile('9999999', 'query', 'patch', 'sql'))
+            );
+            // check number of API calls
+            assert.equal(
+                testUtils.getAPIHistoryLength(),
+                12,
+                'Unexpected number of requests made. Run testUtils.logAPIHistoryDebug() to see the requests'
+            );
+            return;
+        });
     });
     describe('FixKeys ================', () => {
         beforeEach(() => {
@@ -314,15 +396,29 @@ describe('type: query', () => {
         });
     });
     describe('Execute ================', () => {
-        it('Should start executing a query', async () => {
-            const execute = await handler.execute(
-                'testInstance/testBU',
-                ['query'],
-                ['testExisting_query']
-            );
+        it('Should start a query by key', async () => {
+            const execute = await handler.execute('testInstance/testBU', 'query', [
+                'testExisting_query',
+            ]);
+            assert.equal(process.exitCode, false, 'execute should not have thrown an error');
+            assert.equal(execute, true, 'query was supposed to be executed');
+            return;
+        });
+        it('Should start a query selected via --like', async () => {
+            handler.setOptions({ like: { key: 'testExist%query' } });
+            const execute = await handler.execute('testInstance/testBU', 'query');
             assert.equal(process.exitCode, false, 'execute should not have thrown an error');
             assert.equal(execute, true, 'query was supposed to be executed');
             return;
         });
+        it('Should not start executing a query because key and --like were specified', async () => {
+            handler.setOptions({ like: { key: 'testExisting%' } });
+            const execute = await handler.execute('testInstance/testBU', 'query', [
+                'testExisting_query',
+            ]);
+            assert.equal(process.exitCode, true, 'execute should have thrown an error');
+            assert.equal(execute, false, 'query was not supposed to be executed');
+            return;
+        });
     });
 });
diff --git a/test/type.user.test.js b/test/type.user.test.js
index cb56fe090..18fc252ba 100644
--- a/test/type.user.test.js
+++ b/test/type.user.test.js
@@ -26,8 +26,8 @@ describe('type: user', () => {
             const result = cache.getCache();
             assert.equal(
                 result.user ? 
Object.keys(result.user).length : 0, - 1, - 'only one user expected' + 3, + 'only three users expected' ); assert.deepEqual( await testUtils.getActualJson('testExisting_user', 'user', '_ParentBU_'), @@ -42,7 +42,7 @@ describe('type: user', () => { { encoding: 'utf8' } ); const regexFindDaysSinceLogin = - /\| (\d*) (seconds|minutes|days|weeks|months|years){1} \|/gm; + /\| (\d*) (seconds|minutes|days|weeks|months|years){1} \|/g; // fetch expected time since last login const expectedDaysSinceLogin = expectedFile.match(regexFindDaysSinceLogin); // load actual file and replace days since last login with expected value @@ -50,7 +50,7 @@ describe('type: user', () => { await File.readFile(`./docs/user/testInstance.users.md`, { encoding: 'utf8', }) - ).replaceAll(regexFindDaysSinceLogin, expectedDaysSinceLogin); + ).replaceAll(regexFindDaysSinceLogin, [expectedDaysSinceLogin[0]]); expect(actualFile).to.equal(expectedFile); assert.equal( @@ -83,7 +83,12 @@ describe('type: user', () => { }); it('Should create & upsert a user', async () => { // WHEN - const expectedCache = ['testNew_user', 'testExisting_user']; + const expectedCache = [ + 'testExisting_user', + 'testExisting_user_inactive', + '45372cbb-06e0-438e-88d8-008981f7a18b', + 'testNew_user', + ]; await handler.deploy('testInstance/_ParentBU_', ['user'], expectedCache); // THEN assert.equal(process.exitCode, false, 'deploy should not have thrown an error'); @@ -92,8 +97,8 @@ describe('type: user', () => { const result = cache.getCache(); assert.equal( result.user ? Object.keys(result.user).length : 0, - 2, - 'two users expected' + 4, + 'four users expected' ); // confirm if result.user only includes values from expectedCache assert.deepEqual( @@ -126,14 +131,22 @@ describe('type: user', () => { }); it('Should not deploy user with Marketing Cloud role', async () => { // WHEN - const expectedCache = ['testExisting_user']; + const expectedCache = [ + 'testExisting_user', + 'testExisting_user_inactive', + '45372cbb-06e0-438e-88d8-008981f7a18b', + ]; await handler.deploy('testInstance/_ParentBU_', ['user'], ['testBlocked_user']); // THEN assert.equal(process.exitCode, 1, 'Deployment should have thrown an error'); // get results from cache const result = cache.getCache(); - assert.equal(result.user ? Object.keys(result.user).length : 0, 1, '1 user expected'); + assert.equal( + result.user ? Object.keys(result.user).length : 0, + 3, + 'three users expected' + ); // confirm if result.user only includes values from expectedCache assert.deepEqual( Object.keys(result.user),