From 8df807cc5347f78dbc043c78c482ea3d28c9024c Mon Sep 17 00:00:00 2001
From: Stefano Verna
Date: Fri, 18 Sep 2020 11:33:08 +0200
Subject: [PATCH] Better error management, fix Contentful import

---
 bin/dato.js                                   |  42 +++-
 src/ApiException.js                           |  98 ++++++--
 src/Client.js                                 |  23 +-
 src/check/command.js                          |   5 +-
 src/contentfulImport/addValidationsOnField.js |  50 ++--
 src/contentfulImport/appClient.js             |  25 +-
 src/contentfulImport/createFields.js          | 157 ++++++------
 src/contentfulImport/createModels.js          |  48 ++--
 src/contentfulImport/createRecords.js         | 232 +++++++++---------
 src/contentfulImport/createUploads.js         |  73 +++---
 src/contentfulImport/destroyExistingAssets.js |  38 +--
 src/contentfulImport/destroyExistingModels.js |  41 ++--
 src/contentfulImport/linkRecords.js           | 133 +++++-----
 src/contentfulImport/publishRecords.js        |  18 +-
 src/contentfulImport/removeAllValidators.js   |  82 +++----
 src/createMigrationScript/command.js          |   9 +-
 src/dump/command.js                           |  68 +++--
 src/dump/dump.js                              |  32 ++-
 src/dump/requireToken.js                      |  29 +--
 src/environment/destroy/command.js            |   8 +-
 src/environment/getPrimary/command.js         |  11 +-
 src/environment/promote/command.js            |   8 +-
 src/runPendingMigrations/command.js           |  15 +-
 src/toggleMaintenanceMode/command.js          |   8 +-
 24 files changed, 642 insertions(+), 611 deletions(-)

diff --git a/bin/dato.js b/bin/dato.js
index 2a68b934..064bacda 100755
--- a/bin/dato.js
+++ b/bin/dato.js
@@ -1,10 +1,46 @@
-#!/usr/bin/env node
+#!/usr/bin/env node --async-stack-traces
+
+const PrettyError = require('pretty-error');
+const colors = require('colors');
 
 require('../lib');
 const runCli = require('../lib/cli');
+const ApiException = require('../lib/ApiException').default;
 
 runCli().catch(e => {
-  process.stdout.write(`Command failed with the following error:\n`);
-  process.stdout.write(`${e.message}\n`);
+  process.stderr.write(colors.brightRed(`\nCommand failed!\n`));
+
+  if (e instanceof ApiException) {
+    const humanMessage = e.humanMessageForFailedResponse();
+
+    if (humanMessage) {
+      process.stderr.write(`${colors.red.underline(humanMessage)} \n\n`);
+    }
+
+    process.stderr.write(colors.underline.gray(`\nFailed request:\n\n`));
+
+    process.stderr.write(`${e.requestMethod} ${e.requestUrl}\n\n`);
+    for (const [key, value] of Object.entries(e.requestHeaders)) {
+      process.stderr.write(`${key}: ${value}\n`);
+    }
+    if (e.requestBody) {
+      process.stderr.write(`\n${e.requestBody}`);
+    }
+
+    process.stderr.write(colors.underline.gray(`\n\nHTTP Response:\n\n`));
+
+    process.stderr.write(`${e.statusCode} ${e.statusText}\n\n`);
+    for (const [key, value] of Object.entries(e.headers)) {
+      process.stderr.write(`${key}: ${value}\n`);
+    }
+
+    if (e.body) {
+      process.stderr.write(`\n${JSON.stringify(e.body)}`);
+    }
+  }
+
+  process.stderr.write(colors.underline.gray(`\n\nException details:\n\n`));
+  process.stderr.write(new PrettyError().render(e));
+
   process.exit(1);
 });
diff --git a/src/ApiException.js b/src/ApiException.js
index 3daecdbe..37f3ffc4 100644
--- a/src/ApiException.js
+++ b/src/ApiException.js
@@ -1,35 +1,99 @@
-export default function ApiException(response, body) {
+export default function ApiException(
+  response,
+  body,
+  { url, options, preCallStack },
+) {
   if ('captureStackTrace' in Error) {
     Error.captureStackTrace(this, ApiException);
   } else {
     this.stack = new Error().stack;
   }
 
-  if (response) {
-    if (response.status < 500) {
-      const error = body.data[0];
-      const details = JSON.stringify(error.attributes.details);
-      this.message = `${response.status} ${error.attributes.code} (details: ${details})`;
-    } else {
-      this.message = `${response.status} ${response.statusText}`;
-    }
-
-    this.body = body;
-    this.headers = response.headers;
-    this.statusCode = response.status;
-    this.statusText = response.statusText;
+  if (response.status < 500) {
+    const error = body.data[0];
+    const details = JSON.stringify(error.attributes.details);
+    this.message = `${response.status} ${error.attributes.code} (details: ${details})`;
   } else {
-    this.message = 'Misconfigured exception';
+    this.message = `${response.status} ${response.statusText}`;
   }
+
+  this.body = body;
+  this.headers = response.headers.raw();
+  this.statusCode = response.status;
+  this.statusText = response.statusText;
+  this.requestUrl = url;
+  this.requestMethod = options.method || 'GET';
+  this.requestHeaders = options.headers;
+  this.requestBody = options.body;
+  this.stack += `\nCaused By:\n${preCallStack}`;
 }
 
 ApiException.prototype = Object.create(Error.prototype);
 ApiException.prototype.name = 'ApiException';
 ApiException.prototype.constructor = ApiException;
-ApiException.prototype.errorWithCode = function errorWithCode(code) {
+
+ApiException.prototype.errorWithCode = function errorWithCode(codeOrCodes) {
+  const codes = Array.isArray(codeOrCodes) ? codeOrCodes : [codeOrCodes];
+
   if (!this.body || !(this.body.data instanceof Array)) {
     return null;
   }
 
-  return this.body.data.find(error => error.attributes.code === code);
+  return this.body.data.find(error => codes.includes(error.attributes.code));
+};
+
+const humanMessageForCode = {
+  BATCH_DATA_VALIDATION_IN_PROGRESS: `The schema of this model changed, we're re-running validations over every record in background. Please retry this operation in a few seconds!`,
+  INSUFFICIENT_PERMISSIONS: `Your role does not permit this action`,
+  MAINTENANCE_MODE: `The project is currently in maintenance mode!`,
+  DELETE_RESTRICTION: `Sorry, but you cannot delete this resource, as it's currently used/referenced elsewhere!`,
+  INVALID_CREDENTIALS: `Credentials are incorrect!`,
+  INVALID_EMAIL: `Email address is incorrect!`,
+  INVALID_FORMAT: `The format of the parameters passed is incorrect, take a look at the details of the error to know what's wrong!`,
+  ITEM_LOCKED: `The operation cannot be completed as some other user is currently editing this record!`,
+  LINKED_FROM_PUBLISHED_ITEMS: `Couldn't unpublish the record, as some published records are linked to it!`,
+  PLAN_UPGRADE_REQUIRED: `Cannot proceed, please upgrade plan!`,
+  PUBLISHED_CHILDREN: `Couldn't unpublish the record, some children records are still published!`,
+  REQUIRED_2FA_SETUP: `This project requires every user to turn on 2-factor authentication! Please go to your Dashboard and activate it! (https://dashboard.datocms.com/account/setup-2fa)`,
+  REQUIRED_BY_ASSOCIATION: `Cannot delete the record, as it's required by other records:`,
+  STALE_ITEM_VERSION: `Someone else made a change while you were editing this record, please refresh the page!`,
+  TITLE_ALREADY_PRESENT: `There can only be one Title field per model`,
+  UNPUBLISHED_LINK: `Couldn't publish the record, as it links some unpublished records!`,
+  UNPUBLISHED_PARENT: `Couldn't publish the record, as the parent record is not published!`,
+  UPLOAD_IS_CURRENTLY_IN_USE: `Couldn't delete this asset, as it's currently used by some records!`,
+  UPLOAD_NOT_PASSING_FIELD_VALIDATIONS: `Couldn't update this asset since some records are failing to pass the validations!`,
+};
+
+const humanMessageForPlanUpgradeLimit = {
+  build_triggers: `You've reached the maximum number of build triggers your plan allows`,
+  sandbox_environments: `You've reached the maximum number of environments your plan allows`,
+  item_types: `You've reached the maximum number of models your plan allows to create`,
+  items: `You've reached the maximum number of records your plan allows to create`,
+  locales: `You've reached the maximum number of locales your plan allows`,
+  mux_encoding_seconds: `You've reached the maximum video encoding limits of your plan`,
+  otp: `Two-factor authentication cannot be enabled on the current plan`,
+  plugins: `You've reached the maximum number of plugins your plan allows`,
+  roles: `You've reached the maximum number of roles your plan allows to create`,
+  uploadable_bytes: `You've reached the file storage limits of your plan`,
+  users: `You've reached the maximum number of collaborators your plan allows to invite to the project`,
+  access_tokens: `You've reached the maximum number of API tokens your plan allows to create`,
+};
+
+ApiException.prototype.humanMessageForFailedResponse = function humanMessageForFailedResponse() {
+  const planUpgradeError = this.errorWithCode('PLAN_UPGRADE_REQUIRED');
+
+  if (planUpgradeError) {
+    const { limit } = planUpgradeError.attributes.details;
+    return `${humanMessageForPlanUpgradeLimit[limit]}. Please head over to your account dashboard (https://dashboard.datocms.com/) to upgrade the plan or, if no publicly available plan suits your needs, contact our Sales team (https://www.datocms.com/contact) to get a custom quote!`;
+  }
+
+  const errors = Object.keys(humanMessageForCode)
+    .filter(code => this.errorWithCode(code))
+    .map(code => humanMessageForCode[code]);
+
+  if (errors.length === 0) {
+    return null;
+  }
+
+  return errors.join('\n');
 };
diff --git a/src/Client.js b/src/Client.js
index 282bbc57..361f34fd 100644
--- a/src/Client.js
+++ b/src/Client.js
@@ -59,15 +59,24 @@ export default class Client {
     return `${this.baseUrl}${path}${query}`;
   }
 
-  request(url, options = {}, retryCount = 1) {
+  request(url, options = {}, retryCount = 1, preCallStack = null) {
     const fullHeaders = {
       ...this.defaultHeaders(),
       ...this.extraHeaders,
       ...options.headers,
     };
 
+    Object.keys(fullHeaders).forEach(
+      key => fullHeaders[key] == null && delete fullHeaders[key],
+    );
+
     const fullOptions = { ...options, headers: fullHeaders };
 
+    if (!preCallStack) {
+      // eslint-disable-next-line no-param-reassign
+      preCallStack = new Error().stack;
+    }
+
     return fetch(url, fullOptions).then(res => {
       if (res.status === 429) {
         const waitTime = parseInt(
@@ -78,7 +87,7 @@ export default class Client {
           `Rate limit exceeded, waiting ${waitTime * retryCount} seconds...`,
         );
         return wait(waitTime * retryCount * 1000).then(() => {
-          return this.request(url, options, retryCount + 1);
+          return this.request(url, options, retryCount + 1, preCallStack);
         });
       }
 
@@ -87,7 +96,13 @@ export default class Client {
           if (res.status >= 200 && res.status < 300) {
             return Promise.resolve(body);
           }
-          return Promise.reject(new ApiException(res, body));
+          return Promise.reject(
+            new ApiException(res, body, {
+              url,
+              options: fullOptions,
+              preCallStack,
+            }),
+          );
         })
         .catch(error => {
           if (
@@ -102,7 +117,7 @@ export default class Client {
             `Data validation in progress, waiting ${retryCount} seconds...`,
           );
           return wait(retryCount * 1000).then(() => {
-            return this.request(url, options, retryCount + 1);
+            return this.request(url, options, retryCount + 1, preCallStack);
           });
         }
         throw error;
diff --git a/src/check/command.js b/src/check/command.js
index 786c4f7e..5cc6c591 100644
--- a/src/check/command.js
+++ b/src/check/command.js
@@ -4,9 +4,8 @@ export default function() {
   const token = process.env.DATO_API_TOKEN;
 
   if (token) {
-    process.exit();
-    return;
+    return undefined;
   }
 
-  requireToken().then(() => process.exit());
+  return requireToken();
 }
diff --git a/src/contentfulImport/addValidationsOnField.js b/src/contentfulImport/addValidationsOnField.js
index 37119ea8..758a764e 100644
--- a/src/contentfulImport/addValidationsOnField.js
+++ b/src/contentfulImport/addValidationsOnField.js
@@ -4,7 +4,6 @@ import ora from 'ora';
 import Progress from './progress';
 import { toItemApiKey, toFieldApiKey } from './toApiKey';
 import datoFieldValidatorsFor from './datoFieldValidatorsFor';
-import delay from './delay';
 
 export default async ({
   itemTypes,
@@ -13,21 +12,22 @@ export default async ({
   contentfulData,
 }) => {
   const spinner = ora('').start();
-  const { contentTypes } = contentfulData;
-  const fieldsSize = contentTypes
-    .map(contentType => contentType.fields.length)
-    .reduce((acc, length) => acc + length, 0);
 
-  const progress = new Progress(fieldsSize, 'Adding validations on fields');
-  spinner.text = progress.tick();
+  try {
+    const { contentTypes } = contentfulData;
+    const fieldsSize = contentTypes
+      .map(contentType => contentType.fields.length)
+      .reduce((acc, length) => acc + length, 0);
 
-  for (const contentType of contentTypes) {
-    const contentTypeApiKey = toItemApiKey(contentType.sys.id);
+    const progress = new Progress(fieldsSize, 'Adding validations on fields');
+    spinner.text = progress.tick();
 
-    const itemTypeFields = fieldsMapping[contentTypeApiKey];
+    for (const contentType of contentTypes) {
+      const contentTypeApiKey = toItemApiKey(contentType.sys.id);
 
-    for (const field of contentType.fields) {
-      while (true) {
+      const itemTypeFields = fieldsMapping[contentTypeApiKey];
+
+      for (const field of contentType.fields) {
         const fieldApiKey = toFieldApiKey(field.id);
         const datoField = itemTypeFields.find(f => f.apiKey === fieldApiKey);
         if (!datoField) {
@@ -35,26 +35,14 @@ export default async ({
         }
 
         const validators = await datoFieldValidatorsFor({ field, itemTypes });
-
-        try {
-          await datoClient.fields.update(datoField.id, { validators });
-          spinner.text = progress.tick();
-          break;
-        } catch (e) {
-          if (
-            !e.body ||
-            !e.body.data ||
-            !e.body.data.some(d => d.id === 'BATCH_DATA_VALIDATION_IN_PROGRESS')
-          ) {
-            spinner.fail(typeof e === 'object' ? e.message : e);
-            process.exit();
-          } else {
-            await delay(1000);
-          }
-        }
+        await datoClient.fields.update(datoField.id, { validators });
+        spinner.text = progress.tick();
       }
     }
-  }
 
-  spinner.succeed();
+    spinner.succeed();
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 };
diff --git a/src/contentfulImport/appClient.js b/src/contentfulImport/appClient.js
index 474b6c08..32890574 100644
--- a/src/contentfulImport/appClient.js
+++ b/src/contentfulImport/appClient.js
@@ -10,19 +10,22 @@ export default async (
   datoCmsCmaBaseUrl,
 ) => {
   const spinner = ora('Configuring DatoCMS/Contentful clients').start();
-  const contentfulClient = createClient({ accessToken: contentfulToken });
-  const dato = new SiteClient(
-    datoCmsToken,
-    { environment: datoCmsEnvironment },
-    datoCmsCmaBaseUrl,
-  );
-  let contentful;
+
   try {
-    contentful = await contentfulClient.getSpace(contentfulSpaceId);
+    const contentfulClient = createClient({ accessToken: contentfulToken });
+
+    const dato = new SiteClient(
+      datoCmsToken,
+      { environment: datoCmsEnvironment },
+      datoCmsCmaBaseUrl,
+    );
+
+    const contentful = await contentfulClient.getSpace(contentfulSpaceId);
 
     spinner.succeed();
+
+    return { dato, contentful };
   } catch (e) {
-    spinner.fail(typeof e === 'object' ? e.message : e);
-    process.exit();
+    spinner.fail();
+    throw e;
   }
-  return { dato, contentful };
 };
diff --git a/src/contentfulImport/createFields.js b/src/contentfulImport/createFields.js
index 14623759..3e6508aa 100644
--- a/src/contentfulImport/createFields.js
+++ b/src/contentfulImport/createFields.js
@@ -4,105 +4,94 @@ import Progress from './progress';
 import { toItemApiKey, toFieldApiKey } from './toApiKey';
 import datoFieldTypeFor from './datoFieldTypeFor';
 import datoLinkItemTypeFor from './datoLinkItemTypeFor';
-import delay from './delay';
 
 export default async ({ itemTypes, datoClient, contentfulData }) => {
   const spinner = ora('').start();
-  const { contentTypes } = contentfulData;
-  const fieldSize = contentTypes
-    .map(contentType => contentType.fields.length)
-    .reduce((acc, length) => acc + length, 0);
 
-  const progress = new Progress(fieldSize, 'Creating fields');
-  spinner.text = progress.tick();
-  const fieldsMapping = {};
+  try {
+    const { contentTypes } = contentfulData;
+    const fieldSize = contentTypes
+      .map(contentType => contentType.fields.length)
+      .reduce((acc, length) => acc + length, 0);
 
-  for (const contentType of contentTypes) {
-    const contentTypeApiKey = toItemApiKey(contentType.sys.id);
-    fieldsMapping[contentTypeApiKey] = [];
+    const progress = new Progress(fieldSize, 'Creating fields');
+    spinner.text = progress.tick();
+    const fieldsMapping = {};
 
-    const itemType = itemTypes.find(iT => {
-      return iT.apiKey === contentTypeApiKey;
-    });
+    for (const contentType of contentTypes) {
+      const contentTypeApiKey = toItemApiKey(contentType.sys.id);
+      fieldsMapping[contentTypeApiKey] = [];
 
-    for (const contentfulField of contentType.fields) {
-      const position = contentType.fields.indexOf(contentfulField);
-      let validators = {};
+      const itemType = itemTypes.find(iT => {
+        return iT.apiKey === contentTypeApiKey;
+      });
 
-      if (
-        contentfulField.type === 'Link' &&
-        contentfulField.linkType === 'Entry'
-      ) {
-        validators = {
-          itemItemType: {
-            itemTypes: datoLinkItemTypeFor({
-              itemTypes,
-              field: contentfulField,
-            }),
-          },
-        };
-      }
+      for (const contentfulField of contentType.fields) {
+        const position = contentType.fields.indexOf(contentfulField);
+        let validators = {};
 
-      if (
-        contentfulField.type === 'Array' &&
-        contentfulField.items.type === 'Link' &&
-        contentfulField.items.linkType === 'Entry'
-      ) {
-        validators = {
-          itemsItemType: {
-            itemTypes: datoLinkItemTypeFor({
-              itemTypes,
-              field: contentfulField.items,
-            }),
-          },
-        };
-      }
+        if (
+          contentfulField.type === 'Link' &&
+          contentfulField.linkType === 'Entry'
+        ) {
+          validators = {
+            itemItemType: {
+              itemTypes: datoLinkItemTypeFor({
+                itemTypes,
+                field: contentfulField,
+              }),
+            },
+          };
+        }
 
-      const fieldAttributes = {
-        label: contentfulField.name,
-        fieldType: datoFieldTypeFor(contentfulField),
-        localized: contentfulField.localized,
-        apiKey: toFieldApiKey(contentfulField.id),
-        position,
-        validators,
-      };
+        if (
+          contentfulField.type === 'Array' &&
+          contentfulField.items.type === 'Link' &&
+          contentfulField.items.linkType === 'Entry'
+        ) {
+          validators = {
+            itemsItemType: {
+              itemTypes: datoLinkItemTypeFor({
+                itemTypes,
+                field: contentfulField.items,
+              }),
+            },
+          };
+        }
 
-      if (
-        contentfulField.id === contentType.displayField &&
-        contentfulField.type === 'Symbol'
-      ) {
-        fieldAttributes.appearance = {
-          editor: 'single_line',
-          parameters: { heading: true },
-          addons: [],
+        const fieldAttributes = {
+          label: contentfulField.name,
+          fieldType: datoFieldTypeFor(contentfulField),
+          localized: contentfulField.localized,
+          apiKey: toFieldApiKey(contentfulField.id),
+          position,
+          validators,
         };
-      }
 
-      while (true) {
-        try {
-          const datoField = await datoClient.fields.create(
-            itemType.id,
-            fieldAttributes,
-          );
-          spinner.text = progress.tick();
-          fieldsMapping[contentTypeApiKey].push(datoField);
-          break;
-        } catch (e) {
-          if (
-            !e.body ||
-            !e.body.data ||
-            !e.body.data.some(d => d.id === 'BATCH_DATA_VALIDATION_IN_PROGRESS')
-          ) {
-            spinner.fail(typeof e === 'object' ? e.message : e);
-            process.exit();
-          } else {
-            await delay(1000);
-          }
+        if (
+          contentfulField.id === contentType.displayField &&
+          contentfulField.type === 'Symbol'
+        ) {
+          fieldAttributes.appearance = {
+            editor: 'single_line',
+            parameters: { heading: true },
+            addons: [],
+          };
         }
+
+        const datoField = await datoClient.fields.create(
+          itemType.id,
+          fieldAttributes,
+        );
+        spinner.text = progress.tick();
+        fieldsMapping[contentTypeApiKey].push(datoField);
       }
     }
-  }
 
-  spinner.succeed();
-  return fieldsMapping;
+    spinner.succeed();
+    return fieldsMapping;
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 };
diff --git a/src/contentfulImport/createModels.js b/src/contentfulImport/createModels.js
index f67aa108..f515f3b0 100644
--- a/src/contentfulImport/createModels.js
+++ b/src/contentfulImport/createModels.js
@@ -4,38 +4,38 @@ import { toItemApiKey } from './toApiKey';
 
 export default async ({ datoClient, contentfulData }) => {
   const spinner = ora().start();
-  const { contentTypes } = contentfulData;
 
-  const progress = new Progress(contentTypes.length, 'Creating models');
-  spinner.text = progress.tick();
+  try {
+    const { contentTypes } = contentfulData;
 
-  const itemTypes = [];
+    const progress = new Progress(contentTypes.length, 'Creating models');
+    spinner.text = progress.tick();
 
-  for (const contentType of contentTypes) {
-    const itemTypeApiKey = toItemApiKey(contentType.sys.id);
-    const itemAttributes = {
-      apiKey: itemTypeApiKey,
-      name: contentType.name,
-      modularBlock: false,
-      orderingDirection: null,
-      singleton: false,
-      sortable: false,
-      tree: false,
-      orderingField: null,
-      draftModeActive: true,
-    };
+    const itemTypes = [];
 
-    try {
+    for (const contentType of contentTypes) {
+      const itemTypeApiKey = toItemApiKey(contentType.sys.id);
+      const itemAttributes = {
+        apiKey: itemTypeApiKey,
+        name: contentType.name,
+        modularBlock: false,
+        orderingDirection: null,
+        singleton: false,
+        sortable: false,
+        tree: false,
+        orderingField: null,
+        draftModeActive: true,
+      };
+
       const itemType = await datoClient.itemTypes.create(itemAttributes);
       spinner.text = progress.tick();
       itemTypes.push(itemType);
-    } catch (e) {
-      spinner.fail(typeof e === 'object' ? e.message : e);
-      process.exit();
     }
-  }
 
-  spinner.succeed();
+    spinner.succeed();
 
-  return itemTypes;
+    return itemTypes;
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 };
diff --git a/src/contentfulImport/createRecords.js b/src/contentfulImport/createRecords.js
index 6a7a95a4..357cc97e 100644
--- a/src/contentfulImport/createRecords.js
+++ b/src/contentfulImport/createRecords.js
@@ -11,114 +11,120 @@ export default async ({
   contentfulData,
 }) => {
   const spinner = ora('').start();
-  const { entries } = contentfulData;
-  const progress = new Progress(entries.length, 'Creating records');
-
-  const contentfulRecordMap = {};
-  const recordsToPublish = [];
-
-  spinner.text = progress.tick();
-
-  for (const entry of entries) {
-    const { contentType } = entry.sys;
-    const contentTypeApiKey = toItemApiKey(contentType.sys.id);
-
-    const itemType = itemTypes.find(iT => {
-      return iT.apiKey === contentTypeApiKey;
-    });
-
-    const itemTypeFields = fieldsMapping[contentTypeApiKey];
-
-    if (itemType) {
-      const emptyFieldValues = itemTypeFields.reduce((accFields, field) => {
-        if (field.localized) {
-          const value = contentfulData.locales
-            .map(locale => locale)
-            .reduce(
-              (accLocales, locale) =>
-                Object.assign(accLocales, { [locale]: null }),
-              {},
-            );
-          return Object.assign(accFields, { [camelize(field.apiKey)]: value });
-        }
-        return Object.assign(accFields, { [camelize(field.apiKey)]: null });
-      }, {});
-
-      const recordAttributes = Object.entries(entry.fields).reduce(
-        (acc, [option, value]) => {
-          const apiKey = toFieldApiKey(option);
-          const field = itemTypeFields.find(f => f.apiKey === apiKey);
-          switch (field.fieldType) {
-            case 'link':
-            case 'links':
-            case 'file':
-            case 'gallery':
-              return acc;
-            default:
-              break;
-          }
-          if (field.localized) {
-            const localizedValue = Object.keys(value).reduce(
-              (innerAcc, locale) => {
-                let innerValue = value[locale];
-
-                if (field.fieldType === 'lat_lon') {
-                  innerValue = {
-                    latitude: innerValue.lat,
-                    longitude: innerValue.lon,
-                  };
-                }
-
-                if (field.fieldType === 'string' && Array.isArray(innerValue)) {
-                  innerValue = innerValue.join(', ');
-                }
-
-                if (field.fieldType === 'json') {
-                  innerValue = JSON.stringify(innerValue, null, 2);
-                }
-                return Object.assign(innerAcc, {
-                  [locale]: innerValue,
-                });
-              },
-              {},
-            );
-
-            const fallbackValues = contentfulData.locales.reduce(
-              (accLocales, locale) => {
-                return Object.assign(accLocales, {
-                  [locale]: localizedValue[contentfulData.defaultLocale],
-                });
-              },
-              {},
-            );
-
-            return Object.assign(acc, {
-              [camelize(apiKey)]: { ...fallbackValues, ...localizedValue },
-            });
-          }
-          let innerValue = value[contentfulData.defaultLocale];
+  try {
+    const { entries } = contentfulData;
+    const progress = new Progress(entries.length, 'Creating records');
 
-          if (field.fieldType === 'lat_lon') {
-            innerValue = {
-              latitude: innerValue.lat,
-              longitude: innerValue.lon,
-            };
-          }
+    const contentfulRecordMap = {};
+    const recordsToPublish = [];
 
-          if (field.fieldType === 'string' && Array.isArray(innerValue)) {
-            innerValue = innerValue.join(', ');
-          }
+    spinner.text = progress.tick();
+
+    for (const entry of entries) {
+      const { contentType } = entry.sys;
+      const contentTypeApiKey = toItemApiKey(contentType.sys.id);
 
-          if (field.fieldType === 'json') {
-            innerValue = JSON.stringify(innerValue, null, 2);
+      const itemType = itemTypes.find(iT => {
+        return iT.apiKey === contentTypeApiKey;
+      });
+
+      const itemTypeFields = fieldsMapping[contentTypeApiKey];
+
+      if (itemType) {
+        const emptyFieldValues = itemTypeFields.reduce((accFields, field) => {
+          if (field.localized) {
+            const value = contentfulData.locales
+              .map(locale => locale)
+              .reduce(
+                (accLocales, locale) =>
+                  Object.assign(accLocales, { [locale]: null }),
+                {},
+              );
+            return Object.assign(accFields, {
+              [camelize(field.apiKey)]: value,
+            });
           }
-          return Object.assign(acc, { [camelize(apiKey)]: innerValue });
-        },
-        emptyFieldValues,
-      );
+          return Object.assign(accFields, { [camelize(field.apiKey)]: null });
+        }, {});
+
+        const recordAttributes = Object.entries(entry.fields).reduce(
+          (acc, [option, value]) => {
+            const apiKey = toFieldApiKey(option);
+            const field = itemTypeFields.find(f => f.apiKey === apiKey);
+            switch (field.fieldType) {
+              case 'link':
+              case 'links':
+              case 'file':
+              case 'gallery':
+                return acc;
+              default:
+                break;
+            }
+
+            if (field.localized) {
+              const localizedValue = Object.keys(value).reduce(
+                (innerAcc, locale) => {
+                  let innerValue = value[locale];
+
+                  if (field.fieldType === 'lat_lon') {
+                    innerValue = {
+                      latitude: innerValue.lat,
+                      longitude: innerValue.lon,
+                    };
+                  }
+
+                  if (
+                    field.fieldType === 'string' &&
+                    Array.isArray(innerValue)
+                  ) {
+                    innerValue = innerValue.join(', ');
+                  }
+
+                  if (field.fieldType === 'json') {
+                    innerValue = JSON.stringify(innerValue, null, 2);
+                  }
+                  return Object.assign(innerAcc, {
+                    [locale]: innerValue,
+                  });
+                },
+                {},
+              );
+
+              const fallbackValues = contentfulData.locales.reduce(
+                (accLocales, locale) => {
+                  return Object.assign(accLocales, {
+                    [locale]: localizedValue[contentfulData.defaultLocale],
+                  });
+                },
+                {},
+              );
+
+              return Object.assign(acc, {
+                [camelize(apiKey)]: { ...fallbackValues, ...localizedValue },
+              });
+            }
+            let innerValue = value[contentfulData.defaultLocale];
+
+            if (field.fieldType === 'lat_lon') {
+              innerValue = {
+                latitude: innerValue.lat,
+                longitude: innerValue.lon,
+              };
+            }
+
+            if (field.fieldType === 'string' && Array.isArray(innerValue)) {
+              innerValue = innerValue.join(', ');
+            }
+
+            if (field.fieldType === 'json') {
+              innerValue = JSON.stringify(innerValue, null, 2);
+            }
+            return Object.assign(acc, { [camelize(apiKey)]: innerValue });
+          },
+          emptyFieldValues,
+        );
 
-      try {
         const record = await datoClient.items.create({
           ...recordAttributes,
           itemType: itemType.id.toString(),
@@ -130,24 +136,14 @@ export default async ({
         spinner.text = progress.tick();
 
         contentfulRecordMap[entry.sys.id] = record.id;
-      } catch (e) {
-        if (
-          e.body &&
-          e.body.data &&
-          e.body.data.some(d => d.id === 'ITEMS_QUOTA_EXCEEDED')
-        ) {
-          spinner.fail(
-            "You've reached your site's plan record limit: upgrade to complete the import",
-          );
-        } else {
-          spinner.fail(typeof e === 'object' ? e.message : e);
-        }
-        process.exit();
       }
     }
-  }
 
-  spinner.succeed();
+    spinner.succeed();
 
-  return { contentfulRecordMap, recordsToPublish };
+    return { contentfulRecordMap, recordsToPublish };
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 };
diff --git a/src/contentfulImport/createUploads.js b/src/contentfulImport/createUploads.js
index 85a7cc02..74911428 100644
--- a/src/contentfulImport/createUploads.js
+++ b/src/contentfulImport/createUploads.js
@@ -24,18 +24,20 @@ export default async ({
   contentfulRecordMap,
 }) => {
   let spinner = ora('').start();
-  const { entries, assets } = contentfulData;
 
-  let progress = new Progress(assets.length, 'Uploading assets');
-  spinner.text = progress.tick();
+  try {
+    const { entries, assets } = contentfulData;
 
-  const contentfulAssetsMap = {};
+    let progress = new Progress(assets.length, 'Uploading assets');
+    spinner.text = progress.tick();
+
+    const contentfulAssetsMap = {};
+
+    for (const asset of assets) {
+      if (asset.fields && asset.fields.file) {
+        const fileAttributes = asset.fields.file[contentfulData.defaultLocale];
+        const fileUrl = `https:${fileAttributes.url}`;
 
-  for (const asset of assets) {
-    if (asset.fields && asset.fields.file) {
-      const fileAttributes = asset.fields.file[contentfulData.defaultLocale];
-      const fileUrl = `https:${fileAttributes.url}`;
-      try {
         const path = await datoClient.createUploadPath(fileUrl);
         const defaultFieldMetadata = contentfulData.locales.reduce(
           (acc, locale) => {
@@ -60,33 +62,19 @@ export default async ({
 
         contentfulAssetsMap[asset.sys.id.toString()] = upload.id;
 
         spinner.text = progress.tick();
-      } catch (e) {
-        if (
-          e.body &&
-          e.body.data &&
-          e.body.data.some(d => d.id === 'FILE_STORAGE_QUOTA_EXCEEDED')
-        ) {
-          spinner.fail(
-            "You've reached your site's plan storage limit: upgrade to complete the import",
-          );
-        } else {
-          spinner.fail(typeof e === 'object' ? e.message : e);
-        }
-        process.exit();
+      } else {
+        spinner.text = progress.tick();
       }
-    } else {
-      spinner.text = progress.tick();
     }
-  }
 
-  spinner.succeed();
-  spinner = ora('').start();
-  progress = new Progress(entries.length, 'Linking assets to records');
-  spinner.text = progress.tick();
-
-  for (const entry of entries) {
-    const datoItemId = contentfulRecordMap[entry.sys.id];
-    let recordAttributes = {};
-    try {
+    spinner.succeed();
+    spinner = ora('').start();
+    progress = new Progress(entries.length, 'Linking assets to records');
+    spinner.text = progress.tick();
+
+    for (const entry of entries) {
+      const datoItemId = contentfulRecordMap[entry.sys.id];
+      let recordAttributes = {};
+
       for (const key of Object.keys(entry.fields)) {
         const entryFieldValue = entry.fields[key];
@@ -156,13 +144,18 @@ export default async ({
           }
         }
       }
-      await datoClient.items.update(datoItemId, recordAttributes);
+
+      // if no file/gallery is found, no update needed.
+      if (Object.entries(recordAttributes).length > 0) {
+        await datoClient.items.update(datoItemId, recordAttributes);
+      }
+
       spinner.text = progress.tick();
-    } catch (e) {
-      spinner.fail(typeof e === 'object' ? e.message : e);
-      process.exit();
     }
-  }
 
-  spinner.succeed();
+    spinner.succeed();
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 };
diff --git a/src/contentfulImport/destroyExistingAssets.js b/src/contentfulImport/destroyExistingAssets.js
index ce18a1b2..b4b1e878 100644
--- a/src/contentfulImport/destroyExistingAssets.js
+++ b/src/contentfulImport/destroyExistingAssets.js
@@ -4,31 +4,31 @@ import Progress from './progress';
 
 export default async ({ datoClient }) => {
   let spinner = ora('Fetching assets not in use').start();
-  const uploads = await datoClient.uploads.all(
-    { 'filter[type]': 'not_used' },
-    { allPages: true },
-  );
 
-  spinner.succeed();
+  try {
+    const uploads = await datoClient.uploads.all(
+      { 'filter[type]': 'not_used' },
+      { allPages: true },
+    );
 
-  if (uploads.length > 0) {
-    const progress = new Progress(
-      uploads.length,
-      'Destroying assets not in use',
-    );
-    spinner = ora('').start();
-    spinner.text = progress.tick();
+    spinner.succeed();
 
-    for (const upload of uploads) {
-      try {
+    if (uploads.length > 0) {
+      const progress = new Progress(
+        uploads.length,
+        'Destroying assets not in use',
+      );
+      spinner = ora('').start();
+      spinner.text = progress.tick();
+
+      for (const upload of uploads) {
         await datoClient.uploads.destroy(upload.id);
         spinner.text = progress.tick();
-      } catch (e) {
-        spinner.fail(typeof e === 'object' ? e.message : e);
-        process.exit();
       }
-    }
 
-    spinner.succeed();
+      spinner.succeed();
+    }
+  } catch (e) {
+    spinner.fail();
+    throw e;
   }
 };
diff --git a/src/contentfulImport/destroyExistingModels.js b/src/contentfulImport/destroyExistingModels.js
index 18e3b931..7b800527 100644
--- a/src/contentfulImport/destroyExistingModels.js
+++ b/src/contentfulImport/destroyExistingModels.js
@@ -4,36 +4,35 @@ import { toItemApiKey } from './toApiKey';
 
 export default async ({ datoClient, contentfulData }) => {
   let spinner = ora('Fetching existing models').start();
+  try {
+    const itemTypes = await datoClient.itemTypes.all();
 
-  const itemTypes = await datoClient.itemTypes.all();
-
-  const importedItemTypes = itemTypes.filter(itemType => {
-    return contentfulData.contentTypes.some(contentType => {
-      return itemType.apiKey === toItemApiKey(contentType.sys.id);
+    const importedItemTypes = itemTypes.filter(itemType => {
+      return contentfulData.contentTypes.some(contentType => {
+        return itemType.apiKey === toItemApiKey(contentType.sys.id);
+      });
     });
-  });
 
-  spinner.succeed();
+    spinner.succeed();
 
-  if (importedItemTypes.length > 0) {
-    spinner = ora('').start();
-    const progress = new Progress(
-      importedItemTypes.length,
-      'Destroying existing models',
-    );
+    if (importedItemTypes.length > 0) {
+      spinner = ora('').start();
+      const progress = new Progress(
+        importedItemTypes.length,
+        'Destroying existing models',
+      );
 
-    spinner.text = progress.tick();
+      spinner.text = progress.tick();
 
-    for (const itemType of importedItemTypes) {
-      try {
+      for (const itemType of importedItemTypes) {
         spinner.text = progress.tick();
         await datoClient.itemTypes.destroy(itemType.id);
-      } catch (e) {
-        spinner.fail(typeof e === 'object' ? e.message : e);
-        process.exit();
       }
-    }
 
-    spinner.succeed();
+      spinner.succeed();
+    }
+  } catch (e) {
+    spinner.fail();
+    throw e;
   }
 };
diff --git a/src/contentfulImport/linkRecords.js b/src/contentfulImport/linkRecords.js
index 93d4ac55..ed4b6647 100644
--- a/src/contentfulImport/linkRecords.js
+++ b/src/contentfulImport/linkRecords.js
@@ -11,96 +11,97 @@ export default async ({
   contentfulRecordMap,
 }) => {
   const spinner = ora('').start();
-  const { entries } = contentfulData;
-  const progress = new Progress(entries.length, 'Linking records');
-  const recordsToPublish = [];
 
-  spinner.text = progress.tick();
+  try {
+    const { entries } = contentfulData;
+    const progress = new Progress(entries.length, 'Linking records');
+    const recordsToPublish = [];
 
-  for (const entry of entries) {
-    const { contentType } = entry.sys;
-    const contentTypeApiKey = toItemApiKey(contentType.sys.id);
+    spinner.text = progress.tick();
 
-    const datoItemId = contentfulRecordMap[entry.sys.id];
+    for (const entry of entries) {
+      const { contentType } = entry.sys;
+      const contentTypeApiKey = toItemApiKey(contentType.sys.id);
 
-    const itemTypeFields = fieldsMapping[contentTypeApiKey];
+      const datoItemId = contentfulRecordMap[entry.sys.id];
 
-    const recordAttributes = Object.entries(entry.fields).reduce(
-      (outerAcc, [option, value]) => {
-        const apiKey = toFieldApiKey(option);
-        const field = itemTypeFields.find(
-          itemTypefield => itemTypefield.apiKey === apiKey,
-        );
+      const itemTypeFields = fieldsMapping[contentTypeApiKey];
 
-        if (field.fieldType !== 'link' && field.fieldType !== 'links') {
-          return outerAcc;
-        }
+      const recordAttributes = Object.entries(entry.fields).reduce(
+        (outerAcc, [option, value]) => {
+          const apiKey = toFieldApiKey(option);
+          const field = itemTypeFields.find(
+            itemTypefield => itemTypefield.apiKey === apiKey,
+          );
 
-        if (field.localized) {
-          const localizedValue = Object.keys(value).reduce(
-            (innerAcc, locale) => {
-              const innerValue = value[locale];
-              if (field.fieldType === 'link') {
+          if (field.fieldType !== 'link' && field.fieldType !== 'links') {
+            return outerAcc;
+          }
+
+          if (field.localized) {
+            const localizedValue = Object.keys(value).reduce(
+              (innerAcc, locale) => {
+                const innerValue = value[locale];
+                if (field.fieldType === 'link') {
+                  return Object.assign(innerAcc, {
+                    [locale]: contentfulRecordMap[innerValue.sys.id],
+                  });
+                }
                 return Object.assign(innerAcc, {
-                  [locale]: contentfulRecordMap[innerValue.sys.id],
+                  [locale]: innerValue
+                    .filter(link => contentfulRecordMap[link.sys.id])
+                    .map(link => contentfulRecordMap[link.sys.id]),
                 });
-              }
-              return Object.assign(innerAcc, {
-                [locale]: innerValue
-                  .filter(link => contentfulRecordMap[link.sys.id])
-                  .map(link => contentfulRecordMap[link.sys.id]),
-              });
-            },
-            {},
-          );
+              },
+              {},
+            );
+
+            const fallbackValues = contentfulData.locales.reduce(
+              (accLocales, locale) => {
+                return Object.assign(accLocales, {
+                  [locale]: localizedValue[contentfulData.defaultLocale],
+                });
+              },
+              {},
+            );
 
-          const fallbackValues = contentfulData.locales.reduce(
-            (accLocales, locale) => {
-              return Object.assign(accLocales, {
-                [locale]: localizedValue[contentfulData.defaultLocale],
-              });
-            },
-            {},
-          );
+            return Object.assign(outerAcc, {
+              [camelize(apiKey)]: { ...fallbackValues, ...localizedValue },
+            });
+          }
 
-          return Object.assign(outerAcc, {
-            [camelize(apiKey)]: { ...fallbackValues, ...localizedValue },
-          });
-        }
-
-        const innerValue = value[contentfulData.defaultLocale];
+          const innerValue = value[contentfulData.defaultLocale];
 
+          if (field.fieldType === 'link') {
+            return Object.assign(outerAcc, {
+              [camelize(apiKey)]: contentfulRecordMap[innerValue.sys.id],
+            });
+          }
 
-        if (field.fieldType === 'link') {
           return Object.assign(outerAcc, {
-            [camelize(apiKey)]: contentfulRecordMap[innerValue.sys.id],
+            [camelize(apiKey)]: innerValue
+              .filter(link => contentfulRecordMap[link.sys.id])
+              .map(link => contentfulRecordMap[link.sys.id]),
           });
-        }
+        },
+        {},
+      );
 
-        return Object.assign(outerAcc, {
-          [camelize(apiKey)]: innerValue
-            .filter(link => contentfulRecordMap[link.sys.id])
-            .map(link => contentfulRecordMap[link.sys.id]),
-        });
-      },
-      {},
-    );
-
-    try {
       // if no links found, no update needed.
       if (Object.entries(recordAttributes).length > 0) {
         await datoClient.items.update(datoItemId, recordAttributes);
+
         if (entry.sys.publishedVersion) {
           recordsToPublish.push(datoItemId);
         }
       }
+
       spinner.text = progress.tick();
-    } catch (e) {
-      spinner.fail(typeof e === 'object' ? e.message : e);
-      process.exit();
     }
-
-    spinner.text = progress.tick();
+    spinner.succeed();
+    return recordsToPublish;
+  } catch (e) {
+    spinner.fail();
+    throw e;
   }
-  spinner.succeed();
-  return recordsToPublish;
 };
diff --git a/src/contentfulImport/publishRecords.js b/src/contentfulImport/publishRecords.js
index 2dfe2928..4737fbdb 100644
--- a/src/contentfulImport/publishRecords.js
+++ b/src/contentfulImport/publishRecords.js
@@ -3,18 +3,18 @@ import Progress from './progress';
 
 export default async ({ recordIds, datoClient }) => {
   const spinner = ora('').start();
-  const progress = new Progress(recordIds.length, 'Publishing records');
 
-  spinner.text = progress.tick();
-  for (const recordId of recordIds) {
-    try {
+  try {
+    const progress = new Progress(recordIds.length, 'Publishing records');
+
+    spinner.text = progress.tick();
+    for (const recordId of recordIds) {
       await datoClient.items.publish(recordId);
       spinner.text = progress.tick();
-    } catch (e) {
-      spinner.fail(typeof e === 'object' ? e.message : e);
-      process.exit();
     }
-  }
 
-  spinner.succeed();
+    spinner.succeed();
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 };
diff --git a/src/contentfulImport/removeAllValidators.js b/src/contentfulImport/removeAllValidators.js
index bf3a18c4..dce4a8a7 100644
--- a/src/contentfulImport/removeAllValidators.js
+++ b/src/contentfulImport/removeAllValidators.js
@@ -3,57 +3,47 @@ import ora from 'ora';
 
 import { toItemApiKey } from './toApiKey';
 import Progress from './progress';
-import delay from './delay';
 
 export default async ({ datoClient, contentfulData }) => {
   let spinner = ora('Fetching existing fields').start();
-  const itemTypes = await datoClient.itemTypes.all();
-  const importedItemTypes = itemTypes.filter(itemType => {
-    return contentfulData.contentTypes.some(contentType => {
-      return itemType.apiKey === toItemApiKey(contentType.sys.id);
+
+  try {
+    const itemTypes = await datoClient.itemTypes.all();
+    const importedItemTypes = itemTypes.filter(itemType => {
+      return contentfulData.contentTypes.some(contentType => {
+        return itemType.apiKey === toItemApiKey(contentType.sys.id);
+      });
     });
-  });
-
-  const importedFieldIds = importedItemTypes
-    .map(itemType => itemType.fields)
-    .flat();
-
-  spinner.succeed();
-
-  spinner = ora('').start();
-  const progress = new Progress(
-    importedFieldIds.length,
-    'Removing validations from fields',
-  );
-  spinner.text = progress.tick();
-
-  for (const fieldId of importedFieldIds) {
-    while (true) {
-      try {
-        const field = await datoClient.fields.find(fieldId);
-        let validators = {};
-        if (field.validators.itemItemType) {
-          validators = { itemItemType: field.validators.itemItemType };
-        }
-        if (field.validators.itemsItemType) {
-          validators = { itemsItemType: field.validators.itemsItemType };
-        }
-        await datoClient.fields.update(fieldId, { validators });
-        spinner.text = progress.tick();
-        break;
-      } catch (e) {
-        if (
-          !e.body ||
-          !e.body.data ||
-          !e.body.data.some(d => d.id === 'BATCH_DATA_VALIDATION_IN_PROGRESS')
-        ) {
-          spinner.fail(typeof e === 'object' ? e.message : e);
-          process.exit();
-        } else {
-          await delay(1000);
-        }
+
+    const importedFieldIds = importedItemTypes
+      .map(itemType => itemType.fields)
+      .flat();
+
+    spinner.succeed();
+
+    spinner = ora('').start();
+    const progress = new Progress(
+      importedFieldIds.length,
+      'Removing validations from fields',
+    );
+    spinner.text = progress.tick();
+
+    for (const fieldId of importedFieldIds) {
+      const field = await datoClient.fields.find(fieldId);
+      let validators = {};
+      if (field.validators.itemItemType) {
+        validators = { itemItemType: field.validators.itemItemType };
+      }
+      if (field.validators.itemsItemType) {
+        validators = { itemsItemType: field.validators.itemsItemType };
       }
+      await datoClient.fields.update(fieldId, { validators });
+      spinner.text = progress.tick();
     }
+
+    spinner.succeed();
+  } catch (e) {
+    spinner.fail();
+    throw e;
   }
-  spinner.succeed();
 };
diff --git a/src/createMigrationScript/command.js b/src/createMigrationScript/command.js
index 5121bf91..5d38be3d 100644
--- a/src/createMigrationScript/command.js
+++ b/src/createMigrationScript/command.js
@@ -54,13 +54,12 @@ module.exports = async (client) => {
 
 const getTemplate = templatePath => {
   if (!templatePath) return defaultTemplate;
-  if (fs.existsSync(templatePath)) {
-    return fs.readFileSync(templatePath, 'utf8');
+
+  if (!fs.existsSync(templatePath)) {
+    throw new Error(`Could not load template with path ${templatePath}`);
   }
 
-  return process.stderr.write(
-    `Could not load template with path ${templatePath}`,
-  );
+  return fs.readFileSync(templatePath, 'utf8');
 };
 
 export default async function toggleMaintenanceMode({
diff --git a/src/dump/command.js b/src/dump/command.js
index e2b7a82f..41d74c0d 100644
--- a/src/dump/command.js
+++ b/src/dump/command.js
@@ -23,52 +23,46 @@ export default async function(options) {
   try {
     fs.accessSync(configFile);
   } catch (e) {
-    process.stderr.write(`Missing config file ${configFile}\n`);
-    process.exit(1);
+    throw new Error(`Missing config file ${configFile}`);
   }
 
-  try {
-    const headers = {
-      'X-Reason': 'dump',
-      'X-SSG': detectSsg(process.cwd()),
-    };
+  const headers = {
+    'X-Reason': 'dump',
+    'X-SSG': detectSsg(process.cwd()),
+  };
 
-    if (environment) {
-      headers['X-Environment'] = environment;
-    }
+  if (environment) {
+    headers['X-Environment'] = environment;
+  }
 
-    const client = new SiteClient(token, headers, cmaBaseUrl);
+  const client = new SiteClient(token, headers, cmaBaseUrl);
 
-    const loader = new Loader(client, previewMode, environment);
+  const loader = new Loader(client, previewMode, environment);
 
-    process.stdout.write('Fetching content from DatoCMS');
-    await loader.load();
-    process.stdout.write('Done');
+  process.stdout.write('Fetching content from DatoCMS');
+  await loader.load();
+  process.stdout.write('Done');
 
-    await dump(configFile, new ItemsRepo(loader.entitiesRepo), quiet);
+  await dump(configFile, new ItemsRepo(loader.entitiesRepo), quiet);
 
-    if (watch) {
-      const unwatch = loader.watch(async promise => {
-        const watchSpinner = ora(
-          'Detected change in content, loading new data',
-        ).start();
-        await promise;
-        watchSpinner.succeed();
-        return dump(configFile, new ItemsRepo(loader.entitiesRepo), quiet);
-      });
+  if (watch) {
+    const unwatch = loader.watch(async promise => {
+      const watchSpinner = ora(
+        'Detected change in content, loading new data',
+      ).start();
+      await promise;
+      watchSpinner.succeed();
+      return dump(configFile, new ItemsRepo(loader.entitiesRepo), quiet);
+    });
 
-      process.on('SIGINT', () => {
-        unwatch();
-        process.exit();
-      });
+    process.on('SIGINT', () => {
+      unwatch();
+      process.exit();
+    });
 
-      chokidar.watch(configFile).on('change', () => {
-        process.stdout.write('Detected change to config file!');
-        return dump(configFile, loader.itemsRepo, quiet);
-      });
-    }
-  } catch (e) {
-    process.stderr.write(e.message);
-    process.exit(1);
+    chokidar.watch(configFile).on('change', () => {
+      process.stdout.write('Detected change to config file!');
+      return dump(configFile, loader.itemsRepo, quiet);
+    });
   }
 }
diff --git a/src/dump/dump.js b/src/dump/dump.js
index 01a6aa7e..9af4fa21 100644
--- a/src/dump/dump.js
+++ b/src/dump/dump.js
@@ -1,6 +1,5 @@
 import { resolve, relative } from 'path';
 import denodeify from 'denodeify';
-import PrettyError from 'pretty-error';
 import nodeRimraf from 'rimraf';
 import ora from 'ora';
 import createPost from './createPost';
@@ -73,7 +72,7 @@ function start(path, config) {
   };
 }
 
-export default function dump(
+export default async function dump(
   configFile,
   itemsRepo,
   quiet = false,
@@ -90,20 +89,17 @@ export default function dump(
   const startOperation = start(destinationPath, config.bind(config, itemsRepo));
 
   const spinner = ora('Writing content').start();
-
-  return startOperation()
-    .then(operations => {
-      spinner.succeed();
 
-      if (!quiet) {
-        process.stdout.write('\n');
-        operations.forEach(operation =>
-          process.stdout.write(`* ${operation}\n`),
-        );
-        process.stdout.write('\n');
-      }
-    })
-    .catch(e => {
-      spinner.fail();
-      process.stderr.write(new PrettyError().render(e));
-    });
+  try {
+    const operations = await startOperation();
+    spinner.succeed();
+
+    if (!quiet) {
+      process.stdout.write('\n');
+      operations.forEach(operation => process.stdout.write(`* ${operation}\n`));
+      process.stdout.write('\n');
+    }
+  } catch (e) {
+    spinner.fail();
+    throw e;
+  }
 }
diff --git a/src/dump/requireToken.js b/src/dump/requireToken.js
index 301d2199..19aac710 100644
--- a/src/dump/requireToken.js
+++ b/src/dump/requireToken.js
@@ -4,8 +4,8 @@ import denodeify from 'denodeify';
 
 const fsAppendFile = denodeify(fs.appendFile);
 
-export default function() {
-  return new Promise((resolve, reject) => {
+export default async function() {
+  const token = await new Promise((resolve, reject) => {
     process.stdout.write(
       'Site token is not specified! Please paste your DatoCMS site read-only API token.\n',
     );
@@ -16,31 +16,26 @@ export default async function() {
     });
 
     rl.on('SIGINT', () => {
-      process.exit(1);
+      reject(new Error('Received SIGINT'));
     });
 
     rl.on('SIGCONT', () => {
       rl.prompt();
     });
 
-    rl.question('> ', token => {
+    rl.question('> ', input => {
       rl.close();
 
-      if (token) {
-        resolve(token);
+      if (input) {
+        resolve(input);
         return;
       }
 
-      reject();
-    });
-  })
-    .then(token => {
-      return fsAppendFile('.env', `DATO_API_TOKEN=${token}`)
-        .then(() => process.stdout.write('\nToken added to .env file.\n\n'))
-        .then(() => token);
-    })
-    .catch(() => {
-      process.stderr.write('\nMissing token.\n');
-      process.exit(1);
+      reject(new Error('Missing token'));
     });
+  });
+
+  await fsAppendFile('.env', `DATO_API_TOKEN=${token}`);
+
+  process.stdout.write('\nToken added to .env file.\n\n');
 }
diff --git a/src/environment/destroy/command.js b/src/environment/destroy/command.js
index 7a1e6d0f..ae940ee7 100644
--- a/src/environment/destroy/command.js
+++ b/src/environment/destroy/command.js
@@ -1,6 +1,5 @@
 import ora from 'ora';
 import SiteClient from '../../site/SiteClient';
-import ApiException from '../../ApiException';
 
 export default async function command({
   environmentId,
@@ -18,11 +17,6 @@ export default async function command({
     spinner.succeed(`Destroyed environment: \`${environmentId}\``);
   } catch (error) {
     spinner.fail();
-    if (error instanceof ApiException) {
-      process.stderr.write(
-        `Unable to destroy: ${environmentId}\n${error.message}`,
-      );
-      process.exit(1);
-    }
+    throw error;
   }
 }
diff --git a/src/environment/getPrimary/command.js b/src/environment/getPrimary/command.js
index 1942c133..8f1e9bc1 100644
--- a/src/environment/getPrimary/command.js
+++ b/src/environment/getPrimary/command.js
@@ -4,12 +4,7 @@ export default async function command({ token: tokenByArg, cmaBaseUrl }) {
   const token = tokenByArg || process.env.DATO_MANAGEMENT_API_TOKEN;
   const client = new SiteClient(token, {}, cmaBaseUrl);
 
-  try {
-    const allEnvs = await client.environments.all();
-    const primaryEnv = allEnvs.find(({ meta: { primary } }) => primary);
-    process.stdout.write(primaryEnv.id);
-  } catch (error) {
-    process.stderr.write(error.message);
-    process.exit(1);
-  }
+  const allEnvs = await client.environments.all();
+  const primaryEnv = allEnvs.find(({ meta: { primary } }) => primary);
+  process.stdout.write(primaryEnv.id);
 }
diff --git a/src/environment/promote/command.js b/src/environment/promote/command.js
index e3bfa0b8..e6ae95a4 100644
--- a/src/environment/promote/command.js
+++ b/src/environment/promote/command.js
@@ -1,6 +1,5 @@
 import ora from 'ora';
 import SiteClient from '../../site/SiteClient';
-import ApiException from '../../ApiException';
 
 export default async function command({
   environmentId,
@@ -19,11 +18,6 @@ export default async function command({
     spinner.succeed(`\`${environmentId}\` is now the primary environment`);
   } catch (error) {
     spinner.fail();
-    if (error instanceof ApiException) {
-      process.stderr.write(
-        `Unable to promote: ${environmentId}\n${error.message}`,
-      );
-      process.exit(1);
-    }
+    throw error;
  }
 }
diff --git a/src/runPendingMigrations/command.js b/src/runPendingMigrations/command.js
index 0c0989fa..7ff1fff2 100644
--- a/src/runPendingMigrations/command.js
+++ b/src/runPendingMigrations/command.js
@@ -19,10 +19,7 @@ export default async function runPendingMigrations({
   const migrationsDir = path.resolve(relativeMigrationsDir);
 
   if (!fs.existsSync(migrationsDir)) {
-    process.stderr.write(
-      `Error: ${relativeMigrationsDir} is not a directory!\n`,
-    );
-    throw new Error('Command failed');
+    throw new Error(`${relativeMigrationsDir} is not a directory!`);
   }
 
   const allMigrations = fs
@@ -47,10 +44,9 @@ export default async function runPendingMigrations({
 
   if (inPlace) {
     if (primaryEnv.id === environmentId) {
-      process.stderr.write(
-        'Running migrations on primary environment is not allowed!\n',
+      throw new Error(
+        'Running migrations on primary environment is not allowed!',
       );
-      throw new Error('Command failed');
     }
 
     process.stdout.write(
@@ -67,10 +63,9 @@ export default async function runPendingMigrations({
 
     if (existingEnvironment) {
       forkSpinner.fail();
-      process.stderr.write(
-        `\nError: ${environmentId} already exists! If you want to run the migrations inside this existing environment you can add the --inPlace flag.\n`,
+      throw new Error(
+        `Environment ${environmentId} already exists! If you want to run the migrations inside this existing environment you can add the --inPlace flag.`,
       );
-      throw new Error('Command failed');
     }
 
     await globalClient.environments.fork(sourceEnv.id, {
diff --git a/src/toggleMaintenanceMode/command.js b/src/toggleMaintenanceMode/command.js
index 7657b2fe..fc0a4f28 100644
--- a/src/toggleMaintenanceMode/command.js
+++ b/src/toggleMaintenanceMode/command.js
@@ -27,13 +27,9 @@ export default async function toggleMaintenanceMode({
     const error = e.errorWithCode('ACTIVE_EDITING_SESSIONS');
 
     if (error) {
-      process.stderr.write(
-        'Cannot activate maintenance mode as some users are currently editing records!\n',
+      throw new Error(
+        'Cannot activate maintenance mode as some users are currently editing records!\nTo proceed anyway, please use the --force flag',
       );
-      process.stderr.write(
-        'To proceed anyway, please use the --force flag\n',
-      );
-      process.exit(1);
     }
   }