diff --git a/README.md b/README.md
index 9afda8883..7f808219f 100644
--- a/README.md
+++ b/README.md
@@ -791,9 +791,24 @@ parameters:
- [id] {String} rule id, if not set, OSS will auto create it with random string.
- prefix {String} store prefix
- status {String} rule status, allow values: `Enabled` or `Disabled`
- - [days] {Number|String} expire after the `days`
- - [date] {String} expire date, e.g.: `2022-10-11T00:00:00.000Z`
- `date` and `days` only set one.
+ - [expiration] {Object} specifies the expiration attribute of the lifecycle rule for the object
+   - [days] {Number|String} expire after the specified number of days
+   - [createdBeforeDate] {String} expire date, e.g.: `2022-10-11T00:00:00.000Z`
+     either `days` or `createdBeforeDate` must be set
+ - [abortMultipartUpload] {Object} specifies when incomplete multipart upload tasks expire
+   - [days] {Number|String} expire after the specified number of days
+   - [createdBeforeDate] {String} expire date, e.g.: `2022-10-11T00:00:00.000Z`
+     either `days` or `createdBeforeDate` must be set
+ - [transition] {Object} specifies when an object within a valid lifecycle is converted to the IA or Archive storage class
+   - storageClass {String} the storage class that matching objects are converted into, allow values: `IA` or `Archive`
+   - [days] {Number|String} convert after the specified number of days
+   - [createdBeforeDate] {String} convert date, e.g.: `2022-10-11T00:00:00.000Z`
+     either `days` or `createdBeforeDate` must be set
+   at least one of `expiration`, `abortMultipartUpload` or `transition` must be set
+ - [tag] {Object|Array} the object tag(s) the rule applies to, multiple tags are supported
+   - key {String} the tag key
+   - value {String} the tag value
+   `tag` cannot be used together with `abortMultipartUpload`
- [options] {Object} optional parameters
- [timeout] {Number} the operation timeout
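+
+example (a minimal sketch, assuming `store` is an initialized OSS client; the bucket name and rule ids below are illustrative):
+
+```js
+await store.putBucketLifecycle('my-bucket', [
+  {
+    id: 'expire-logs',                   // optional, OSS creates a random id if omitted
+    prefix: 'logs/',
+    status: 'Enabled',
+    expiration: { days: 7 },             // or { createdBeforeDate: '2022-10-11T00:00:00.000Z' }
+    tag: [{ key: 'type', value: 'log' }] // tag cannot be combined with abortMultipartUpload
+  },
+  {
+    prefix: 'uploads/',
+    status: 'Enabled',
+    abortMultipartUpload: { days: 3 }    // expire incomplete multipart uploads
+  }
+]);
+```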
diff --git a/lib/browser/bucket.js b/lib/browser/bucket.js
index bad425ccf..42045b571 100644
--- a/lib/browser/bucket.js
+++ b/lib/browser/bucket.js
@@ -159,76 +159,6 @@ proto.deleteBucketLogging = async function deleteBucketLogging(name, options) {
};
};
-// lifecycle
-
-proto.putBucketLifecycle = async function putBucketLifecycle(name, rules, options) {
- // rules: [rule, ...]
- // rule: [id], prefix, status, expiration, [days or date]
- // status: 'Enabled' or 'Disabled'
- const params = this._bucketRequestParams('PUT', name, 'lifecycle', options);
- let xml = '<?xml version="1.0" encoding="UTF-8"?>\n<LifecycleConfiguration>\n';
- for (let i = 0; i < rules.length; i++) {
- const rule = rules[i];
- const expiration = rule.days ?
- `<Days>${rule.days}</Days>`
- :
- `<Date>${rule.date}</Date>`;
- const id = rule.id ? `<ID>${rule.id}</ID>\n` : '';
- xml += `  <Rule>\n${id
- }    <Prefix>${rule.prefix}</Prefix>\n` +
- `    <Status>${rule.status}</Status>\n` +
- `    <Expiration>${expiration}</Expiration>\n` +
- '  </Rule>\n';
- }
- xml += '</LifecycleConfiguration>';
- params.content = xml;
- params.mime = 'xml';
- params.successStatuses = [200];
- const result = await this.request(params);
- return {
- res: result.res
- };
-};
-
-proto.getBucketLifecycle = async function getBucketLifecycle(name, options) {
- const params = this._bucketRequestParams('GET', name, 'lifecycle', options);
- params.successStatuses = [200];
- params.xmlResponse = true;
- const result = await this.request(params);
- let rules = result.data.Rule || null;
- if (rules) {
- if (!isArray(rules)) {
- rules = [rules];
- }
- rules = rules.map((rule) => {
- const item = {
- id: rule.ID,
- prefix: rule.Prefix,
- status: rule.Status
- };
- if (rule.Expiration.Days) {
- item.days = rule.Expiration.Days;
- } else {
- item.date = rule.Expiration.Date;
- }
- return item;
- });
- }
- return {
- rules,
- res: result.res
- };
-};
-
-proto.deleteBucketLifecycle = async function deleteBucketLifecycle(name, options) {
- const params = this._bucketRequestParams('DELETE', name, 'lifecycle', options);
- params.successStatuses = [204];
- const result = await this.request(params);
- return {
- res: result.res
- };
-};
-
proto.putBucketCORS = async function putBucketCORS(name, rules, options) {
rules = rules || [];
assert(rules.length, 'rules is required');
diff --git a/lib/browser/client.js b/lib/browser/client.js
index e8f7fd659..7fd8a8f40 100644
--- a/lib/browser/client.js
+++ b/lib/browser/client.js
@@ -105,7 +105,10 @@ merge(proto, require('../common/bucket/getBucketWebsite'));
merge(proto, require('../common/bucket/putBucketWebsite'));
merge(proto, require('../common/bucket/deleteBucketWebsite'));
-// merge(proto, require('./bucket'));
+// lifecycle
+merge(proto, require('../common/bucket/getBucketLifecycle'));
+merge(proto, require('../common/bucket/putBucketLifecycle'));
+merge(proto, require('../common/bucket/deleteBucketLifecycle'));
// multipart upload
diff --git a/lib/bucket.js b/lib/bucket.js
index 18ec769a9..2f6ebb9fe 100644
--- a/lib/bucket.js
+++ b/lib/bucket.js
@@ -186,79 +186,6 @@ proto.deleteBucketLogging = async function deleteBucketLogging(name, options) {
};
};
-// lifecycle
-
-proto.putBucketLifecycle = async function putBucketLifecycle(name, rules, options) {
- this._checkBucketName(name);
- // rules: [rule, ...]
- // rule: [id], prefix, status, expiration, [days or date]
- // status: 'Enabled' or 'Disabled'
- const params = this._bucketRequestParams('PUT', name, 'lifecycle', options);
- let xml = '<?xml version="1.0" encoding="UTF-8"?>\n<LifecycleConfiguration>\n';
- for (let i = 0; i < rules.length; i++) {
- const rule = rules[i];
- const expiration = rule.days ?
- `<Days>${rule.days}</Days>`
- :
- `<Date>${rule.date}</Date>`;
- const id = rule.id ? `<ID>${rule.id}</ID>\n` : '';
- xml += `  <Rule>\n${id
- }    <Prefix>${rule.prefix}</Prefix>\n` +
- `    <Status>${rule.status}</Status>\n` +
- `    <Expiration>${expiration}</Expiration>\n` +
- '  </Rule>\n';
- }
- xml += '</LifecycleConfiguration>';
- params.content = xml;
- params.mime = 'xml';
- params.successStatuses = [200];
- const result = await this.request(params);
- return {
- res: result.res
- };
-};
-
-proto.getBucketLifecycle = async function getBucketLifecycle(name, options) {
- this._checkBucketName(name);
- const params = this._bucketRequestParams('GET', name, 'lifecycle', options);
- params.successStatuses = [200];
- params.xmlResponse = true;
- const result = await this.request(params);
- let rules = result.data.Rule || null;
- if (rules) {
- if (!isArray(rules)) {
- rules = [rules];
- }
- rules = rules.map((rule) => {
- const item = {
- id: rule.ID,
- prefix: rule.Prefix,
- status: rule.Status
- };
- if (rule.Expiration.Days) {
- item.days = rule.Expiration.Days;
- } else {
- item.date = rule.Expiration.Date;
- }
- return item;
- });
- }
- return {
- rules,
- res: result.res
- };
-};
-
-proto.deleteBucketLifecycle = async function deleteBucketLifecycle(name, options) {
- this._checkBucketName(name);
- const params = this._bucketRequestParams('DELETE', name, 'lifecycle', options);
- params.successStatuses = [204];
- const result = await this.request(params);
- return {
- res: result.res
- };
-};
-
proto.putBucketCORS = async function putBucketCORS(name, rules, options) {
this._checkBucketName(name);
rules = rules || [];
diff --git a/lib/common/bucket/deleteBucketLifecycle.js b/lib/common/bucket/deleteBucketLifecycle.js
new file mode 100644
index 000000000..f4c1d955a
--- /dev/null
+++ b/lib/common/bucket/deleteBucketLifecycle.js
@@ -0,0 +1,11 @@
+const proto = exports;
+
+proto.deleteBucketLifecycle = async function deleteBucketLifecycle(name, options) {
+ this._checkBucketName(name);
+ const params = this._bucketRequestParams('DELETE', name, 'lifecycle', options);
+ params.successStatuses = [204];
+ const result = await this.request(params);
+ return {
+ res: result.res
+ };
+};
diff --git a/lib/common/bucket/getBucketLifecycle.js b/lib/common/bucket/getBucketLifecycle.js
new file mode 100644
index 000000000..72509a3e9
--- /dev/null
+++ b/lib/common/bucket/getBucketLifecycle.js
@@ -0,0 +1,33 @@
+const isArray = require('../utils/isArray');
+const formatObjKey = require('../utils/formatObjKey');
+
+const proto = exports;
+
+proto.getBucketLifecycle = async function getBucketLifecycle(name, options) {
+ this._checkBucketName(name);
+ const params = this._bucketRequestParams('GET', name, 'lifecycle', options);
+ params.successStatuses = [200];
+ params.xmlResponse = true;
+ const result = await this.request(params);
+ let rules = result.data.Rule || null;
+ if (rules) {
+ if (!isArray(rules)) {
+ rules = [rules];
+ }
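+ // normalize the parsed XML: rename ID to id, wrap a single Tag into an array,
+ // then lower-case the first letter of every remaining key via formatObjKey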
+ rules = rules.map((_) => {
+ if (_.ID) {
+ _.id = _.ID;
+ delete _.ID;
+ }
+ if (_.Tag && !isArray(_.Tag)) {
+ _.Tag = [_.Tag];
+ }
+ return formatObjKey(_, 'firstLowerCase');
+ });
+ }
+ return {
+ rules,
+ res: result.res
+ };
+};
+
diff --git a/lib/common/bucket/index.js b/lib/common/bucket/index.js
index fe95ddb11..557224b27 100644
--- a/lib/common/bucket/index.js
+++ b/lib/common/bucket/index.js
@@ -15,3 +15,6 @@ merge(proto, require('./_checkBucketName'));
merge(proto, require('./getBucketWebsite'));
merge(proto, require('./putBucketWebsite'));
merge(proto, require('./deleteBucketWebsite'));
+merge(proto, require('./getBucketLifecycle'));
+merge(proto, require('./putBucketLifecycle'));
+merge(proto, require('./deleteBucketLifecycle'));
diff --git a/lib/common/bucket/putBucketLifecycle.js b/lib/common/bucket/putBucketLifecycle.js
new file mode 100644
index 000000000..7f8bdf211
--- /dev/null
+++ b/lib/common/bucket/putBucketLifecycle.js
@@ -0,0 +1,122 @@
+/* eslint-disable no-use-before-define */
+
+const isArray = require('../utils/isArray');
+const deepCopy = require('../utils/deepCopy');
+const isObject = require('../utils/isObject');
+const obj2xml = require('../utils/obj2xml');
+const checkObjectTag = require('../utils/checkObjectTag');
+const getStrBytesCount = require('../utils/getStrBytesCount');
+
+const proto = exports;
+
+proto.putBucketLifecycle = async function putBucketLifecycle(name, rules, options) {
+ this._checkBucketName(name);
+
+ if (!isArray(rules)) {
+ throw new Error('rules must be Array');
+ }
+
+ const params = this._bucketRequestParams('PUT', name, 'lifecycle', options);
+ const Rule = [];
+ const paramXMLObj = {
+ LifecycleConfiguration: {
+ Rule
+ }
+ };
+
+ rules.forEach((_) => {
+ defaultDaysAndDate2Expiration(_); // todo delete, kept for backward compatibility with the old days/date options
+ checkRule(_);
+ if (_.id) {
+ _.ID = _.id;
+ delete _.id;
+ }
+ Rule.push(_);
+ });
+
+ const paramXML = obj2xml(paramXMLObj, {
+ headers: true,
+ firstUpperCase: true
+ });
+
+ params.content = paramXML;
+ params.mime = 'xml';
+ params.successStatuses = [200];
+ const result = await this.request(params);
+ return {
+ res: result.res
+ };
+};
+
+// todo delete, kept for backward compatibility: maps the legacy top-level days/date options to an expiration object
+function defaultDaysAndDate2Expiration(obj) {
+ if (obj.days) {
+ obj.expiration = {
+ days: obj.days
+ };
+ }
+ if (obj.date) {
+ obj.expiration = {
+ createdBeforeDate: obj.date
+ };
+ }
+}
+
+function checkDaysAndDate(obj, key) {
+ const { days, createdBeforeDate } = obj;
+ if (!days && !createdBeforeDate) {
+ throw new Error(`${key} must include days or createdBeforeDate`);
+ } else if (days && !/^[1-9][0-9]*$/.test(days)) {
+ throw new Error('days must be a positive integer');
+ } else if (createdBeforeDate && !/\d{4}-\d{2}-\d{2}T00:00:00\.000Z/.test(createdBeforeDate)) {
+ throw new Error('createdBeforeDate must be a date that conforms to the iso8601 format');
+ }
+}
+
+function handleCheckTag(tag) {
+ if (!isArray(tag) && !isObject(tag)) {
+ throw new Error('tag must be Object or Array');
+ }
+ tag = isObject(tag) ? [tag] : tag;
+ const tagObj = {};
+ const tagClone = deepCopy(tag);
+ tagClone.forEach((v) => {
+ tagObj[v.key] = v.value;
+ });
+
+ checkObjectTag(tagObj);
+}
+
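+// validates a single lifecycle rule: id length, required prefix and status,
+// the transition/expiration/abortMultipartUpload settings and the tag constraints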
+function checkRule(rule) {
+ if (rule.id && getStrBytesCount(rule.id) > 255) throw new Error('ID must be at most 255 bytes');
+
+ if (rule.prefix === '' || rule.prefix === undefined) throw new Error('Rule must include prefix');
+
+ if (!['Enabled', 'Disabled'].includes(rule.status)) throw new Error('Status must be Enabled or Disabled');
+
+ if (rule.transition) {
+ if (!['IA', 'Archive'].includes(rule.transition.storageClass)) throw new Error('StorageClass must be IA or Archive');
+ checkDaysAndDate(rule.transition, 'Transition');
+ }
+
+ if (rule.expiration) {
+ checkDaysAndDate(rule.expiration, 'Expiration');
+ }
+
+ if (rule.abortMultipartUpload) {
+ checkDaysAndDate(rule.abortMultipartUpload, 'AbortMultipartUpload');
+ }
+
+ if (!rule.expiration && !rule.abortMultipartUpload && !rule.transition) {
+ throw new Error('Rule must include expiration or abortMultipartUpload or transition');
+ }
+
+ if (rule.tag) {
+ if (rule.abortMultipartUpload) {
+ throw new Error('Tag cannot be used with abortMultipartUpload');
+ }
+ handleCheckTag(rule.tag);
+ }
+}
+
diff --git a/lib/common/utils/formatObjKey.js b/lib/common/utils/formatObjKey.js
new file mode 100644
index 000000000..7e4760429
--- /dev/null
+++ b/lib/common/utils/formatObjKey.js
@@ -0,0 +1,29 @@
+/* eslint-disable no-use-before-define */
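+// Recursively walk an object or array and rewrite every key so that its first
+// character is upper-cased ('firstUpperCase') or lower-cased ('firstLowerCase').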
+module.exports = function formatObjKey(obj, type) {
+ if (obj === null || typeof obj !== 'object') {
+ return obj;
+ }
+
+ let o;
+ if (Array.isArray(obj)) {
+ o = [];
+ for (let i = 0; i < obj.length; i++) {
+ o.push(formatObjKey(obj[i], type));
+ }
+ } else {
+ o = {};
+ Object.keys(obj).forEach((key) => {
+ o[handelFormat(key, type)] = formatObjKey(obj[key], type);
+ });
+ }
+ return o;
+};
+
+function handelFormat(key, type) {
+ if (type === 'firstUpperCase') {
+ key = key.replace(/^./, _ => _.toUpperCase());
+ } else if (type === 'firstLowerCase') {
+ key = key.replace(/^./, _ => _.toLowerCase());
+ }
+ return key;
+}
diff --git a/lib/common/utils/getStrBytesCount.js b/lib/common/utils/getStrBytesCount.js
new file mode 100644
index 000000000..fc5e09571
--- /dev/null
+++ b/lib/common/utils/getStrBytesCount.js
@@ -0,0 +1,12 @@
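+// Returns the byte length of a string, counting characters outside the Latin-1
+// range (e.g. CJK) as 2 bytes; used to enforce the 255-byte limit on rule ids.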
+module.exports = function getStrBytesCount(str) {
+ let bytesCount = 0;
+ for (let i = 0; i < str.length; i++) {
+ const c = str.charAt(i);
+ if (/^[\u0000-\u00ff]$/.test(c)) {
+ bytesCount += 1;
+ } else {
+ bytesCount += 2;
+ }
+ }
+ return bytesCount;
+};
diff --git a/lib/common/utils/obj2xml.js b/lib/common/utils/obj2xml.js
index 19664f977..5b24d5f87 100644
--- a/lib/common/utils/obj2xml.js
+++ b/lib/common/utils/obj2xml.js
@@ -1,3 +1,5 @@
+const formatObjKey = require('./formatObjKey');
+
function type(params) {
return Object.prototype.toString
.call(params)
@@ -10,6 +12,9 @@ function obj2xml(obj, options) {
if (options && options.headers) {
s = '<?xml version="1.0" encoding="UTF-8"?>\n';
}
+ if (options && options.firstUpperCase) {
+ obj = formatObjKey(obj, 'firstUpperCase');
+ }
if (type(obj) === 'object') {
Object.keys(obj).forEach((key) => {
if (type(obj[key]) === 'string' || type(obj[key]) === 'number') {
diff --git a/test/node/bucket.test.js b/test/node/bucket.test.js
index 47378be01..27973966f 100644
--- a/test/node/bucket.test.js
+++ b/test/node/bucket.test.js
@@ -375,45 +375,6 @@ describe('test/bucket.test.js', () => {
});
});
- describe('putBucketLifecycle(), getBucketLifecycle(), deleteBucketLifecycle()', () => {
- it('should create, get and delete the lifecycle', async () => {
- const putresult1 = await store.putBucketLifecycle(bucket, [{
- id: 'delete after one day',
- prefix: 'logs/',
- status: 'Enabled',
- days: 1
- }]);
- assert.equal(putresult1.res.status, 200);
-
- // put again will be fine
- const putresult2 = await store.putBucketLifecycle(bucket, [
- {
- id: 'delete after one day',
- prefix: 'logs/',
- status: 'Enabled',
- days: 1
- },
- {
- prefix: 'logs2/',
- status: 'Disabled',
- date: '2022-10-11T00:00:00.000Z'
- }
- ]);
- assert.equal(putresult2.res.status, 200);
-
- await utils.sleep(ms(metaSyncTime));
-
- // get
- const getBucketLifecycle = await store.getBucketLifecycle(bucket);
- assert(getBucketLifecycle.rules.length > 0);
- assert.equal(getBucketLifecycle.res.status, 200);
-
- // delete it
- const deleteResult = await store.deleteBucketLifecycle(bucket);
- assert.equal(deleteResult.res.status, 204);
- });
- });
-
describe('putBucketReferer(), getBucketReferer(), deleteBucketReferer()', () => {
it('should create, get and delete the referer', async () => {
const putresult = await store.putBucketReferer(bucket, true, [
@@ -706,4 +667,466 @@ describe('test/bucket.test.js', () => {
assert.equal(deleteResult.res.status, 204);
});
});
+
+ describe('putBucketLifecycle()', () => {
+ // todo: delete once the legacy top-level days/date options are removed
+ it('should put the lifecycle with the old api', async () => {
+ const putresult1 = await store.putBucketLifecycle(bucket, [{
+ id: 'expiration1',
+ prefix: 'logs/',
+ status: 'Enabled',
+ days: 1
+ }]);
+ assert.equal(putresult1.res.status, 200);
+
+ const putresult2 = await store.putBucketLifecycle(bucket, [{
+ id: 'expiration2',
+ prefix: 'logs/',
+ status: 'Enabled',
+ date: '2020-02-18T00:00:00.000Z'
+ }]);
+ assert.equal(putresult2.res.status, 200);
+ });
+
+ it('should put the lifecycle with expiration and id', async () => {
+ const putresult1 = await store.putBucketLifecycle(bucket, [{
+ id: 'expiration1',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ days: 1
+ }
+ }]);
+ assert.equal(putresult1.res.status, 200);
+
+ const getBucketLifecycle = await store.getBucketLifecycle(bucket);
+ assert(getBucketLifecycle.rules.length > 0 && getBucketLifecycle.rules.find(v => v.id === 'expiration1'));
+
+ const putresult2 = await store.putBucketLifecycle(bucket, [{
+ id: 'expiration2',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ createdBeforeDate: '2020-02-18T00:00:00.000Z'
+ }
+ }]);
+ assert.equal(putresult2.res.status, 200);
+ });
+
+ it('should put the lifecycle with AbortMultipartUpload', async () => {
+ const putresult1 = await store.putBucketLifecycle(bucket, [{
+ id: 'abortMultipartUpload1',
+ prefix: 'logs/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ days: 1
+ }
+ }]);
+ assert.equal(putresult1.res.status, 200);
+
+ const putresult2 = await store.putBucketLifecycle(bucket, [{
+ id: 'abortMultipartUpload2',
+ prefix: 'logs/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ createdBeforeDate: '2020-02-18T00:00:00.000Z'
+ }
+ }]);
+ assert.equal(putresult2.res.status, 200);
+ });
+
+ it('should put the lifecycle with Transition', async () => {
+ const putresult1 = await store.putBucketLifecycle(bucket, [{
+ id: 'transition',
+ prefix: 'logs/',
+ status: 'Enabled',
+ transition: {
+ createdBeforeDate: '2020-02-18T00:00:00.000Z',
+ storageClass: 'Archive'
+ },
+ expiration: {
+ createdBeforeDate: '2020-02-19T00:00:00.000Z'
+ },
+ tag: {
+ key: 'test',
+ value: '123'
+ }
+ }]);
+ assert.equal(putresult1.res.status, 200);
+
+ const putresult2 = await store.putBucketLifecycle(bucket, [{
+ id: 'transition',
+ prefix: 'logs/',
+ status: 'Enabled',
+ transition: {
+ days: 20,
+ storageClass: 'Archive'
+ },
+ tag: {
+ key: 'test',
+ value: '123'
+ }
+ }]);
+ assert.equal(putresult2.res.status, 200);
+ });
+
+ it('should put the lifecycle with expiration and Tag', async () => {
+ const putresult1 = await store.putBucketLifecycle(bucket, [{
+ id: 'tag1',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ days: 1
+ },
+ tag: {
+ key: 1,
+ value: '2'
+ }
+ }]);
+ assert.equal(putresult1.res.status, 200);
+
+ const putresult2 = await store.putBucketLifecycle(bucket, [{
+ id: 'tag2',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ createdBeforeDate: '2020-02-18T00:00:00.000Z'
+ },
+ tag: {
+ key: 1,
+ value: '2'
+ }
+ }]);
+ assert.equal(putresult2.res.status, 200);
+
+ const putresult3 = await store.putBucketLifecycle(bucket, [{
+ id: 'tag2',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ createdBeforeDate: '2020-02-18T00:00:00.000Z'
+ },
+ tag: [{
+ key: 1,
+ value: '2'
+ }, {
+ key: 'testkey',
+ value: 'testvalue'
+ }]
+ }]);
+ assert.equal(putresult3.res.status, 200);
+ });
+
+ it('should throw error when id is more than 255 bytes', async () => {
+ const testID = Array(256).fill('a').join('');
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: testID,
+ prefix: 'testid/',
+ status: 'Enabled'
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('255'));
+ }
+ });
+
+ it('should throw error when no prefix', async () => {
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'prefix',
+ status: 'Enabled'
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('prefix'));
+ }
+ });
+
+ it('should throw error when status is not Enabled or Disabled', async () => {
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'status',
+ prefix: 'fix/',
+ status: 'test'
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('Enabled or Disabled'));
+ }
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'status',
+ prefix: 'fix/',
+ status: ''
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('Enabled or Disabled'));
+ }
+ });
+
+ it('should throw error when storageClass is not Archive or IA', async () => {
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'storageClass',
+ prefix: 'fix/',
+ status: 'Enabled',
+ transition: {
+ createdBeforeDate: '2020-02-18T00:00:00.000Z',
+ storageClass: 'test'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('IA or Archive'));
+ }
+ });
+
+ it('should throw error when transition has neither days nor createdBeforeDate', async () => {
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'storageClass',
+ prefix: 'fix/',
+ status: 'Enabled',
+ transition: {
+ storageClass: 'Archive'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('days or createdBeforeDate'));
+ }
+ });
+
+ it('should throw error when days of transition is not a positive integer', async () => {
+ const errorMessage = 'a positive integer';
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'transition',
+ prefix: 'fix/',
+ status: 'Enabled',
+ transition: {
+ days: 1.1,
+ storageClass: 'Archive'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'transition',
+ prefix: 'fix/',
+ status: 'Enabled',
+ transition: {
+ days: 'asd',
+ storageClass: 'Archive'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+ });
+
+ it('should throw error when createdBeforeDate of transition is not iso8601 format', async () => {
+ const errorMessage = 'iso8601';
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'transition',
+ prefix: 'fix/',
+ status: 'Enabled',
+ transition: {
+ createdBeforeDate: new Date().toISOString(), // current time, not the required YYYY-MM-DDT00:00:00.000Z form
+ storageClass: 'Archive'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'transition',
+ prefix: 'fix/',
+ status: 'Enabled',
+ transition: {
+ createdBeforeDate: new Date().toString(),
+ storageClass: 'Archive'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+ });
+
+ it('should throw error when abortMultipartUpload has neither days nor createdBeforeDate', async () => {
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'storageClass',
+ prefix: 'fix/',
+ status: 'Enabled',
+ abortMultipartUpload: {}
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes('days or createdBeforeDate'));
+ }
+ });
+
+ it('should throw error when days of abortMultipartUpload is not a positive integer', async () => {
+ const errorMessage = 'a positive integer';
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'abortMultipartUpload',
+ prefix: 'fix/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ days: 1.1
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'abortMultipartUpload',
+ prefix: 'fix/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ days: 'a'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+ });
+
+ it('should throw error when createdBeforeDate of abortMultipartUpload is not iso8601 format', async () => {
+ const errorMessage = 'iso8601';
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'abortMultipartUpload',
+ prefix: 'fix/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ createdBeforeDate: new Date().toISOString() // current time, not the required YYYY-MM-DDT00:00:00.000Z form
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ id: 'abortMultipartUpload',
+ prefix: 'fix/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ createdBeforeDate: new Date().toString() // not an iso8601 string at all
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+ });
+
+ it('should throw error when rule has no expiration, abortMultipartUpload or transition', async () => {
+ const errorMessage = 'expiration or abortMultipartUpload';
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ prefix: 'expirationAndAbortMultipartUpload/',
+ status: 'Enabled'
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+ });
+
+ it('should throw error when tag is used with abortMultipartUpload', async () => {
+ const errorMessage = 'Tag cannot be used with abortMultipartUpload';
+ try {
+ await store.putBucketLifecycle(bucket, [{
+ prefix: 'expirationAndAbortMultipartUpload/',
+ status: 'Enabled',
+ abortMultipartUpload: {
+ days: 1
+ },
+ expiration: {
+ days: 1
+ },
+ tag: {
+ value: '1',
+ key: 'test'
+ }
+ }]);
+ assert(false);
+ } catch (error) {
+ assert(error.message.includes(errorMessage));
+ }
+ });
+ });
+
+ describe('getBucketLifecycle()', () => {
+ it('should get the lifecycle', async () => {
+ const putresult = await store.putBucketLifecycle(bucket, [{
+ id: 'get_test',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ days: 1
+ },
+ tag: [{
+ key: 'test',
+ value: '1'
+ },
+ {
+ key: 'test1',
+ value: '2'
+ }]
+ }]);
+ assert.equal(putresult.res.status, 200);
+
+ const getBucketLifecycle = await store.getBucketLifecycle(bucket);
+ assert(getBucketLifecycle.rules.length > 0);
+ assert.equal(getBucketLifecycle.res.status, 200);
+ });
+ });
+
+ describe('deleteBucketLifecycle()', () => {
+ it('should delete the lifecycle', async () => {
+ const putresult = await store.putBucketLifecycle(bucket, [{
+ id: 'delete',
+ prefix: 'logs/',
+ status: 'Enabled',
+ expiration: {
+ days: 1
+ },
+ tag: [{
+ key: 'test',
+ value: '1'
+ },
+ {
+ key: 'test1',
+ value: '2'
+ }]
+ }]);
+ assert.equal(putresult.res.status, 200);
+
+ // delete it
+ const deleteResult = await store.deleteBucketLifecycle(bucket);
+ assert.equal(deleteResult.res.status, 204);
+ });
+ });
});