From bec88457daa011a19ff53da9e8f193f8a87e0616 Mon Sep 17 00:00:00 2001 From: moca_tao7 Date: Tue, 26 Oct 2021 16:37:12 +0800 Subject: [PATCH] fix: to add minify3 git action and some test erro (#1005) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * codecov * fix: to install error * feat: to add reveal * feat: to add reveal * fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to fix install error fix: to ci test error fix: to ci test error fix: to fix test error fix: to fix test error fix: to fix test error fix: to fix test error feat: to add feat feat: to add feat feat: to add feat fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout fix: to add network timeout feat: to fix gitaction test error feat: to fix network error * feat: to fix network error * docs: update outdated links. (#976) (#1002) Co-authored-by: richex Co-authored-by: richex-cn Co-authored-by: richex Co-authored-by: peize.rpz * chore(release): 6.17.0 * feat: to fix network error feat: to fix network error feat: to fix network error * feat: to fix network error * fix: to fix network error * fix: to fix network error * Update node_test.yml * feat: to remove secrets * feat: to remove secrets * fix: to remove travis (#1006) Co-authored-by: 饶培泽 Co-authored-by: richex-cn Co-authored-by: richex Co-authored-by: peize.rpz --- .github/workflows/codeCov.yml | 3 +- .github/workflows/node_test.yml | 67 +++++++++++++++++++++++++-------- .travis.yml | 19 ---------- package.json | 4 +- test/node/bucket.test.js | 49 +++++++++++------------- test/node/bucket_worm.test.js | 20 ++-------- test/node/bukcet_worm.test.js | 19 ++-------- test/node/cleanAllBucket.js | 6 +++ test/node/multipart.test.js | 4 +- test/node/multiversion.test.js | 13 +++++-- test/node/object.test.js | 59 +++++++++++++++++------------ 11 files changed, 135 insertions(+), 128 deletions(-) delete mode 100644 .travis.yml create mode 100644 test/node/cleanAllBucket.js diff --git a/.github/workflows/codeCov.yml b/.github/workflows/codeCov.yml index de3084039..a704f95e6 100644 --- a/.github/workflows/codeCov.yml +++ b/.github/workflows/codeCov.yml @@ -4,7 +4,8 @@ on: push: branches: [master] workflow_run: - workflows: ['Receive PR'] + + workflows: ['TEST'] types: - completed diff --git a/.github/workflows/node_test.yml b/.github/workflows/node_test.yml index d1a0a7a21..6ea599d35 100644 --- a/.github/workflows/node_test.yml +++ b/.github/workflows/node_test.yml @@ -12,13 +12,14 @@ on: types: - completed -jobs: - browser_test: +jobs: + browser_test: environment: ali_oss_AK runs-on: macos-latest env: ONCI: true + MINIFY: 1 ALI_SDK_OSS_ID: ${{secrets.ALI_SDK_OSS_ID}} ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} ALI_SDK_OSS_SECRET: ${{secrets.ALI_SDK_OSS_SECRET}} @@ -32,7 +33,6 @@ jobs: strategy: matrix: node-version: [14.x] - # See supported Node.js release schedule at https://nodejs.org/en/about/releases/ steps: - uses: actions/checkout@v2 @@ -41,15 +41,15 @@ jobs: curl cip.cc npm install mkdir test/browser/build - node browser-build.js > test/browser/build/aliyun-oss-sdk.min.js + node browser-build.js > test/browser/build/aliyun-oss-sdk.min.js node task/browser-test-build.js > test/browser/build/tests.js npx karma start - - node_10: + node_10: environment: ali_oss_AK runs-on: macos-latest env: + ONCI: true ALI_SDK_OSS_ID: ${{secrets.ALI_SDK_OSS_ID}} ALI_SDK_OSS_SECRET: ${{secrets.ALI_SDK_OSS_SECRET}} ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} @@ -58,7 +58,6 @@ jobs: ALI_SDK_STS_REGION: ${{secrets.ALI_SDK_STS_REGION}} ALI_SDK_STS_ROLE: ${{secrets.ALI_SDK_STS_ROLE}} ALI_SDK_STS_SECRET: ${{secrets.ALI_SDK_STS_SECRET}} - ONCI: true strategy: @@ -72,23 +71,26 @@ jobs: uses: actions/setup-node@v2 with: node-version: ${{ matrix.node-version }} - - run: npm install + - run: npm install - run: npm run test + - run: node test/node/cleanAllBucket.js - node_12: + node_12: + if: always() + needs: [node_10] environment: ali_oss_AK runs-on: macos-latest env: + ONCI: true ALI_SDK_OSS_ID: ${{secrets.ALI_SDK_OSS_ID}} - ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} ALI_SDK_OSS_SECRET: ${{secrets.ALI_SDK_OSS_SECRET}} + ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} ALI_SDK_STS_BUCKET: ${{secrets.ALI_SDK_STS_BUCKET}} ALI_SDK_STS_ID: ${{secrets.ALI_SDK_STS_ID}} ALI_SDK_STS_REGION: ${{secrets.ALI_SDK_STS_REGION}} ALI_SDK_STS_ROLE: ${{secrets.ALI_SDK_STS_ROLE}} ALI_SDK_STS_SECRET: ${{secrets.ALI_SDK_STS_SECRET}} - ONCI: true strategy: @@ -102,23 +104,26 @@ jobs: uses: actions/setup-node@v2 with: node-version: ${{ matrix.node-version }} - - run: npm install + - run: npm install - run: npm run test + - run: node test/node/cleanAllBucket.js - node_14: + node_14: + if: always() + needs: [node_10,node_12] environment: ali_oss_AK runs-on: macos-latest env: + ONCI: true ALI_SDK_OSS_ID: ${{secrets.ALI_SDK_OSS_ID}} - ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} ALI_SDK_OSS_SECRET: ${{secrets.ALI_SDK_OSS_SECRET}} + ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} ALI_SDK_STS_BUCKET: ${{secrets.ALI_SDK_STS_BUCKET}} ALI_SDK_STS_ID: ${{secrets.ALI_SDK_STS_ID}} ALI_SDK_STS_REGION: ${{secrets.ALI_SDK_STS_REGION}} ALI_SDK_STS_ROLE: ${{secrets.ALI_SDK_STS_ROLE}} ALI_SDK_STS_SECRET: ${{secrets.ALI_SDK_STS_SECRET}} - ONCI: true strategy: matrix: @@ -131,5 +136,35 @@ jobs: uses: actions/setup-node@v2 with: node-version: ${{ matrix.node-version }} - - run: npm install + - run: npm install - run: npm run test + - run: node test/node/cleanAllBucket.js + + cleanAllBucket: + if: always() + needs: [node_10,node_12,node_14] + environment: ali_oss_AK + runs-on: ubuntu-latest + + env: + ONCI: true + ALI_SDK_OSS_ID: ${{secrets.ALI_SDK_OSS_ID}} + ALI_SDK_OSS_SECRET: ${{secrets.ALI_SDK_OSS_SECRET}} + ALI_SDK_OSS_REGION: ${{secrets.ALI_SDK_OSS_REGION}} + ALI_SDK_STS_BUCKET: ${{secrets.ALI_SDK_STS_BUCKET}} + ALI_SDK_STS_ID: ${{secrets.ALI_SDK_STS_ID}} + ALI_SDK_STS_REGION: ${{secrets.ALI_SDK_STS_REGION}} + ALI_SDK_STS_ROLE: ${{secrets.ALI_SDK_STS_ROLE}} + ALI_SDK_STS_SECRET: ${{secrets.ALI_SDK_STS_SECRET}} + + strategy: + matrix: + node-version: [14.x] + steps: + - uses: actions/checkout@v2 + - name: clean all bucket + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node-version }} + - run: npm install + - run: node test/node/cleanAllBucket.js diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 8a91969de..000000000 --- a/.travis.yml +++ /dev/null @@ -1,19 +0,0 @@ -language: node_js -node_js: - - '14' - - '12' - - '10' - - '8' -env: - global: - - secure: Ws1h7ZU97/2npejMZp3/Jac0ZYkgXBW5gEilR5WrnkdkEbIhhK6HamrcqLrA6qCPV0Ax9nNbZCRJcQwPsTdOxBekJ3+3zflMWevfa9+CRS1Hh0mMcn+D864CjrRiQRgIQa8ZoFVPLaL/7qyQp1i7gLbPcpMjO6CA7LgV1reaf+U= - - secure: Sj56ZeEZ48Sms7+jEI0qvrtEZoGc5ywotxYxvZ3EHg42Xpv6HBE6aEl3AZhz8ggQgmtJ/AneWLAZGOnZHpQiVrMetUa46HcAzCJ0h/wJXechpyu/KRTqaDjHkn3lD0yi+WnvBTYUIBr+Db8VwLbbcRVM2h/GxgBnWBTaE6jlmag= - - secure: WixkbIJUiMfk4YZ+6Unlya1zxBl0i8X90O6xjd+gGogTf2IHeNUM8alLAMe9uFajbQnej8vKI+Lx9DTfaICtEoacjuG0UX8udZgMMyGxwBc36FJbUxeTtXpXp/PIz0rjmEGdIlFrjReMFcPjgOvyPdSNtUXYPACfGQFEhy69nJM= - - secure: PADJkH7wZP4vfIhvobEW66vmqQuIu6LZH2fcQ+aFRPiOCLU18jRiyKIMzAT9+igQTS5/nLGOJP1O94rD0wXpf7A07fK3DUYOGvC/kzD/Gdh/EkcjeyrL/EX7vITeVZJIxlUVE/F5WHDs04Xpezi4DlsE0v1WBj/Im+JKR7yyoZA= - - secure: A5B0GOXgKRNLc1+EAISSpxZeD9ZlPHJlv1vGUMXWv9qiR9KnZofh1GZYDbVKgOdbfPKSWgvb9vp8m1WPebOj8q/rT+DyfqRuKhqhQ2AudP2AcOBOATaOvLIMFrh4E+qdcvFq65A+nhg0b6EkINQjxrRyts/6gaAnQbshrYbElsc= - - secure: bf2G/bsk8+7xBU3zVF47/U5+kfpcyBv14dNkiQzax9r9fyhUlyKSyDsoWbGKijWYYmp9vjQlo9ymhmnRP5+zKho9XhJ5qteZmElCtQkwvAmKVIeZPbTcYA+ofagU2WCggF4k3R/YtXRX8P+Ie/+ge1dhq4YX1fmbDWXalRdhO7M= - - secure: F8d9Ix7IlWTA0XcxgxTlqzyIBkGJ0fT2YAa3GbpknU4KfrgK5rTzWNmN8N7Q2acxArjFRp2pYjFL/RlL8pI2skvj1HDK3FTABQh0cyBShhiuQLgm0xUSC3JSltpPBF4wEoC6Pdjd9oF1moc900A4g+pjCf/fx7vYm5GxnyHTKHI= - - secure: Og1xl8+NoUfc3ZlWT/yVOPy/8YhRHVNT2M+LVZ81iRwJNBxwm1cj40cck41Py2eG3AwH0yCj0K3u+o74fOHbTuYLzNskB1gYbA8m6v2vuMnGRblb6+UYEm3yUcYHJ4VXFMmFTMKyNoe/iqEpetgoKffbCAk3Xa7kZbbQR7T03tI= -script: npm run test-cov -after_script: - - npm i codecov && codecov diff --git a/package.json b/package.json index 179c382e9..ae377a3bb 100644 --- a/package.json +++ b/package.json @@ -21,8 +21,8 @@ }, "scripts": { "build-change-log": "standard-version", - "test": "npm run tsc && mocha -t 60000 -r should -r dotenv/config test/node/*.test.js test/node/**/*.test.js", - "test-cov": "npm run tsc && nyc --reporter=lcov node_modules/.bin/_mocha -t 60000 -r should test/node/*.test.js test/node/**/*.test.js", + "test": "npm run tsc && mocha -t 120000 -r should -r dotenv/config test/node/*.test.js test/node/**/*.test.js", + "test-cov": "npm run tsc && nyc --reporter=lcov node_modules/.bin/_mocha -t 120000 -r should test/node/*.test.js test/node/**/*.test.js", "jshint": "jshint .", "autod": "autod", "build-test": "MINIFY=1 node browser-build.js > test/browser/build/aliyun-oss-sdk.min.js && node -r dotenv/config task/browser-test-build.js > test/browser/build/tests.js", diff --git a/test/node/bucket.test.js b/test/node/bucket.test.js index 8055cc980..32f7ef7a0 100644 --- a/test/node/bucket.test.js +++ b/test/node/bucket.test.js @@ -22,31 +22,18 @@ describe('test/bucket.test.js', () => { const defaultRegion = config.region; before(async () => { store = oss(config); - - const bucketResult = await store.listBuckets({ - // prefix: '', - 'max-keys': 20 - }); - - await Promise.all( - (bucketResult.buckets || []) - .filter(_ => _.name.startsWith('ali-oss')) - .map(_bucket => utils.cleanBucket(oss(Object.assign(config, { region: _bucket.region })), _bucket.name)) - ); - config.region = defaultRegion; store = oss(config); bucket = `ali-oss-test-bucket-${prefix.replace(/[/.]/g, '-')}`; bucket = bucket.substring(0, bucket.length - 1); bucketRegion = defaultRegion; - const result = await store.putBucket(bucket); + const result = await store.putBucket(bucket, { timeout: process.env.ONCI ? 60000 : 10000 }); assert.equal(result.bucket, bucket); assert.equal(result.res.status, 200); }); - after(async () => { - await utils.cleanBucket(store, bucket); + await utils.cleanAllBucket(store); }); describe('setBucket()', () => { @@ -79,18 +66,20 @@ describe('test/bucket.test.js', () => { // just for archive bucket test archvieBucket = `ali-oss-archive-bucket-${prefix.replace(/[/.]/g, '-')}`; archvieBucket = archvieBucket.substring(0, archvieBucket.length - 1); - await store.putBucket(archvieBucket, { StorageClass: 'Archive' }); + await store.putBucket(archvieBucket, { StorageClass: 'Archive', timeout: 120000 }); }); it('should create a new bucket', async () => { - const result1 = await store.putBucket(name); + const result1 = await store.putBucket(name, { timeout: 120000 }); assert.equal(result1.bucket, name); assert.equal(result1.res.status, 200); }); it('should create an archive bucket', async () => { await utils.sleep(ms(metaSyncTime)); - const result2 = await store.listBuckets(); + const result2 = await store.listBuckets({}, { + timeout: 120000, + }); const { buckets } = result2; const m = buckets.some(item => item.name === archvieBucket); assert(m === true); @@ -174,6 +163,7 @@ describe('test/bucket.test.js', () => { it('should delete not empty bucket throw BucketNotEmptyError', async () => { store.useBucket(bucket); await store.put('ali-oss-test-bucket.txt', __filename); + utils.sleep(ms(metaSyncTime)); await utils.throws(async () => { await store.deleteBucket(bucket); }, 'BucketNotEmptyError'); @@ -223,7 +213,9 @@ describe('test/bucket.test.js', () => { it('should list buckets by prefix', async () => { const result = await store.listBuckets({ prefix: listBucketsPrefix, - 'max-keys': 20 + 'max-keys': 20, + }, { + timeout: 120000 }); assert(Array.isArray(result.buckets)); @@ -413,7 +405,7 @@ describe('test/bucket.test.js', () => { it('should create, get and delete the referer', async () => { const putresult = await store.putBucketReferer(bucket, true, [ 'http://npm.taobao.org' - ]); + ], { timeout: 120000 }); assert.equal(putresult.res.status, 200); // put again will be fine @@ -422,7 +414,7 @@ describe('test/bucket.test.js', () => { 'https://npm.taobao.org', 'http://cnpmjs.org' ]; - const putReferer = await store.putBucketReferer(bucket, false, referers); + const putReferer = await store.putBucketReferer(bucket, false, referers, { timeout: 120000 }); assert.equal(putReferer.res.status, 200); await utils.sleep(ms(metaSyncTime)); @@ -442,7 +434,7 @@ describe('test/bucket.test.js', () => { describe('putBucketCORS(), getBucketCORS(), deleteBucketCORS()', () => { afterEach(async () => { // delete it - const result = await store.deleteBucketCORS(bucket); + const result = await store.deleteBucketCORS(bucket, { timeout: 120000 }); assert.equal(result.res.status, 204); }); @@ -457,7 +449,7 @@ describe('test/bucket.test.js', () => { const putResult = await store.putBucketCORS(bucket, rules); assert.equal(putResult.res.status, 200); - const getResult = await store.getBucketCORS(bucket); + const getResult = await store.getBucketCORS(bucket, { timeout: 120000 }); assert.equal(getResult.res.status, 200); assert.deepEqual(getResult.rules, [{ allowedOrigin: '*', @@ -471,14 +463,15 @@ describe('test/bucket.test.js', () => { it('should overwrite cors', async () => { const rules1 = [{ allowedOrigin: '*', - allowedMethod: 'GET' + allowedMethod: 'GET', + timeout: 120000 }]; const putCorsResult1 = await store.putBucketCORS(bucket, rules1); assert.equal(putCorsResult1.res.status, 200); - await utils.sleep(ms('1000ms')); + await utils.sleep(ms(metaSyncTime)); - const getCorsResult1 = await store.getBucketCORS(bucket); + const getCorsResult1 = await store.getBucketCORS(bucket, { timeout: 120000 }); assert.equal(getCorsResult1.res.status, 200); assert.deepEqual(getCorsResult1.rules, [{ allowedOrigin: '*', @@ -492,9 +485,9 @@ describe('test/bucket.test.js', () => { const putCorsResult2 = await store.putBucketCORS(bucket, rules2); assert.equal(putCorsResult2.res.status, 200); - await utils.sleep(ms('1000ms')); + await utils.sleep(ms(metaSyncTime)); - const getCorsResult2 = await store.getBucketCORS(bucket); + const getCorsResult2 = await store.getBucketCORS(bucket, { timeout: 120000 }); assert.equal(getCorsResult2.res.status, 200); assert.deepEqual(getCorsResult2.rules, [{ allowedOrigin: 'localhost', diff --git a/test/node/bucket_worm.test.js b/test/node/bucket_worm.test.js index 2813dffc8..75d0a6b58 100644 --- a/test/node/bucket_worm.test.js +++ b/test/node/bucket_worm.test.js @@ -11,31 +11,19 @@ describe('test/bucket_worm.test.js', () => { const defaultRegion = config.region; before(async () => { store = oss(config); - - const bucketResult = await store.listBuckets({ - 'max-keys': 20 - }); - - await Promise.all((bucketResult.buckets || []) - .filter(_ => _.name.startsWith('ali-oss')) - .map(_bucket => utils - .cleanBucket( - oss(Object.assign(config, { region: _bucket.region })), - _bucket.name - ))); - config.region = defaultRegion; store = oss(config); - bucket = `ali-oss-test-bucket-worm-${prefix.replace(/[/.]/g, '-')}`; + bucket = `ali-oss-test-worm-bucket-worm-${prefix.replace(/[/.]/g, '-')}`; bucket = bucket.substring(0, bucket.length - 1); - const result = await store.putBucket(bucket); + const result = await store.putBucket(bucket, { + timeout: process.env.ONCI ? 60000 : 10000 }); assert.equal(result.bucket, bucket); assert.equal(result.res.status, 200); }); after(async () => { - await utils.cleanBucket(store, bucket); + await utils.cleanAllBucket(store); }); describe('worm()', () => { describe('initiateBucketWorm()', () => { diff --git a/test/node/bukcet_worm.test.js b/test/node/bukcet_worm.test.js index 91003ec19..32ebfe6a0 100644 --- a/test/node/bukcet_worm.test.js +++ b/test/node/bukcet_worm.test.js @@ -11,31 +11,18 @@ describe('test/bucket.test.js', () => { const defaultRegion = config.region; before(async () => { store = oss(config); - - const bucketResult = await store.listBuckets({ - 'max-keys': 20 - }); - - await Promise.all((bucketResult.buckets || []) - .filter(_ => _.name.startsWith('ali-oss')) - .map(_bucket => utils - .cleanBucket( - oss(Object.assign(config, { region: _bucket.region })), - _bucket.name - ))); - config.region = defaultRegion; store = oss(config); - bucket = `ali-oss-test-bucket-${prefix.replace(/[/.]/g, '-')}`; + bucket = `ali-oss-test-worm2-bucket-${prefix.replace(/[/.]/g, '-')}`; bucket = bucket.substring(0, bucket.length - 1); - const result = await store.putBucket(bucket); + const result = await store.putBucket(bucket, { timeout: process.env.ONCI ? 60000 : 10000 }); assert.equal(result.bucket, bucket); assert.equal(result.res.status, 200); }); after(async () => { - await utils.cleanBucket(store, bucket); + await utils.cleanAllBucket(store); }); describe('worm()', () => { describe('initiateBucketWorm()', () => { diff --git a/test/node/cleanAllBucket.js b/test/node/cleanAllBucket.js new file mode 100644 index 000000000..e43301a28 --- /dev/null +++ b/test/node/cleanAllBucket.js @@ -0,0 +1,6 @@ +const utils = require('./utils'); +const config = require('../config').oss; +const oss = require('../..'); + +const store = oss(config); +utils.cleanAllBucket(store); diff --git a/test/node/multipart.test.js b/test/node/multipart.test.js index c51d94105..60e988951 100644 --- a/test/node/multipart.test.js +++ b/test/node/multipart.test.js @@ -491,7 +491,7 @@ describe('test/multipart.test.js', () => { it('should upload partSize be int number and greater then minPartSize', async () => { // create a file with 1M random data const fileName = await utils.createTempFile('multipart-upload-file', 1024 * 1024); - + const name = `${prefix}multipart/upload-file`; let progress = 0; try { @@ -504,7 +504,7 @@ describe('test/multipart.test.js', () => { } catch (e) { assert.equal('partSize must be int number', e.message); } - + try { await store.multipartUpload(name, fileName, { partSize: 1, diff --git a/test/node/multiversion.test.js b/test/node/multiversion.test.js index 5cd5b2bfe..ad740e58c 100644 --- a/test/node/multiversion.test.js +++ b/test/node/multiversion.test.js @@ -3,6 +3,8 @@ const utils = require('./utils'); const oss = require('../..'); const config = require('../config').oss; const fs = require('fs'); +const ms = require('humanize-ms'); +const { metaSyncTime } = require('../config'); describe('test/multiversion.test.js', () => { const { prefix } = utils; @@ -103,7 +105,7 @@ describe('test/multiversion.test.js', () => { }); }); - describe('putBucketLifecycle() getBucketLifecycle()', () => { + describe('putBucketLifecycle() getBucketLifecycle()', async () => { it('should putBucketLifecycle with NoncurrentVersionExpiration', async () => { const putresult1 = await store.putBucketLifecycle(bucket, [{ id: 'expiration1', @@ -115,7 +117,10 @@ describe('test/multiversion.test.js', () => { noncurrentVersionExpiration: { noncurrentDays: 1 } - }]); + }], { + timeout: 120000 + }); + await utils.sleep(ms(metaSyncTime)); assert.strictEqual(putresult1.res.status, 200); const { rules } = await store.getBucketLifecycle(bucket); assert.strictEqual(rules[0].noncurrentVersionExpiration.noncurrentDays, '1'); @@ -166,6 +171,7 @@ describe('test/multiversion.test.js', () => { before(async () => { await store.putBucketVersioning(bucket, enabled); const result = await store.put(name, __filename); + await utils.sleep(ms(metaSyncTime)); await store.delete(name); versionId = result.res.headers['x-oss-version-id']; }); @@ -468,6 +474,7 @@ describe('test/multiversion.test.js', () => { // 不指定version id,删除当前版本,生成DELETE标记 it('should delete object without versionId', async () => { + await utils.sleep(ms(metaSyncTime)); const res = await store.delete(name); assert.strictEqual(res.res.headers['x-oss-delete-marker'], 'true'); assert(res.res.headers['x-oss-version-id']); @@ -557,7 +564,7 @@ describe('test/multiversion.test.js', () => { describe('getBucketInfo()', () => { it('should return bucket Versioning', async () => { try { - await store.putBucketVersioning(bucket, enabled); + await store.putBucketVersioning(bucket, enabled,); const result = await store.getBucketInfo(bucket); assert.equal(result.res.status, 200); assert.equal(result.bucket.Versioning, enabled); diff --git a/test/node/object.test.js b/test/node/object.test.js index a87ce846b..d568d8e38 100644 --- a/test/node/object.test.js +++ b/test/node/object.test.js @@ -2,6 +2,8 @@ const fs = require('fs'); const path = require('path'); const assert = require('assert'); const { Readable } = require('stream'); +const ms = require('humanize-ms'); +const { metaSyncTime } = require('../config'); const AgentKeepalive = require('agentkeepalive'); const HttpsAgentKeepalive = require('agentkeepalive').HttpsAgent; const sleep = require('mz-modules/sleep'); @@ -49,10 +51,6 @@ describe('test/object.test.js', () => { // store.useBucket(archvieBucket, bucketRegion); }); - after(async () => { - await utils.cleanAllBucket(store); - }); - describe('putStream()', () => { afterEach(mm.restore); @@ -1242,6 +1240,7 @@ describe('test/object.test.js', () => { }); it('should get exists object stream', async () => { + await utils.sleep(ms(metaSyncTime)); const result = await store.getStream(name); assert.equal(result.res.status, 200); assert(result.stream instanceof Readable); @@ -1306,21 +1305,24 @@ describe('test/object.test.js', () => { } }); - it('should throw error and consume the response stream', async () => { - store.agent = new AgentKeepalive({ - keepAlive: true + if(!process.env.ONCI) { + it('should throw error and consume the response stream', async () => { + store.agent = new AgentKeepalive({ + keepAlive: true + }); + store.httpsAgent = new HttpsAgentKeepalive(); + try { + await store.getStream(`${name}not-exists`); + throw new Error('should not run this'); + } catch (err) { + console.log('error is', err) + assert.equal(err.name, 'NoSuchKeyError'); + assert(Object.keys(store.agent.freeSockets).length === 0); + await utils.sleep(ms(metaSyncTime)); + assert(Object.keys(store.agent.freeSockets).length === 1); + } }); - store.httpsAgent = new HttpsAgentKeepalive(); - try { - await store.getStream(`${name}not-exists`); - throw new Error('should not run this'); - } catch (err) { - assert.equal(err.name, 'NoSuchKeyError'); - assert(Object.keys(store.agent.freeSockets).length === 0); - await sleep(1); - assert(Object.keys(store.agent.freeSockets).length === 1); - } - }); + } }); describe('delete()', () => { @@ -2245,7 +2247,10 @@ describe('test/object.test.js', () => { type: 'ColdArchive', Days: 2 }); - assert.equal(['Expedited', 'Standard', 'Bulk'].includes(result.res.headers['x-oss-object-restore-priority']), true); + assert.equal( + ['Expedited', 'Standard', 'Bulk'].includes(result.res.headers['x-oss-object-restore-priority']), + true + ); }); it('ColdArchive is Accepted', async () => { @@ -2253,7 +2258,10 @@ describe('test/object.test.js', () => { const result = await store.restore(name, { type: 'ColdArchive' }); - assert.equal(['Expedited', 'Standard', 'Bulk'].includes(result.res.headers['x-oss-object-restore-priority']), true); + assert.equal( + ['Expedited', 'Standard', 'Bulk'].includes(result.res.headers['x-oss-object-restore-priority']), + true + ); }); }); @@ -2322,12 +2330,13 @@ describe('test/object.test.js', () => { } }; - const postFile = () => new Promise((resolve, reject) => { - request(options, (err, res) => { - if (err) reject(err); - if (res) resolve(res); + const postFile = () => + new Promise((resolve, reject) => { + request(options, (err, res) => { + if (err) reject(err); + if (res) resolve(res); + }); }); - }); const result = await postFile(); assert(result.statusCode === 204);