-
Notifications
You must be signed in to change notification settings - Fork 1.9k
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #2997 from GoogleCloudPlatform/nodejs-storage-transfer-migration: migrate code from googleapis/nodejs-storage-transfer
- Loading branch information
Showing
32 changed files
with
2,587 additions
and
2 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,82 @@ | ||
name: storagetransfer | ||
on: | ||
push: | ||
branches: | ||
- main | ||
paths: | ||
- 'storagetransfer/**' | ||
- '.github/workflows/storagetransfer.yaml' | ||
pull_request: | ||
paths: | ||
- 'storagetransfer/**' | ||
- '.github/workflows/storagetransfer.yaml' | ||
pull_request_target: | ||
types: [labeled] | ||
paths: | ||
- 'storagetransfer/**' | ||
- '.github/workflows/storagetransfer.yaml' | ||
schedule: | ||
- cron: '0 0 * * 0' | ||
jobs: | ||
test: | ||
if: ${{ github.event.action != 'labeled' || github.event.label.name == 'actions:force-run' }} | ||
runs-on: ubuntu-latest | ||
timeout-minutes: 60 | ||
permissions: | ||
contents: 'write' | ||
pull-requests: 'write' | ||
id-token: 'write' | ||
steps: | ||
- uses: actions/checkout@v3.1.0 | ||
with: | ||
ref: ${{github.event.pull_request.head.sha}} | ||
- uses: 'google-github-actions/auth@v1.0.0' | ||
with: | ||
workload_identity_provider: 'projects/1046198160504/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider' | ||
service_account: 'kokoro-system-test@long-door-651.iam.gserviceaccount.com' | ||
create_credentials_file: 'true' | ||
access_token_lifetime: 600s | ||
- id: secrets | ||
uses: "google-github-actions/get-secretmanager-secrets@v1" | ||
with: | ||
secrets: |- | ||
sts_aws_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-aws | ||
sts_azure_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-azure | ||
- uses: actions/setup-node@v3.5.1 | ||
with: | ||
node-version: 16 | ||
- run: npm install | ||
working-directory: storagetransfer | ||
- run: npm test | ||
working-directory: storagetransfer | ||
env: | ||
AWS_ACCESS_KEY_ID : ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).AccessKeyId }} | ||
AWS_SECRET_ACCESS_KEY: ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).SecretAccessKey }} | ||
AZURE_STORAGE_ACCOUNT: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).StorageAccount }} | ||
AZURE_CONNECTION_STRING: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).ConnectionString }} | ||
AZURE_SAS_TOKEN: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).SAS }} | ||
MOCHA_REPORTER_SUITENAME: storagetransfer | ||
MOCHA_REPORTER_OUTPUT: storagetransfer_sponge_log.xml | ||
MOCHA_REPORTER: xunit | ||
- if: ${{ github.event.action == 'labeled' && github.event.label.name == 'actions:force-run' }} | ||
uses: actions/github-script@v6 | ||
with: | ||
github-token: ${{ secrets.GITHUB_TOKEN }} | ||
script: | | ||
try { | ||
await github.rest.issues.removeLabel({ | ||
name: 'actions:force-run', | ||
owner: 'GoogleCloudPlatform', | ||
repo: 'nodejs-docs-samples', | ||
issue_number: context.payload.pull_request.number | ||
}); | ||
} catch (e) { | ||
if (!e.message.includes('Label does not exist')) { | ||
throw e; | ||
} | ||
} | ||
- if: ${{ github.event_name == 'schedule' && always() }} | ||
run: | | ||
curl https://github.com/googleapis/repo-automation-bots/releases/download/flakybot-1.1.0/flakybot -o flakybot -s -L | ||
chmod +x ./flakybot | ||
./flakybot --repo GoogleCloudPlatform/nodejs-docs-samples --commit_hash ${{github.sha}} --build_url https://github.com/${{github.repository}}/actions/runs/${{github.run_id}} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Validating CODEOWNERS rules …
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,110 @@ | ||
/** | ||
* Copyright 2022 Google LLC | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
'use strict'; | ||
|
||
async function main( | ||
projectId, | ||
description, | ||
awsSourceBucket, | ||
gcsSinkBucket, | ||
awsAccessKeyId = process.env.AWS_ACCESS_KEY_ID, | ||
awsSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY | ||
) { | ||
// [START storagetransfer_transfer_from_aws] | ||
|
||
// Imports the Google Cloud client library | ||
const { | ||
StorageTransferServiceClient, | ||
} = require('@google-cloud/storage-transfer'); | ||
|
||
/** | ||
* TODO(developer): Uncomment the following lines before running the sample. | ||
*/ | ||
// The ID of the Google Cloud Platform Project that owns the job | ||
// projectId = 'my-project-id' | ||
|
||
// A useful description for your transfer job | ||
// description = 'My transfer job' | ||
|
||
// AWS S3 source bucket name | ||
// awsSourceBucket = 'my-s3-source-bucket' | ||
|
||
// AWS Access Key ID | ||
// awsAccessKeyId = 'AKIA...' | ||
|
||
// AWS Secret Access Key | ||
// awsSecretAccessKey = 'HEAoMK2.../...ku8' | ||
|
||
// Google Cloud Storage destination bucket name | ||
// gcsSinkBucket = 'my-gcs-destination-bucket' | ||
|
||
// Creates a client | ||
const client = new StorageTransferServiceClient(); | ||
|
||
/** | ||
* Creates a one-time transfer job from Amazon S3 to Google Cloud Storage. | ||
*/ | ||
async function transferFromS3() { | ||
// Setting the start date and the end date as the same time creates a | ||
// one-time transfer | ||
const now = new Date(); | ||
const oneTimeSchedule = { | ||
day: now.getDate(), | ||
month: now.getMonth() + 1, | ||
year: now.getFullYear(), | ||
}; | ||
|
||
// Runs the request and creates the job | ||
const [transferJob] = await client.createTransferJob({ | ||
transferJob: { | ||
projectId, | ||
description, | ||
status: 'ENABLED', | ||
schedule: { | ||
scheduleStartDate: oneTimeSchedule, | ||
scheduleEndDate: oneTimeSchedule, | ||
}, | ||
transferSpec: { | ||
awsS3DataSource: { | ||
bucketName: awsSourceBucket, | ||
awsAccessKey: { | ||
accessKeyId: awsAccessKeyId, | ||
secretAccessKey: awsSecretAccessKey, | ||
}, | ||
}, | ||
gcsDataSink: { | ||
bucketName: gcsSinkBucket, | ||
}, | ||
}, | ||
}, | ||
}); | ||
|
||
console.log( | ||
`Created and ran a transfer job from '${awsSourceBucket}' to '${gcsSinkBucket}' with name ${transferJob.name}` | ||
); | ||
} | ||
|
||
transferFromS3(); | ||
// [END storagetransfer_transfer_from_aws] | ||
} | ||
|
||
main(...process.argv.slice(2)); | ||
|
||
process.on('unhandledRejection', err => { | ||
console.error(err); | ||
process.exitCode = 1; | ||
}); |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,132 @@ | ||
/** | ||
* Copyright 2022 Google LLC | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
'use strict'; | ||
|
||
const {protos} = require('@google-cloud/storage-transfer'); | ||
const {AuthMethod, NetworkProtocol, RequestModel} = | ||
protos.google.storagetransfer.v1.S3CompatibleMetadata; | ||
|
||
async function main( | ||
projectId = 'my-project', | ||
sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default', | ||
sourceBucketName = 'my-bucket-name', | ||
sourcePath = 'path/to/data/', | ||
gcsSinkBucket = 'my-sink-bucket', | ||
gcsPath = 'path/to/data/', | ||
region = 'us-east-1', | ||
endpoint = 'us-east-1.example.com', | ||
protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS, | ||
requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE, | ||
authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4 | ||
) { | ||
// [START storagetransfer_transfer_from_s3_compatible_source] | ||
|
||
// Imports the Google Cloud client library | ||
const storageTransfer = require('@google-cloud/storage-transfer'); | ||
|
||
/** | ||
* TODO(developer): Uncomment the following lines before running the sample. | ||
*/ | ||
// Useful enums for AWS S3-Compatible Transfers | ||
// const {AuthMethod, NetworkProtocol, RequestModel} = storageTransfer.protos.google.storagetransfer.v1.S3CompatibleMetadata; | ||
|
||
// Your project id | ||
// const projectId = 'my-project'; | ||
|
||
// The agent pool associated with the S3-compatible data source. Defaults to the default agent | ||
// const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default'; | ||
|
||
// The S3-compatible bucket name to transfer data from | ||
// const sourceBucketName = "my-bucket-name"; | ||
|
||
// The S3-compatible path (object prefix) to transfer data from | ||
// const sourcePath = "path/to/data/"; | ||
|
||
// The ID of the GCS bucket to transfer data to | ||
// const gcsSinkBucket = "my-sink-bucket"; | ||
|
||
// The GCS path (object prefix) to transfer data to | ||
// const gcsPath = "path/to/data/"; | ||
|
||
// The S3 region of the source bucket | ||
// const region = 'us-east-1'; | ||
|
||
// The S3-compatible endpoint | ||
// const endpoint = "us-east-1.example.com"; | ||
|
||
// The S3-compatible network protocol | ||
// const protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS; | ||
|
||
// The S3-compatible request model | ||
// const requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE; | ||
|
||
// The S3-compatible auth method | ||
// const authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4; | ||
|
||
// Creates a client | ||
const client = new storageTransfer.StorageTransferServiceClient(); | ||
|
||
/** | ||
* Creates a transfer from an AWS S3-compatible source to GCS | ||
*/ | ||
async function transferFromS3CompatibleSource() { | ||
// Runs the request and creates the job | ||
const [transferJob] = await client.createTransferJob({ | ||
transferJob: { | ||
projectId, | ||
transferSpec: { | ||
sourceAgentPoolName, | ||
awsS3CompatibleDataSource: { | ||
region, | ||
s3Metadata: { | ||
authMethod, | ||
protocol, | ||
requestModel, | ||
}, | ||
endpoint, | ||
bucketName: sourceBucketName, | ||
path: sourcePath, | ||
}, | ||
gcsDataSink: { | ||
bucketName: gcsSinkBucket, | ||
path: gcsPath, | ||
}, | ||
}, | ||
status: 'ENABLED', | ||
}, | ||
}); | ||
|
||
await client.runTransferJob({ | ||
jobName: transferJob.name, | ||
projectId, | ||
}); | ||
|
||
console.log( | ||
`Created and ran a transfer job from '${sourceBucketName}' to '${gcsSinkBucket}' with name ${transferJob.name}` | ||
); | ||
} | ||
|
||
transferFromS3CompatibleSource(); | ||
// [END storagetransfer_transfer_from_s3_compatible_source] | ||
} | ||
|
||
main(...process.argv.slice(2)); | ||
|
||
process.on('unhandledRejection', err => { | ||
console.error(err.message); | ||
process.exitCode = 1; | ||
}); |
Oops, something went wrong.