
Commit ceca4ae

Merge pull request #2997 from GoogleCloudPlatform/nodejs-storage-transfer-migration
migrate code from googleapis/nodejs-storage-transfer
2 parents 10b9c65 + ade3183 · commit ceca4ae

32 files changed (+2587, -2 lines)
.github/workflows/storagetransfer.yaml (+82, new file)

@@ -0,0 +1,82 @@
name: storagetransfer
on:
  push:
    branches:
      - main
    paths:
      - 'storagetransfer/**'
      - '.github/workflows/storagetransfer.yaml'
  pull_request:
    paths:
      - 'storagetransfer/**'
      - '.github/workflows/storagetransfer.yaml'
  pull_request_target:
    types: [labeled]
    paths:
      - 'storagetransfer/**'
      - '.github/workflows/storagetransfer.yaml'
  schedule:
    - cron: '0 0 * * 0'
jobs:
  test:
    if: ${{ github.event.action != 'labeled' || github.event.label.name == 'actions:force-run' }}
    runs-on: ubuntu-latest
    timeout-minutes: 60
    permissions:
      contents: 'write'
      pull-requests: 'write'
      id-token: 'write'
    steps:
      - uses: actions/[email protected]
        with:
          ref: ${{github.event.pull_request.head.sha}}
      - uses: 'google-github-actions/[email protected]'
        with:
          workload_identity_provider: 'projects/1046198160504/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider'
          service_account: '[email protected]'
          create_credentials_file: 'true'
          access_token_lifetime: 600s
      - id: secrets
        uses: "google-github-actions/get-secretmanager-secrets@v1"
        with:
          secrets: |-
            sts_aws_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-aws
            sts_azure_secret:nodejs-docs-samples-tests/nodejs-docs-samples-storagetransfer-azure
      - uses: actions/[email protected]
        with:
          node-version: 16
      - run: npm install
        working-directory: storagetransfer
      - run: npm test
        working-directory: storagetransfer
        env:
          AWS_ACCESS_KEY_ID: ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).AccessKeyId }}
          AWS_SECRET_ACCESS_KEY: ${{ fromJSON(steps.secrets.outputs.sts_aws_secret).SecretAccessKey }}
          AZURE_STORAGE_ACCOUNT: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).StorageAccount }}
          AZURE_CONNECTION_STRING: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).ConnectionString }}
          AZURE_SAS_TOKEN: ${{ fromJSON(steps.secrets.outputs.sts_azure_secret).SAS }}
          MOCHA_REPORTER_SUITENAME: storagetransfer
          MOCHA_REPORTER_OUTPUT: storagetransfer_sponge_log.xml
          MOCHA_REPORTER: xunit
      - if: ${{ github.event.action == 'labeled' && github.event.label.name == 'actions:force-run' }}
        uses: actions/github-script@v6
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            try {
              await github.rest.issues.removeLabel({
                name: 'actions:force-run',
                owner: 'GoogleCloudPlatform',
                repo: 'nodejs-docs-samples',
                issue_number: context.payload.pull_request.number
              });
            } catch (e) {
              if (!e.message.includes('Label does not exist')) {
                throw e;
              }
            }
      - if: ${{ github.event_name == 'schedule' && always() }}
        run: |
          curl https://github.com/googleapis/repo-automation-bots/releases/download/flakybot-1.1.0/flakybot -o flakybot -s -L
          chmod +x ./flakybot
          ./flakybot --repo GoogleCloudPlatform/nodejs-docs-samples --commit_hash ${{github.sha}} --build_url https://github.com/${{github.repository}}/actions/runs/${{github.run_id}}
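The `fromJSON(...)` expressions above imply that each Secret Manager payload is a JSON object — for AWS, one with `AccessKeyId` and `SecretAccessKey` fields. A minimal sketch of how a test helper might consume the environment variables the workflow exports, assuming that payload shape (the helper itself is hypothetical, not part of this commit):

'use strict';

// Hypothetical helper: reads the AWS credentials that the workflow's
// `env:` block exposes to `npm test`. Assumes the Secret Manager payload
// shape implied by the fromJSON() expressions above.
function awsCredentialsFromEnv() {
  const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
  if (!accessKeyId || !secretAccessKey) {
    // aws-request.js below falls back to these same variables by default.
    throw new Error('AWS credentials are not set in the environment');
  }
  return {accessKeyId, secretAccessKey};
}

module.exports = {awsCredentialsFromEnv};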

.github/workflows/workflows.json (+1)

@@ -82,6 +82,7 @@
   "service-directory/snippets",
   "secret-manager",
   "speech",
+  "storagetransfer",
   "talent",
   "texttospeech",
   "translate",
CODEOWNERS (+3, -2)

@@ -47,8 +47,9 @@ functions/memorystore @GoogleCloudPlatform/nodejs-samples-reviewers
 functions/spanner @jsimonweb @GoogleCloudPlatform/nodejs-samples-reviewers

 # SoDa teams
-/cloud-sql/**/*.js @GoogleCloudPlatform/infra-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
-/datastore/**/*.js @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
+cloud-sql @GoogleCloudPlatform/infra-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
+datastore @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers
+storagetransfer @GoogleCloudPlatform/cloud-native-db-dpes @GoogleCloudPlatform/nodejs-samples-reviewers

 # One-offs
 composer @leahecole @sofisl @GoogleCloudPlatform/nodejs-samples-reviewers

storagetransfer/aws-request.js (+110, new file)

@@ -0,0 +1,110 @@
/**
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

async function main(
  projectId,
  description,
  awsSourceBucket,
  gcsSinkBucket,
  awsAccessKeyId = process.env.AWS_ACCESS_KEY_ID,
  awsSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY
) {
  // [START storagetransfer_transfer_from_aws]

  // Imports the Google Cloud client library
  const {
    StorageTransferServiceClient,
  } = require('@google-cloud/storage-transfer');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // The ID of the Google Cloud Platform Project that owns the job
  // projectId = 'my-project-id'

  // A useful description for your transfer job
  // description = 'My transfer job'

  // AWS S3 source bucket name
  // awsSourceBucket = 'my-s3-source-bucket'

  // AWS Access Key ID
  // awsAccessKeyId = 'AKIA...'

  // AWS Secret Access Key
  // awsSecretAccessKey = 'HEAoMK2.../...ku8'

  // Google Cloud Storage destination bucket name
  // gcsSinkBucket = 'my-gcs-destination-bucket'

  // Creates a client
  const client = new StorageTransferServiceClient();

  /**
   * Creates a one-time transfer job from Amazon S3 to Google Cloud Storage.
   */
  async function transferFromS3() {
    // Setting the start date and the end date as the same time creates a
    // one-time transfer
    const now = new Date();
    const oneTimeSchedule = {
      day: now.getDate(),
      month: now.getMonth() + 1,
      year: now.getFullYear(),
    };

    // Runs the request and creates the job
    const [transferJob] = await client.createTransferJob({
      transferJob: {
        projectId,
        description,
        status: 'ENABLED',
        schedule: {
          scheduleStartDate: oneTimeSchedule,
          scheduleEndDate: oneTimeSchedule,
        },
        transferSpec: {
          awsS3DataSource: {
            bucketName: awsSourceBucket,
            awsAccessKey: {
              accessKeyId: awsAccessKeyId,
              secretAccessKey: awsSecretAccessKey,
            },
          },
          gcsDataSink: {
            bucketName: gcsSinkBucket,
          },
        },
      },
    });

    console.log(
      `Created and ran a transfer job from '${awsSourceBucket}' to '${gcsSinkBucket}' with name ${transferJob.name}`
    );
  }

  transferFromS3();
  // [END storagetransfer_transfer_from_aws]
}

main(...process.argv.slice(2));

process.on('unhandledRejection', err => {
  console.error(err);
  process.exitCode = 1;
});
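Note that the AWS sample only creates the job: the one-time schedule (start and end date both set to today) is what triggers the run, and the final log line is printed as soon as creation succeeds. A sketch of checking the job's state afterwards with `getTransferJob`, which the same client exposes (the wrapper function and its wiring are illustrative):

'use strict';

const {
  StorageTransferServiceClient,
} = require('@google-cloud/storage-transfer');

// Illustrative follow-up to aws-request.js: fetch the job by the name the
// sample logs (e.g. 'transferJobs/123456789') and print its status.
async function checkTransferJob(projectId, jobName) {
  const client = new StorageTransferServiceClient();
  const [job] = await client.getTransferJob({projectId, jobName});
  console.log(`${job.name}: status=${job.status}`);
  return job;
}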
storagetransfer/… (+132, new file)

@@ -0,0 +1,132 @@
/**
 * Copyright 2022 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

'use strict';

const {protos} = require('@google-cloud/storage-transfer');
const {AuthMethod, NetworkProtocol, RequestModel} =
  protos.google.storagetransfer.v1.S3CompatibleMetadata;

async function main(
  projectId = 'my-project',
  sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default',
  sourceBucketName = 'my-bucket-name',
  sourcePath = 'path/to/data/',
  gcsSinkBucket = 'my-sink-bucket',
  gcsPath = 'path/to/data/',
  region = 'us-east-1',
  endpoint = 'us-east-1.example.com',
  protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS,
  requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE,
  authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4
) {
  // [START storagetransfer_transfer_from_s3_compatible_source]

  // Imports the Google Cloud client library
  const storageTransfer = require('@google-cloud/storage-transfer');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // Useful enums for AWS S3-Compatible Transfers
  // const {AuthMethod, NetworkProtocol, RequestModel} = storageTransfer.protos.google.storagetransfer.v1.S3CompatibleMetadata;

  // Your project id
  // const projectId = 'my-project';

  // The agent pool associated with the S3-compatible data source. Defaults to the default agent
  // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default';

  // The S3-compatible bucket name to transfer data from
  // const sourceBucketName = "my-bucket-name";

  // The S3-compatible path (object prefix) to transfer data from
  // const sourcePath = "path/to/data/";

  // The ID of the GCS bucket to transfer data to
  // const gcsSinkBucket = "my-sink-bucket";

  // The GCS path (object prefix) to transfer data to
  // const gcsPath = "path/to/data/";

  // The S3 region of the source bucket
  // const region = 'us-east-1';

  // The S3-compatible endpoint
  // const endpoint = "us-east-1.example.com";

  // The S3-compatible network protocol
  // const protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS;

  // The S3-compatible request model
  // const requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE;

  // The S3-compatible auth method
  // const authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4;

  // Creates a client
  const client = new storageTransfer.StorageTransferServiceClient();

  /**
   * Creates a transfer from an AWS S3-compatible source to GCS
   */
  async function transferFromS3CompatibleSource() {
    // Runs the request and creates the job
    const [transferJob] = await client.createTransferJob({
      transferJob: {
        projectId,
        transferSpec: {
          sourceAgentPoolName,
          awsS3CompatibleDataSource: {
            region,
            s3Metadata: {
              authMethod,
              protocol,
              requestModel,
            },
            endpoint,
            bucketName: sourceBucketName,
            path: sourcePath,
          },
          gcsDataSink: {
            bucketName: gcsSinkBucket,
            path: gcsPath,
          },
        },
        status: 'ENABLED',
      },
    });

    await client.runTransferJob({
      jobName: transferJob.name,
      projectId,
    });

    console.log(
      `Created and ran a transfer job from '${sourceBucketName}' to '${gcsSinkBucket}' with name ${transferJob.name}`
    );
  }

  transferFromS3CompatibleSource();
  // [END storagetransfer_transfer_from_s3_compatible_source]
}

main(...process.argv.slice(2));

process.on('unhandledRejection', err => {
  console.error(err.message);
  process.exitCode = 1;
});
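Unlike the AWS sample, this one starts the job explicitly with `runTransferJob`, but it does not wait for the transfer itself to finish: in this client, `runTransferJob` resolves to a long-running operation handle. A minimal sketch of blocking until the operation completes (the wrapper function is illustrative, not part of this commit):

'use strict';

const storageTransfer = require('@google-cloud/storage-transfer');

// Illustrative variant: start an existing transfer job and wait for the
// resulting long-running operation to reach a terminal state.
async function runJobAndWait(projectId, jobName) {
  const client = new storageTransfer.StorageTransferServiceClient();
  const [operation] = await client.runTransferJob({jobName, projectId});
  // promise() resolves once the transfer operation finishes.
  await operation.promise();
  console.log(`Transfer operation for ${jobName} completed`);
}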
