Skip to content

Commit ace1413

Browse files
samples: Add AWS S3-compatible Sample (#118)
* samples: Add AWS S3-compatible sample * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * docs: description * docs: typo * docs: another typo * docs: typos Co-authored-by: Owl Bot <gcf-owl-bot[bot]@users.noreply.github.com>
1 parent 09a575a commit ace1413

File tree

3 files changed

+231
-0
lines changed

3 files changed

+231
-0
lines changed
Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
/**
2+
* Copyright 2022 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
const {protos} = require('@google-cloud/storage-transfer');
20+
const {AuthMethod, NetworkProtocol, RequestModel} =
21+
protos.google.storagetransfer.v1.S3CompatibleMetadata;
22+
23+
async function main(
  projectId = 'my-project',
  sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default',
  sourceBucketName = 'my-bucket-name',
  sourcePath = 'path/to/data/',
  gcsSinkBucket = 'my-sink-bucket',
  gcsPath = 'path/to/data/',
  region = 'us-east-1',
  endpoint = 'us-east-1.example.com',
  protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS,
  requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE,
  authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4
) {
  // [START storagetransfer_transfer_from_s3_compatible_source]

  // Imports the Google Cloud client library
  const storageTransfer = require('@google-cloud/storage-transfer');

  /**
   * TODO(developer): Uncomment the following lines before running the sample.
   */
  // Useful enums for AWS S3-Compatible Transfers
  // const {AuthMethod, NetworkProtocol, RequestModel} = storageTransfer.protos.google.storagetransfer.v1.S3CompatibleMetadata;

  // Your project id
  // const projectId = 'my-project';

  // The agent pool associated with the S3-compatible data source. Defaults to the default agent
  // const sourceAgentPoolName = 'projects/my-project/agentPools/transfer_service_default';

  // The S3-compatible bucket name to transfer data from
  // const sourceBucketName = "my-bucket-name";

  // The S3-compatible path (object prefix) to transfer data from
  // const sourcePath = "path/to/data/";

  // The ID of the GCS bucket to transfer data to
  // const gcsSinkBucket = "my-sink-bucket";

  // The GCS path (object prefix) to transfer data to
  // const gcsPath = "path/to/data/";

  // The S3 region of the source bucket
  // const region = 'us-east-1';

  // The S3-compatible endpoint
  // const endpoint = "us-east-1.example.com";

  // The S3-compatible network protocol
  // const protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS;

  // The S3-compatible request model
  // const requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE;

  // The S3-compatible auth method
  // const authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4;

  // Creates a client
  const client = new storageTransfer.StorageTransferServiceClient();

  /**
   * Creates a transfer from an AWS S3-compatible source to GCS
   */
  async function transferFromS3CompatibleSource() {
    // Runs the request and creates the job
    const [transferJob] = await client.createTransferJob({
      transferJob: {
        projectId,
        transferSpec: {
          sourceAgentPoolName,
          awsS3CompatibleDataSource: {
            region,
            s3Metadata: {
              authMethod,
              protocol,
              requestModel,
            },
            endpoint,
            bucketName: sourceBucketName,
            path: sourcePath,
          },
          gcsDataSink: {
            bucketName: gcsSinkBucket,
            path: gcsPath,
          },
        },
        status: 'ENABLED',
      },
    });

    await client.runTransferJob({
      jobName: transferJob.name,
      projectId,
    });

    console.log(
      `Created and ran a transfer job from '${sourceBucketName}' to '${gcsSinkBucket}' with name ${transferJob.name}`
    );
  }

  // Await the async work so `main`'s promise does not resolve before the
  // transfer job has been created and started, and so a failure rejects
  // `main` itself instead of escaping as an unhandled rejection.
  await transferFromS3CompatibleSource();
  // [END storagetransfer_transfer_from_s3_compatible_source]
}
126+
127+
// Surface any unhandled async failure on stderr and exit non-zero.
const reportUnhandledRejection = (err) => {
  console.error(err.message);
  process.exitCode = 1;
};
process.on('unhandledRejection', reportUnhandledRejection);

// Run the sample, forwarding CLI arguments as positional parameters.
main(...process.argv.slice(2));
Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
/**
2+
* Copyright 2022 Google LLC
3+
*
4+
* Licensed under the Apache License, Version 2.0 (the "License");
5+
* you may not use this file except in compliance with the License.
6+
* You may obtain a copy of the License at
7+
*
8+
* http://www.apache.org/licenses/LICENSE-2.0
9+
*
10+
* Unless required by applicable law or agreed to in writing, software
11+
* distributed under the License is distributed on an "AS IS" BASIS,
12+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13+
* See the License for the specific language governing permissions and
14+
* limitations under the License.
15+
*/
16+
17+
'use strict';
18+
19+
const {assert} = require('chai');
20+
const {after, before, describe, it} = require('mocha');
21+
22+
const {BucketManager, TransferJobManager, runSample} = require('./utils');
23+
24+
describe('aws-s3-compatible-source-request', () => {
  const testBucketManager = new BucketManager();
  const testTransferJobManager = new TransferJobManager();
  const {NetworkProtocol, RequestModel, AuthMethod} =
    TransferJobManager.protos.storagetransfer.v1.S3CompatibleMetadata;

  let projectId;
  let sourceAgentPoolName;
  let sourceBucketName;
  let sourcePath;
  let gcsSinkBucket;
  let gcsPath;
  let region;
  let endpoint;
  let protocol;
  let requestModel;
  let authMethod;

  before(async () => {
    projectId = await testTransferJobManager.client.getProjectId();

    // Use default pool
    sourceAgentPoolName = '';

    const sourceBucket = await testBucketManager.generateGCSBucket();
    sourceBucketName = sourceBucket.name;
    sourcePath = 'path/to/data/';

    gcsSinkBucket = (await testBucketManager.generateGCSBucket()).name;
    gcsPath = 'path/to/data/';

    // Bucket#getMetadata() resolves to [metadata, apiResponse]; the original
    // read `.location` directly off the returned Promise, which always
    // produced `undefined`.
    const [sourceBucketMetadata] = await sourceBucket.getMetadata();
    region = sourceBucketMetadata.location;
    endpoint = sourceBucket.baseUrl;
    protocol = NetworkProtocol.NETWORK_PROTOCOL_HTTPS;
    requestModel = RequestModel.REQUEST_MODEL_VIRTUAL_HOSTED_STYLE;
    authMethod = AuthMethod.AUTH_METHOD_AWS_SIGNATURE_V4;
  });

  after(async () => {
    await testBucketManager.deleteBuckets();
    await testTransferJobManager.cleanUp();
  });

  it('should create a transfer job from an AWS S3-compatible source to GCS', async () => {
    const output = await runSample('aws-s3-compatible-source-request', [
      projectId,
      sourceAgentPoolName,
      sourceBucketName,
      sourcePath,
      gcsSinkBucket,
      gcsPath,
      region,
      endpoint,
      protocol,
      requestModel,
      authMethod,
    ]);

    // If it ran successfully and a job was created, delete it to clean up.
    // Guard against a null match so a failed run reaches the assertion
    // below instead of throwing a TypeError on destructuring.
    const [jobName] = output.match(/transferJobs.*/) || [];
    if (jobName) {
      testTransferJobManager.transferJobToCleanUp(jobName);
    }

    // Find at least 1 transfer operation from the transfer job in the output
    assert.include(output, 'Created and ran a transfer job');
  });
});

storagetransfer/test/utils/transfer.js

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,8 +18,12 @@
1818

1919
const {
2020
StorageTransferServiceClient,
21+
protos,
2122
} = require('@google-cloud/storage-transfer');
2223

24+
this.protos = protos.google;
25+
require('@google-cloud/storage-transfer');
26+
2327
const {BucketManager} = require('./bucket');
2428

2529
class TransferJobManager {
@@ -105,6 +109,10 @@ class TransferJobManager {
105109
// Registers a transfer job name so it is deleted later during cleanup
// (presumably consumed by cleanUp(), which tests await in `after` hooks —
// confirm against the elided class body).
transferJobToCleanUp(jobName) {
  this.transferJobsToCleanup.push(jobName);
}
112+
113+
// Exposes the client library's generated protobuf namespace (`protos.google`)
// so tests can reach enums such as
// `TransferJobManager.protos.storagetransfer.v1.S3CompatibleMetadata`
// without requiring '@google-cloud/storage-transfer' themselves.
static get protos() {
  return protos.google;
}
108116
}
109117

110118
module.exports = {TransferJobManager};

0 commit comments

Comments
 (0)