Skip to content

SDK | Upgrade AWS SDK to v3 - Unit tests #9042

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Draft
wants to merge 6 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 1 addition & 7 deletions src/server/utils/bucket_diff.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
'use strict';

const _ = require('lodash');
const AWS = require('aws-sdk');

const SensitiveString = require('../../util/sensitive_string');
const replication_utils = require('../utils/replication_utils');
Expand All @@ -15,8 +14,7 @@ class BucketDiff {
* first_bucket: string;
* second_bucket: string;
* version: boolean;
* s3_params?: AWS.S3.ClientConfiguration
* connection?: AWS.S3
* connection?: import('@aws-sdk/client-s3').S3
* for_replication: boolean
* for_deletion: boolean
* }} params
Expand All @@ -26,7 +24,6 @@ class BucketDiff {
first_bucket,
second_bucket,
version,
s3_params,
connection,
for_replication,
for_deletion,
Expand All @@ -38,9 +35,6 @@ class BucketDiff {
// If we want to diff non-S3 buckets we should use noobaa namespace buckets.
if (connection) {
this.s3 = connection;
} else {
if (!s3_params) throw new Error('Expected s3_params');
this.s3 = new AWS.S3(s3_params);
}
// special treatment when we want the diff for replication purpose.
this.for_replication = for_replication;
Expand Down
55 changes: 29 additions & 26 deletions src/test/system_tests/test_bucket_access.js
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ const rpc = api.new_rpc();
const test_utils = require('./test_utils');

const fs = require('fs');
const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');
const crypto = require('crypto');
const assert = require('assert');

Expand Down Expand Up @@ -150,12 +150,15 @@ async function setup() {
/**
 * Build an S3 client (AWS SDK v3) authenticated as the given user.
 * @param {Object} user - account record; user.access_keys.{access_key,secret_key}
 *                        are SensitiveString wrappers (hence .unwrap()).
 * @returns {S3} client pointed at target_s3_endpoint with path-style addressing.
 */
function get_new_server(user) {
    const access_key = user.access_keys.access_key;
    const secret_key = user.access_keys.secret_key;
    return new S3({
        endpoint: target_s3_endpoint,
        // v3 rename of the v2 option s3ForcePathStyle
        forcePathStyle: true,
        // v3 takes credentials as a nested object instead of top-level keys
        credentials: {
            accessKeyId: access_key.unwrap(),
            secretAccessKey: secret_key.unwrap(),
        },
        // v2 maxRedirects has no v3 equivalent: the SDK no longer follows
        // redirects, to avoid unintentional cross-region requests.
    });
}

Expand Down Expand Up @@ -194,8 +197,8 @@ async function test_bucket_write_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.upload(params2).promise();
await server.putObject({Bucket: params1.Bucket, Key: params1.Key, Body: params1.Body });
await server.putObject({Bucket: params2.Bucket, Key: params2.Key, Body: params2.Body });

file_name = await ops.generate_random_file(1);
// upload with full_access_user to both buckets:
Expand All @@ -205,7 +208,7 @@ async function test_bucket_write_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params).promise();
await server.putObject({Bucket: params.Bucket, Key: params.Key, Body: params.Body});
console.log('test_bucket_write_allowed PASSED');
}

Expand All @@ -218,13 +221,13 @@ async function test_bucket_read_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject({Bucket: params1.Bucket, Key: params1.Key, Body: params1.Body });
const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket1',
Key: file_name
};
await server2.getObject(params2).promise();
await server2.getObject(params2);
console.log('test_bucket_read_allowed PASSED');
}

Expand All @@ -238,13 +241,13 @@ async function test_bucket_list_allowed() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);

const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket1'
};
await server2.listObjects(params2).promise();
await server2.listObjects(params2);

}

Expand All @@ -260,7 +263,7 @@ async function test_bucket_write_denied() {
Body: fs.createReadStream(file_name)
};
try {
await server.upload(params1).promise();
await server.putObject(params1);

throw new Error('expecting upload to fail with statusCode 403- AccessDenied');

Expand All @@ -279,14 +282,14 @@ async function test_bucket_read_denied() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);
const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket2',
Key: file_name
};
try {
await server2.getObject(params2).promise();
await server2.getObject(params2);
throw new Error('expecting read to fail with statusCode 403- AccessDenied');
} catch (err) {
assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
Expand All @@ -304,14 +307,14 @@ async function test_bucket_list_denied() {
Key: file_name,
Body: fs.createReadStream(file_name)
};
await server.upload(params1).promise();
await server.putObject(params1);

const server2 = get_new_server(bucket1_user);
const params2 = {
Bucket: 'bucket2'
};
try {
await server2.listObjects(params2).promise();
await server2.listObjects(params2);
throw new Error('expecting read to fail with statusCode 403- AccessDenied');
} catch (err) {
assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
Expand All @@ -326,7 +329,7 @@ async function test_create_bucket_add_creator_permissions() {
const params = {
Bucket: unique_bucket_name
};
await server.createBucket(params).promise();
await server.createBucket(params);

// Owners have full access to the bucket
const bucket = await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
Expand All @@ -338,12 +341,12 @@ async function test_delete_bucket_deletes_permissions() {
const server = get_new_server(full_access_user);
const unique_bucket_name = 'bucket' + crypto.randomUUID();

await server.createBucket({ Bucket: unique_bucket_name }).promise();
await server.createBucket({ Bucket: unique_bucket_name });

const bucket = await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
assert(bucket.owner_account.email.unwrap() === full_access_user.email, 'expecting full_access_user to have permissions to access ' + unique_bucket_name);

await server.deleteBucket({ Bucket: unique_bucket_name }).promise();
await server.deleteBucket({ Bucket: unique_bucket_name });

try {
await client.bucket.read_bucket({ rpc_params: { name: unique_bucket_name } });
Expand All @@ -356,7 +359,7 @@ async function test_delete_bucket_deletes_permissions() {
/**
 * Verify that an account with no S3 permissions sees an empty bucket list.
 * @throws {AssertionError} if any bucket is visible to no_access_user.
 */
async function test_no_s3_access() {
    console.log(`Starting test_no_s3_access`);
    const server = get_new_server(no_access_user);
    // v3 client methods return promises directly; no .promise() suffix.
    const data = await server.listBuckets();
    // v3 may omit the Buckets field entirely when the list is empty,
    // so default it before checking the length.
    assert((data.Buckets ?? []).length === 0, 'expecting an empty bucket list for no_access_user');
}

Expand All @@ -378,21 +381,21 @@ async function test_ip_restrictions() {

await client.account.update_account(single_ip_restriction);
try {
await server.listBuckets().promise();
await server.listBuckets();
} catch (err) {
assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
}
await client.account.update_account(no_ip_restriction);
let data = await server.listBuckets().promise();
let data = await server.listBuckets();
assert(data.Buckets.length !== 0, 'expecting none empty bucket list for none-restricted IP');
await client.account.update_account(range_ip_restriction);
try {
await server.listBuckets().promise();
await server.listBuckets();
} catch (err) {
assert(err.statusCode === 403, 'expecting read to fail with statusCode 403- AccessDenied');
}
await client.account.update_account(no_ip_restriction);
data = await server.listBuckets().promise();
data = await server.listBuckets();
assert(data.Buckets.length !== 0, 'expecting none empty bucket list for none-restricted IP');
}

Expand Down
19 changes: 7 additions & 12 deletions src/test/system_tests/test_build_chunks.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@ dbg.set_process_name('test_build_chunks');

const _ = require('lodash');
const fs = require('fs');
const AWS = require('aws-sdk');
//const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');
// const util = require('util');
// const crypto = require('crypto');

Expand Down Expand Up @@ -112,23 +113,17 @@ async function setup_case(

/**
 * Generate a random local file and upload it to the given bucket (AWS SDK v3).
 * @param {number} size_mb - size of the generated file in megabytes.
 * @param {string} bucket_name - destination bucket.
 * @param {string} [extension] - optional file extension for the generated file.
 * @param {string} [content_type] - optional Content-Type to store on the object.
 * @returns {Promise<string>} the generated file name (also used as the object key).
 */
async function upload_random_file(size_mb, bucket_name, extension, content_type) {
    const filename = await ops.generate_random_file(size_mb, extension);
    const s3bucket = new S3({
        endpoint: TEST_CTX.s3_endpoint,
        credentials: {
            accessKeyId: '123',
            secretAccessKey: 'abc'
        },
        forcePathStyle: true, // v3 rename of s3ForcePathStyle
        tls: false // v3 rename of sslEnabled
    });
    // NOTE(review): v2 upload() performed managed multipart uploads; v3
    // putObject() is a single PUT. For large files consider Upload from
    // @aws-sdk/lib-storage. Also, v3 putObject rejects streams of unknown
    // length, so pass ContentLength explicitly for the read stream.
    const stats = fs.statSync(filename);
    await s3bucket.putObject({
        Bucket: bucket_name,
        Key: filename,
        Body: fs.createReadStream(filename),
        ContentLength: stats.size,
        ContentType: content_type
    });
    return filename;
}

Expand Down
21 changes: 12 additions & 9 deletions src/test/system_tests/test_cloud_pools.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,23 +5,25 @@ const api = require('../../api');
const rpc = api.new_rpc();
const util = require('util');
const _ = require('lodash');
const AWS = require('aws-sdk');
//const AWS = require('aws-sdk');
const { S3 } = require('@aws-sdk/client-s3');
const argv = require('minimist')(process.argv);
const P = require('../../util/promise');
const basic_server_ops = require('../utils/basic_server_ops');
const dotenv = require('../../util/dotenv');
dotenv.load();
const test_utils = require('./test_utils');

// Module-level S3 client (AWS SDK v3) using AWS credentials from the
// environment (loaded via dotenv above).
const s3 = new S3({
    // endpoint: 'https://s3.amazonaws.com',
    // v3 takes credentials as a nested object instead of top-level keys
    credentials: {
        accessKeyId: process.env.AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
    },
    forcePathStyle: true, // v3 rename of s3ForcePathStyle
    tls: false, // v3 rename of sslEnabled
    // signatureVersion removed: SDK v3 always signs with SigV4.
    // region: 'eu-central-1'
});

Expand Down Expand Up @@ -295,15 +297,16 @@ function run_test() {
.then(() => block_ids);
})
.then(function(block_ids) {
return P.ninvoke(new AWS.S3({
return P.ninvoke(new S3({
endpoint: 'http://' + TEST_CTX.source_ip,
credentials: {
accessKeyId: argv.access_key || '123',
secretAccessKey: argv.secret_key || 'abc'
},
s3ForcePathStyle: true,
sslEnabled: false,
signatureVersion: 'v4',
forcePathStyle: true,
tls: false,
// signatureVersion is Deprecated in SDK v3
//signatureVersion: 'v4',
// region: 'eu-central-1'
}), 'deleteObject', {
Bucket: TEST_CTX.source_bucket,
Expand Down
4 changes: 2 additions & 2 deletions src/test/system_tests/test_utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ const is_nc_coretest = process.env.NC_CORETEST === 'true';
* @param {*} pool_id
* @param {*} bucket_name
* @param {*} blocks
* @param {AWS.S3} s3
* @param {S3} s3
*/
async function blocks_exist_on_cloud(need_to_exist, pool_id, bucket_name, blocks, s3) {
console.log('blocks_exist_on_cloud::', need_to_exist, pool_id, bucket_name);
Expand All @@ -69,7 +69,7 @@ async function blocks_exist_on_cloud(need_to_exist, pool_id, bucket_name, blocks
return s3.headObject({
Bucket: bucket_name,
Key: `noobaa_blocks/${pool_id}/blocks_tree/${block.slice(block.length - 3)}.blocks/${block}`
}).promise();
});
}));

let condition_correct;
Expand Down
Loading
Loading