NSFS | NC | whitelist ip fix
Signed-off-by: Romy <35330373+romayalon@users.noreply.github.com>

NC - fix server ip whitelist validation

Signed-off-by: Romy <35330373+romayalon@users.noreply.github.com>
romayalon committed May 1, 2024
1 parent 04aa47a commit 167ed0d
Showing 9 changed files with 119 additions and 66 deletions.
3 changes: 2 additions & 1 deletion config.js
@@ -822,6 +822,7 @@ config.NSFS_LOW_FREE_SPACE_PERCENT_UNLEASH = 0.10;
// NSFS NON CONTAINERIZED //
////////////////////////////

config.NC_RELOAD_CONFIG_INTERVAL = 10 * 1000;
config.NSFS_NC_CONF_DIR_REDIRECT_FILE = 'config_dir_redirect';
config.NSFS_NC_DEFAULT_CONF_DIR = '/etc/noobaa.conf.d';
config.NSFS_NC_CONF_DIR = process.env.NSFS_NC_CONF_DIR || '';
@@ -1066,7 +1067,7 @@ function reload_nsfs_nc_config() {
try {
const config_path = path.join(config.NSFS_NC_CONF_DIR, 'config.json');
fs.watchFile(config_path, {
interval: 10 * 1000
interval: config.NC_RELOAD_CONFIG_INTERVAL
}, () => {
delete require.cache[config_path];
try {
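Since the reload polling interval is now an ordinary config key instead of a hard-coded literal, it can be tuned per deployment (or sped up in tests, as the nc_coretest.js change further down does with `NC_RELOAD_CONFIG_INTERVAL: 1`). A minimal sketch of such an override, assuming the default conf dir shown above and that keys in the NC config.json simply overlay the built-in defaults; the helper name is hypothetical:

```
// Hypothetical helper: override NC_RELOAD_CONFIG_INTERVAL via the NC config.json
// so the next endpoint start polls for config changes more often.
// Assumes the default conf dir '/etc/noobaa.conf.d' (NSFS_NC_DEFAULT_CONF_DIR).
const fs = require('fs');
const path = require('path');

function set_reload_interval_ms(interval_ms, conf_dir = '/etc/noobaa.conf.d') {
    const config_json_path = path.join(conf_dir, 'config.json');
    const current = fs.existsSync(config_json_path) ?
        JSON.parse(fs.readFileSync(config_json_path, 'utf8')) : {};
    current.NC_RELOAD_CONFIG_INTERVAL = interval_ms;
    fs.writeFileSync(config_json_path, JSON.stringify(current, null, 2));
}

// Example: poll every second instead of the 10-second default.
set_reload_interval_ms(1000);
```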
2 changes: 1 addition & 1 deletion docs/dev_guide/NonContainerizedDeveloperCustomizations.md
@@ -234,7 +234,7 @@ Example:
```

## 13. Whitelist IPs -
**Description -** List of whitelist IPs. Access is restricted to these IPs only. If there are no IPs mentioned all IPs are allowed.
**Description -** List of whitelist IPs. Access is restricted to these server IPs only. If there are no IPs mentioned all IPs are allowed.

**Configuration Key -** S3_SERVER_IP_WHITELIST

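For illustration, a hedged example of what this key could look like in the NC config.json — the addresses are placeholders, and the empty-list default keeps the allow-all behavior:

```
{
    "S3_SERVER_IP_WHITELIST": ["127.0.0.1", "::1", "10.0.0.12"]
}
```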
2 changes: 1 addition & 1 deletion src/endpoint/s3/s3_rest.js
@@ -74,7 +74,7 @@ async function s3_rest(req, res) {

async function handle_request(req, res) {

http_utils.validate_nsfs_whitelist(req);
http_utils.validate_server_ip_whitelist(req);
http_utils.set_amz_headers(req, res);
http_utils.set_cors_headers_s3(req, res);

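The rename from `validate_nsfs_whitelist` to `validate_server_ip_whitelist` reflects what is being checked: the server-side address the request arrived on, not the client address. The sketch below is illustrative only (it is not the real `http_utils` implementation) and assumes the whitelist is matched against the connection's local address, which is what the new tests in this commit exercise by whitelisting `'127.0.0.1'` and `'::1'` for a localhost endpoint:

```
// Illustrative sketch only - not the actual http_utils.validate_server_ip_whitelist.
// Assumption: the whitelist is matched against the server-side (local) address
// of the incoming connection; an empty whitelist allows every IP.
function is_server_ip_allowed(req, whitelist) {
    if (!whitelist || whitelist.length === 0) return true; // empty list - allow all
    const server_ip = req.connection.localAddress; // address the request arrived on
    return whitelist.includes(server_ip);
}

// Usage sketch inside a request handler (AccessDenied mirrors the new test's expectation):
// if (!is_server_ip_allowed(req, config.S3_SERVER_IP_WHITELIST)) throw new S3Error(S3Error.AccessDenied);
```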
2 changes: 1 addition & 1 deletion src/server/system_services/schemas/nsfs_config_schema.js
@@ -94,7 +94,7 @@ const nsfs_node_config_schema = {
S3_SERVER_IP_WHITELIST: {
type: 'array',
default: [],
description: 'List of whitelisted IPs for S3 access, Allow access from all the IPs if list is empty.'
description: 'Whitelist of server IPs for S3 access, Allow access to all the IPs if list is empty.'
},
NSFS_DIR_CACHE_MAX_DIR_SIZE: {
type: 'number',
20 changes: 20 additions & 0 deletions src/test/system_tests/test_utils.js
@@ -3,10 +3,13 @@

const fs = require('fs');
const _ = require('lodash');
const http = require('http');
const P = require('../../util/promise');
const os_utils = require('../../util/os_utils');
const native_fs_utils = require('../../util/native_fs_utils');
const config = require('../../../config');
const { S3 } = require('@aws-sdk/client-s3');
const { NodeHttpHandler } = require("@smithy/node-http-handler");

/**
* TMP_PATH is a path to the tmp path based on the process platform
@@ -328,12 +331,29 @@ function set_nc_config_dir_in_config(config_root) {
config.NSFS_NC_CONF_DIR = config_root;
}


function generate_s3_client(access_key, secret_key, endpoint) {
return new S3({
forcePathStyle: true,
region: config.DEFAULT_REGION,
requestHandler: new NodeHttpHandler({
httpAgent: new http.Agent({ keepAlive: false })
}),
credentials: {
accessKeyId: access_key,
secretAccessKey: secret_key,
},
endpoint
});
}

exports.blocks_exist_on_cloud = blocks_exist_on_cloud;
exports.create_hosts_pool = create_hosts_pool;
exports.delete_hosts_pool = delete_hosts_pool;
exports.empty_and_delete_buckets = empty_and_delete_buckets;
exports.disable_accounts_s3_access = disable_accounts_s3_access;
exports.generate_s3_policy = generate_s3_policy;
exports.generate_s3_client = generate_s3_client;
exports.invalid_nsfs_root_permissions = invalid_nsfs_root_permissions;
exports.get_coretest_path = get_coretest_path;
exports.exec_manage_cli = exec_manage_cli;
6 changes: 5 additions & 1 deletion src/test/unit_tests/nc_coretest.js
@@ -139,7 +139,11 @@ async function config_dir_setup() {
await fs.promises.mkdir(config.NSFS_NC_DEFAULT_CONF_DIR, { recursive: true });
await fs.promises.mkdir(NC_CORETEST_CONFIG_DIR_PATH, { recursive: true });
await fs.promises.writeFile(NC_CORETEST_REDIRECT_FILE_PATH, NC_CORETEST_CONFIG_DIR_PATH);
await fs.promises.writeFile(CONFIG_FILE_PATH, JSON.stringify({ ALLOW_HTTP: true, OBJECT_SDK_BUCKET_CACHE_EXPIRY_MS: 1 }));
await fs.promises.writeFile(CONFIG_FILE_PATH, JSON.stringify({
ALLOW_HTTP: true,
OBJECT_SDK_BUCKET_CACHE_EXPIRY_MS: 1,
NC_RELOAD_CONFIG_INTERVAL: 1
}));
await fs.promises.mkdir(FIRST_BUCKET_PATH, { recursive: true });

}
89 changes: 66 additions & 23 deletions src/test/unit_tests/test_bucketspace.js
@@ -16,9 +16,12 @@ const fs_utils = require('../../util/fs_utils');
const test_utils = require('../system_tests/test_utils');
const { stat, open } = require('../../util/nb_native')().fs;
const { get_process_fs_context } = require('../../util/native_fs_utils');
const { TYPES } = require('../../manage_nsfs/manage_nsfs_constants');
const ManageCLIError = require('../../manage_nsfs/manage_nsfs_cli_errors').ManageCLIError;
const { TMP_PATH, get_coretest_path, invalid_nsfs_root_permissions,
generate_s3_policy, create_fs_user_by_platform, delete_fs_user_by_platform } = require('../system_tests/test_utils');
generate_s3_policy, create_fs_user_by_platform, delete_fs_user_by_platform,
generate_s3_client, exec_manage_cli } = require('../system_tests/test_utils');


const { S3 } = require('@aws-sdk/client-s3');
const { NodeHttpHandler } = require("@smithy/node-http-handler");
@@ -27,7 +30,7 @@ const coretest_path = get_coretest_path();
const coretest = require(coretest_path);
const { rpc_client, EMAIL, PASSWORD, SYSTEM, get_admin_account_details } = coretest;
coretest.setup({});

let CORETEST_ENDPOINT;
const inspect = (x, max_arr = 5) => util.inspect(x, { colors: true, depth: null, maxArrayLength: max_arr });

const DEFAULT_FS_CONFIG = get_process_fs_context();
@@ -86,6 +89,7 @@ mocha.describe('bucket operations - namespace_fs', function() {
if (process.env.NC_CORETEST) {
await create_fs_user_by_platform(no_permissions_dn, 'new_password', 123123123, 123123123);
}
CORETEST_ENDPOINT = coretest.get_http_address();
});
mocha.after(async () => {
await fs_utils.folder_delete(tmp_fs_root);
@@ -514,7 +518,7 @@ mocha.describe('bucket operations - namespace_fs', function() {

mocha.it('put object object with wrong uid gid', async function() {
const { access_key, secret_key } = account_wrong_uid.access_keys[0];
const s3_client_wrong_uid = generate_s3_client(access_key.unwrap(), secret_key.unwrap());
const s3_client_wrong_uid = generate_s3_client(access_key.unwrap(), secret_key.unwrap(), CORETEST_ENDPOINT);
const bucket = bucket_name + '-s3';
const key = 'ob1.txt';
try {
@@ -528,7 +532,7 @@

mocha.it('putObject - regular object - head/get object - key/ - should fail with 404', async function() {
const { access_key, secret_key } = account_correct_uid.access_keys[0];
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap());
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap(), CORETEST_ENDPOINT);
const bucket = bucket_name + '-s3';
const key = 'reg_obj.txt';
const invalid_dir_key = `${key}/`;
@@ -555,7 +559,7 @@

mocha.it('putObject/head object - xattr invalid URI', async function() {
const { access_key, secret_key } = account_correct_uid.access_keys[0];
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap());
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap(), CORETEST_ENDPOINT);
const bucket = bucket_name + '-s3';
const key = 'obj_dot_xattr.txt';
const xattr = { 'key1.2.3': encoded_xattr };
@@ -573,7 +577,7 @@
//const s3_creds_aws_sdk_v2 = get_aws_sdk_v2_base_params(account_correct_uid, s3_endpoint_address);
//const s3 = new AWS.S3(s3_creds_aws_sdk_v2);
const { access_key, secret_key } = account_correct_uid.access_keys[0];
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap());
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap(), CORETEST_ENDPOINT);
const key = 'fs_obj_dot_xattr.txt';
const bucket = bucket_name + '-s3';
const PATH = path.join(s3_new_buckets_path, bucket, key);
@@ -593,7 +597,7 @@

mocha.it('PUT file directly to FS/head object - xattr encoded invalid URI', async function() {
const { access_key, secret_key } = account_correct_uid.access_keys[0];
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap());
const s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap(), CORETEST_ENDPOINT);
//get_s3_v2_client(account_correct_uid, s3_endpoint_address);
const key = 'fs_obj_dot_xattr.txt';
const bucket = bucket_name + '-s3';
@@ -992,11 +996,12 @@ mocha.describe('list objects - namespace_fs', async function() {

const s3_policy = generate_s3_policy('*', bucket_name, ['s3:*']);
await rpc_client.bucket.put_bucket_policy({ name: s3_policy.params.bucket, policy: s3_policy.policy });
CORETEST_ENDPOINT = coretest.get_http_address();

for (const account_name of Object.keys(accounts)) {
const { uid, gid } = accounts[account_name];
const account = await generate_nsfs_account({ uid, gid, account_name, default_resource: namespace_resource_name });
const s3_client = generate_s3_client(account.access_key, account.secret_key);
const s3_client = generate_s3_client(account.access_key, account.secret_key, CORETEST_ENDPOINT);
accounts[account_name].s3_client = s3_client;
}
});
@@ -1568,12 +1573,12 @@ mocha.describe('list buckets - namespace_fs', async function() {
fs_root_path: tmp_fs_root,
}
});

CORETEST_ENDPOINT = coretest.get_http_address();
for (const account_name of Object.keys(accounts)) {
const { create_bucket_via, bucket, account_info } = accounts[account_name];
const { uid, gid } = account_info;
const account = await generate_nsfs_account({ uid, gid, account_name, new_buckets_path });
const s3_client = generate_s3_client(account.access_key, account.secret_key);
const s3_client = generate_s3_client(account.access_key, account.secret_key, CORETEST_ENDPOINT);
accounts[account_name].s3_client = s3_client;

if (create_bucket_via === 'CLI') {
@@ -1703,20 +1708,58 @@ mocha.describe('list buckets - namespace_fs', async function() {
});
});

function generate_s3_client(access_key, secret_key) {
return new S3({
forcePathStyle: true,
region: config.DEFAULT_REGION,
requestHandler: new NodeHttpHandler({
httpAgent: new http.Agent({ keepAlive: false })
}),
credentials: {
accessKeyId: access_key,
secretAccessKey: secret_key,
},
endpoint: coretest.get_http_address()
mocha.describe('s3 whitelist flow', async function() {
this.timeout(50000); // eslint-disable-line no-invalid-this
const nsr = 'server_ip_whitelist_nsr';
const account_name = 'server_ip_whitelist_account';
const fs_path = path.join(TMP_PATH, 'server_ip_whitelist_tests');

let s3_client;
mocha.before(async function() {
if (!process.env.NC_CORETEST) this.skip(); // eslint-disable-line no-invalid-this
await fs.promises.mkdir(fs_path, { recursive: true });
await rpc_client.pool.create_namespace_resource({ name: nsr, nsfs_config: { fs_root_path: fs_path }});
const nsfs_account_config = {
uid: process.getuid(), gid: process.getgid(), new_buckets_path: '/', nsfs_only: true
};
const account_params = { ...new_account_params, email: `${account_name}@noobaa.io`, name: account_name, default_resource: nsr, nsfs_account_config };
const res = await rpc_client.account.create_account(account_params);
const { access_key, secret_key } = res.access_keys[0];
CORETEST_ENDPOINT = coretest.get_http_address();
s3_client = generate_s3_client(access_key.unwrap(), secret_key.unwrap(), CORETEST_ENDPOINT);

});
}

mocha.after(async function() {
if (!process.env.NC_CORETEST) return;
await rpc_client.account.delete_account({ email: `${account_name}@noobaa.io` });
await exec_manage_cli(TYPES.IP_WHITELIST, '', { ips: JSON.stringify([]) });
});

mocha.it('whitelist ips empty - s3 request should succeed', async function() {
const buckets_list = await s3_client.listBuckets({});
bucket_in_list([first_bucket], [], buckets_list.Buckets);
});

mocha.it('server ip is in whitelist ips - s3 request should succeed', async function() {
const ips = ['::1', '127.0.0.1'];
await exec_manage_cli(TYPES.IP_WHITELIST, '', { ips: JSON.stringify(ips) });
const buckets_list = await s3_client.listBuckets({});
bucket_in_list([first_bucket], [], buckets_list.Buckets);
});

mocha.it('server ip is not in whitelist ips - s3 request should fail', async function() {
const ips = ['162.168.1.2'];
await exec_manage_cli(TYPES.IP_WHITELIST, '', { ips: JSON.stringify(ips) });
try {
await s3_client.listBuckets({});
assert.fail('test should have failed with AccessDenied - server ip is not in whitelist ips');
} catch (err) {
assert.equal(err.Code, 'AccessDenied');
}
});
});
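Outside mocha, the same flow can be condensed using the helpers this commit adds to test_utils.js; a rough sketch with placeholder credentials and endpoint (only `generate_s3_client`, `exec_manage_cli`, and `TYPES.IP_WHITELIST` come from the commit, everything else is assumed):

```
// Rough standalone sketch of the whitelist behavior exercised above; credentials,
// endpoint, and the chosen IPs are placeholders/assumptions.
const { TYPES } = require('../../manage_nsfs/manage_nsfs_constants');
const { generate_s3_client, exec_manage_cli } = require('../system_tests/test_utils');

async function expect_access_denied_for_unlisted_server_ip() {
    // restrict S3 access to a server IP the local endpoint does not listen on
    await exec_manage_cli(TYPES.IP_WHITELIST, '', { ips: JSON.stringify(['162.168.1.2']) });
    const s3 = generate_s3_client('ACCESS_KEY_PLACEHOLDER', 'SECRET_KEY_PLACEHOLDER', 'http://127.0.0.1:6001');
    try {
        await s3.listBuckets({});
        console.error('expected AccessDenied but the request succeeded');
    } catch (err) {
        console.log('got expected error code:', err.Code); // AccessDenied
    } finally {
        // restore the default allow-all behavior
        await exec_manage_cli(TYPES.IP_WHITELIST, '', { ips: JSON.stringify([]) });
    }
}
```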


async function generate_nsfs_account(options = {}) {
const { uid, gid, new_buckets_path, nsfs_only, admin, default_resource, account_name } = options;