diff --git a/src/cmd/manage_nsfs.js b/src/cmd/manage_nsfs.js index 822ed6a0d7..b3dae0db1a 100644 --- a/src/cmd/manage_nsfs.js +++ b/src/cmd/manage_nsfs.js @@ -18,10 +18,10 @@ const ManageCLIResponse = require('../manage_nsfs/manage_nsfs_cli_responses').Ma const manage_nsfs_glacier = require('../manage_nsfs/manage_nsfs_glacier'); const manage_nsfs_logging = require('../manage_nsfs/manage_nsfs_logging'); const noobaa_cli_diagnose = require('../manage_nsfs/diagnose'); -const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils'); const { print_usage } = require('../manage_nsfs/manage_nsfs_help_utils'); const { TYPES, ACTIONS, LIST_ACCOUNT_FILTERS, LIST_BUCKET_FILTERS, GLACIER_ACTIONS } = require('../manage_nsfs/manage_nsfs_constants'); -const { throw_cli_error, write_stdout_response, get_boolean_or_string_value, has_access_keys, set_debug_level } = require('../manage_nsfs/manage_nsfs_cli_utils'); +const { throw_cli_error, write_stdout_response, get_boolean_or_string_value, + has_access_keys, set_debug_level, _is_name_update, _is_access_key_update } = require('../manage_nsfs/manage_nsfs_cli_utils'); const manage_nsfs_validations = require('../manage_nsfs/manage_nsfs_validations'); const nc_mkm = require('../manage_nsfs/nc_master_key_manager').get_instance(); @@ -66,7 +66,6 @@ async function main(argv = minimist(process.argv.slice(2))) { } else if (type === TYPES.DIAGNOSE) { await noobaa_cli_diagnose.manage_diagnose_operations(action, user_input, config_fs); } else { - // we should not get here (we check it before) throw_cli_error(ManageCLIError.InvalidType); } } catch (err) { @@ -82,14 +81,6 @@ async function main(argv = minimist(process.argv.slice(2))) { } } -async function bucket_management(action, user_input) { - let data; - if (action !== ACTIONS.LIST) { - data = await fetch_bucket_data(action, user_input); - } - await manage_bucket_operations(action, data, user_input); -} - // in name and new_name we allow type number, hence convert it to string async function fetch_bucket_data(action, user_input) { let data = { @@ -123,7 +114,7 @@ async function fetch_bucket_data(action, user_input) { if (action === ACTIONS.UPDATE || action === ACTIONS.DELETE) { // @ts-ignore data = _.omitBy(data, _.isUndefined); - data = await fetch_existing_bucket_data(data); + data = await merge_new_and_existing_config_data(data); } // override values @@ -137,77 +128,80 @@ async function fetch_bucket_data(action, user_input) { return data; } -async function fetch_existing_bucket_data(target) { - let source; + +/** + * merge_new_and_existing_config_data returns the merged object of the existing bucket data and the user data + * @param {Object} user_bucket_data + * @returns {Promise} + */ +async function merge_new_and_existing_config_data(user_bucket_data) { + let existing_bucket_data; try { - source = await config_fs.get_bucket_by_name(target.name); + existing_bucket_data = await config_fs.get_bucket_by_name(user_bucket_data.name); } catch (err) { - throw_cli_error(ManageCLIError.NoSuchBucket, target.name); + throw_cli_error(ManageCLIError.NoSuchBucket, user_bucket_data.name); } - const data = _.merge({}, source, target); + const data = _.merge({}, existing_bucket_data, user_bucket_data); return data; } +/** + * add_bucket creates a new bucket config file and returns the created bucket data + * @param {Object} data + * @returns { Promise<{ bucket_data: Object, code: typeof ManageCLIResponse.BucketCreated, event_arg: Object }>} */ async function add_bucket(data) { - await
manage_nsfs_validations.validate_bucket_args(config_fs, data, ACTIONS.ADD); - const exists = await config_fs.is_bucket_exists(data.name); - if (exists) throw_cli_error(ManageCLIError.BucketAlreadyExists, data.name, { bucket: data.name }); data._id = mongo_utils.mongoObjectId(); - const data_json = JSON.stringify(data); - // We take an object that was stringify - // (it unwraps ths sensitive strings, creation_date to string and removes undefined parameters) - // for validating against the schema we need an object, hence we parse it back to object - nsfs_schema_utils.validate_bucket_schema(JSON.parse(data_json)); - await config_fs.create_bucket_config_file(data.name, data_json); - write_stdout_response(ManageCLIResponse.BucketCreated, data_json, { bucket: data.name }); + const bucket_data = await config_fs.create_bucket_config_file(data); + return { code: ManageCLIResponse.BucketCreated, bucket_data, event_arg: { bucket: data.name }}; } +/** + * get_bucket_status returns the bucket data by the provided bucket name + * @param {Object} data + * @returns { Promise<{ code: typeof ManageCLIResponse.BucketStatus, bucket_data: Object }>} + */ async function get_bucket_status(data) { - await manage_nsfs_validations.validate_bucket_args(config_fs, data, ACTIONS.STATUS); - try { - const config_data = await config_fs.get_bucket_by_name(data.name); - write_stdout_response(ManageCLIResponse.BucketStatus, config_data); + const bucket_data = await config_fs.get_bucket_by_name(data.name); + return { code: ManageCLIResponse.BucketStatus, bucket_data }; } catch (err) { const err_code = err.code === 'EACCES' ? ManageCLIError.AccessDenied : ManageCLIError.NoSuchBucket; throw_cli_error(err_code, data.name); } } +/** + * update_bucket does the following - + * 1. checks if the update includes bucket name update + * 1.1. if yes - a new config file should be created and the old one should be removed + * 1.2. 
else - update the config file + * @param {Object} data + * @returns { Promise<{ bucket_data: Object, code: typeof ManageCLIResponse.BucketUpdated }>} + */ async function update_bucket(data) { - await manage_nsfs_validations.validate_bucket_args(config_fs, data, ACTIONS.UPDATE); const cur_name = data.name; const new_name = data.new_name; - const update_name = new_name && cur_name && new_name !== cur_name; - - if (!update_name) { - data = JSON.stringify(data); - // We take an object that was stringify - // (it unwraps ths sensitive strings, creation_date to string and removes undefined parameters) - // for validating against the schema we need an object, hence we parse it back to object - nsfs_schema_utils.validate_bucket_schema(JSON.parse(data)); - await config_fs.update_bucket_config_file(cur_name, data); - write_stdout_response(ManageCLIResponse.BucketUpdated, data); - return; - } - data.name = new_name; - - const exists = await config_fs.is_bucket_exists(data.name); - if (exists) throw_cli_error(ManageCLIError.BucketAlreadyExists, data.name); + let bucket_data; + if (_is_name_update(data)) { + bucket_data = await config_fs.create_bucket_config_file({ ...data, name: new_name }); + await config_fs.delete_bucket_config_file(cur_name); + } else { + bucket_data = await config_fs.update_bucket_config_file(data); + } + return { code: ManageCLIResponse.BucketUpdated, bucket_data }; - data = JSON.stringify(_.omit(data, ['new_name'])); - // We take an object that was stringify - // (it unwraps ths sensitive strings, creation_date to string and removes undefined parameters) - // for validating against the schema we need an object, hence we parse it back to object - nsfs_schema_utils.validate_bucket_schema(JSON.parse(data)); - await config_fs.create_bucket_config_file(new_name, data); - await config_fs.delete_bucket_config_file(cur_name); - write_stdout_response(ManageCLIResponse.BucketUpdated, data); } +/** + * delete_bucket deletes a bucket - + * 1. if there are existing objects in the bucket and force flag was not provided the deletion will be blocked + * @param {Object} data + * @param {Boolean} force + * @returns { Promise<{ code: typeof ManageCLIResponse.BucketDeleted, bucket_data: Object, event_arg: Object }>} + */ async function delete_bucket(data, force) { - await manage_nsfs_validations.validate_bucket_args(config_fs, data, ACTIONS.DELETE); try { const temp_dir_name = native_fs_utils.get_bucket_tmpdir_name(data._id); const bucket_temp_dir_path = native_fs_utils.get_bucket_tmpdir_full_path(data.path, data._id); @@ -230,49 +224,46 @@ async function delete_bucket(data, force) { } await native_fs_utils.folder_delete(bucket_temp_dir_path, fs_context_fs_backend, true); await config_fs.delete_bucket_config_file(data.name); - write_stdout_response(ManageCLIResponse.BucketDeleted, '', { bucket: data.name }); + return { code: ManageCLIResponse.BucketDeleted, bucket_data: '', event_arg: { bucket: data.name } }; } catch (err) { if (err.code === 'ENOENT') throw_cli_error(ManageCLIError.NoSuchBucket, data.name); throw err; } } -async function manage_bucket_operations(action, data, user_input) { +/** + * bucket_management does the following - + * 1. fetches the bucket data if this is not a list operation + * 2. validates bucket args - TODO - we should split it to validate_bucket_args + * and validations of the merged (user_input + existing bucket config) bucket + * 3. call bucket operation based on the action argument + * 4. 
write output to stdout + * @param {String} action + * @param {Object} user_input + */ +async function bucket_management(action, user_input) { + const data = action === ACTIONS.LIST ? undefined : await fetch_bucket_data(action, user_input); + await manage_nsfs_validations.validate_bucket_args(config_fs, data, action); + + let res; if (action === ACTIONS.ADD) { - await add_bucket(data); + res = await add_bucket(data); } else if (action === ACTIONS.STATUS) { - await get_bucket_status(data); + res = await get_bucket_status(data); } else if (action === ACTIONS.UPDATE) { - await update_bucket(data); + res = await update_bucket(data); } else if (action === ACTIONS.DELETE) { const force = get_boolean_or_string_value(user_input.force); - await delete_bucket(data, force); + res = await delete_bucket(data, force); } else if (action === ACTIONS.LIST) { const bucket_filters = _.pick(user_input, LIST_BUCKET_FILTERS); const wide = get_boolean_or_string_value(user_input.wide); - const buckets = await list_config_files(TYPES.BUCKET, wide, undefined, bucket_filters); - write_stdout_response(ManageCLIResponse.BucketList, buckets); + res = { code: ManageCLIResponse.BucketList }; + res.bucket_data = await list_config_files(TYPES.BUCKET, wide, undefined, bucket_filters); } else { - // we should not get here (we check it before) throw_cli_error(ManageCLIError.InvalidAction); } -} - -async function account_management(action, user_input) { - const show_secrets = get_boolean_or_string_value(user_input.show_secrets); - if (get_boolean_or_string_value(user_input.anonymous)) { - user_input.name = config.ANONYMOUS_ACCOUNT_NAME; - user_input.email = config.ANONYMOUS_ACCOUNT_NAME; - } - // init nc_mkm here to avoid concurrent initializations - // init if actions is add/update (require encryption) or show_secrets = true (require decryption) - if ([ACTIONS.ADD, ACTIONS.UPDATE].includes(action) || show_secrets) await nc_mkm.init(); - - let data; - if (action !== ACTIONS.LIST) { - data = await fetch_account_data(action, user_input); - } - await manage_account_operations(action, data, show_secrets, user_input); + write_stdout_response(res.code, res.bucket_data, res.event_arg); } /** @@ -383,111 +374,73 @@ async function fetch_existing_account_data(action, target, decrypt_secret_key) { return data; } -async function add_account(data) { - await manage_nsfs_validations.validate_account_args(config_fs, data, ACTIONS.ADD, undefined); +async function add_account(account_data) { - const access_key = has_access_keys(data.access_keys) ? data.access_keys[0].access_key : undefined; - const name_exists = await config_fs.is_account_exists_by_name(data.name, undefined); + const access_key = has_access_keys(account_data.access_keys) ? account_data.access_keys[0].access_key : undefined; + const name_exists = await config_fs.is_account_exists_by_name(account_data.name, undefined); const access_key_exists = access_key && await config_fs.is_account_exists_by_access_key(access_key); - const event_arg = data.name ? data.name : access_key; + const event_arg = account_data.name ? account_data.name : access_key; if (name_exists || access_key_exists) { const err_code = name_exists ? 
ManageCLIError.AccountNameAlreadyExists : ManageCLIError.AccountAccessKeyAlreadyExists; - throw_cli_error(err_code, event_arg, {account: event_arg}); + throw_cli_error(err_code, event_arg, { account: event_arg }); } - data._id = mongo_utils.mongoObjectId(); - const encrypted_account = await nc_mkm.encrypt_access_keys(data); - data.master_key_id = encrypted_account.master_key_id; - const encrypted_data = JSON.stringify(encrypted_account); - data = _.omitBy(data, _.isUndefined); - // We take an object that was stringify - // (it unwraps ths sensitive strings, creation_date to string and removes undefined parameters) - // for validating against the schema we need an object, hence we parse it back to object - const account = encrypted_data ? JSON.parse(encrypted_data) : data; - nsfs_schema_utils.validate_account_schema(account); - await config_fs.create_account_config_file(account); - write_stdout_response(ManageCLIResponse.AccountCreated, data, { account: event_arg }); + account_data._id = mongo_utils.mongoObjectId(); + await config_fs.create_account_config_file(account_data); + return { code: ManageCLIResponse.AccountCreated, account_data, event_arg: { account: event_arg } }; } - -async function update_account(data, is_flag_iam_operate_on_root_account) { - await manage_nsfs_validations.validate_account_args(config_fs, data, ACTIONS.UPDATE, is_flag_iam_operate_on_root_account); - - const cur_name = data.name; - const new_name = data.new_name; - const cur_access_key = has_access_keys(data.access_keys) ? data.access_keys[0].access_key : undefined; - const update_name = new_name && cur_name && data.new_name !== cur_name; - const update_access_key = data.new_access_key && cur_access_key && data.new_access_key.unwrap() !== cur_access_key.unwrap(); - - if (!update_name && !update_access_key) { - if (data.new_access_key) { - // the user set the same access-key as was before - data.access_keys[0] = _.pick(data.access_keys[0], ['access_key', 'secret_key']); - data = _.omit(data, ['new_access_key']); - } - const encrypted_account = await nc_mkm.encrypt_access_keys(data); - data.master_key_id = encrypted_account.master_key_id; - const encrypted_data = JSON.stringify(encrypted_account); - data = _.omitBy(data, _.isUndefined); - // We take an object that was stringify - // (it unwraps ths sensitive strings, creation_date to string and removes undefined parameters) - // for validating against the schema we need an object, hence we parse it back to object - const account = encrypted_data ? JSON.parse(encrypted_data) : data; - nsfs_schema_utils.validate_account_schema(account); - await config_fs.update_account_config_file(account); - write_stdout_response(ManageCLIResponse.AccountUpdated, data); - return; - } - const data_name = new_name || cur_name; - data.name = data_name; - data.email = data_name; // saved internally - data.access_keys[0] = { - access_key: data.new_access_key || cur_access_key, - secret_key: data.access_keys[0].secret_key, - }; - - const name_exists = update_name && await config_fs.is_account_exists_by_name(data.name, undefined); - const access_key_exists = update_access_key && - await config_fs.is_account_exists_by_access_key(data.access_keys[0].access_key.unwrap()); - - if (name_exists || access_key_exists) { - const err_code = name_exists ? 
ManageCLIError.AccountNameAlreadyExists : ManageCLIError.AccountAccessKeyAlreadyExists; - throw_cli_error(err_code); - } - data = _.omit(data, ['new_name', 'new_access_key']); - const encrypted_account = await nc_mkm.encrypt_access_keys(data); - data.master_key_id = encrypted_account.master_key_id; - const encrypted_data = JSON.stringify(encrypted_account); - data = JSON.stringify(data); - - // We take an object that was stringify - // (it unwraps ths sensitive strings, creation_date to string and removes undefined parameters) - // for validating against the schema we need an object, hence we parse it back to object - const parsed_data = JSON.parse(encrypted_data); - nsfs_schema_utils.validate_account_schema(parsed_data); - await config_fs.update_account_config_file(parsed_data, { +/** + * update_account updates an existing account config file; + * if a new_name was provided it will pass old_name to the update function for unlinking the index + * if a new_access_key was provided it will pass the cur_access_key for unlinking the index + * @param {Object} account_data + * @returns { Promise<{ code: typeof ManageCLIResponse.AccountUpdated, account_data: Object }>} + */ +async function update_account(account_data) { + const cur_name = account_data.name; + const new_name = account_data.new_name; + const cur_access_key = has_access_keys(account_data.access_keys) ? account_data.access_keys[0].access_key : undefined; + const update_name = _is_name_update(account_data); + const update_access_key = _is_access_key_update(account_data); + + const account_name = new_name || cur_name; + account_data.name = account_name; + account_data.email = account_name; // saved internally + account_data.access_keys[0].access_key = account_data.new_access_key || cur_access_key; + + await config_fs.update_account_config_file(account_data, { old_name: update_name && cur_name, - new_access_keys_to_link: update_access_key && parsed_data.access_keys, + new_access_keys_to_link: update_access_key && account_data.access_keys, access_keys_to_delete: update_access_key && [{ access_key: cur_access_key }] }); - write_stdout_response(ManageCLIResponse.AccountUpdated, data); + return { code: ManageCLIResponse.AccountUpdated, account_data }; } +/** + * delete_account deletes an account + * @param {Object} data + * @returns { Promise<{ code: typeof ManageCLIResponse.AccountDeleted, account_data: Object, event_arg: Object }>} + */ async function delete_account(data) { - await manage_nsfs_validations.validate_account_args(config_fs, data, ACTIONS.DELETE, undefined); await config_fs.delete_account_config_file(data._id, data.name, data.access_keys); - write_stdout_response(ManageCLIResponse.AccountDeleted, '', { account: data.name }); + return { code: ManageCLIResponse.AccountDeleted, account_data: '', event_arg: { account: data.name } }; } +/** + * get_account_status returns the account data by the provided account name/access key + * @param {Object} data + * @param {Boolean} [show_secrets] + * @returns { Promise<{ code: typeof ManageCLIResponse.AccountStatus, account_data: Object }>} + */ async function get_account_status(data, show_secrets) { - await manage_nsfs_validations.validate_account_args(config_fs, data, ACTIONS.STATUS, undefined); const options = { show_secrets, decrypt_secret_key: show_secrets }; try { - const config_data = data.name === undefined ? + const account_data = data.name === undefined ?
await config_fs.get_account_by_access_key(data.access_keys[0].access_key, options) : await config_fs.get_account_by_name(data.name, options); - write_stdout_response(ManageCLIResponse.AccountStatus, config_data); + return { code: ManageCLIResponse.AccountStatus, account_data }; } catch (err) { if (err.code !== 'ENOENT') throw err; if (data.name === undefined) { @@ -498,25 +451,49 @@ async function get_account_status(data, show_secrets) { } } -async function manage_account_operations(action, data, show_secrets, user_input) { +/** + * account_management does the following - + * 1. sets variables by the user input options + * 2. initializes nc_master_key_manager on UPDATE/ADD/show_secrets + * 3. validates account args - TODO - we should split it into validate_account_args + * and validations of the merged account (user_input + existing account config) + * 4. call account operation based on the action argument + * 5. write output to stdout + * @param {String} action + * @param {Object} user_input + */ +async function account_management(action, user_input) { + const show_secrets = get_boolean_or_string_value(user_input.show_secrets); + const is_flag_iam_operate_on_root_account = get_boolean_or_string_value(user_input.iam_operate_on_root_account); + const account_filters = _.pick(user_input, LIST_ACCOUNT_FILTERS); + const wide = get_boolean_or_string_value(user_input.wide); + if (get_boolean_or_string_value(user_input.anonymous)) { + user_input.name = config.ANONYMOUS_ACCOUNT_NAME; + user_input.email = config.ANONYMOUS_ACCOUNT_NAME; + } + // init nc_mkm here to avoid concurrent initializations + // init if actions is add/update (require encryption) or show_secrets = true (require decryption) + if ([ACTIONS.ADD, ACTIONS.UPDATE].includes(action) || show_secrets) await nc_mkm.init(); + const data = action === ACTIONS.LIST ?
undefined : await fetch_account_data(action, user_input); + await manage_nsfs_validations.validate_account_args(config_fs, data, action, is_flag_iam_operate_on_root_account); + + let res = {}; if (action === ACTIONS.ADD) { - await add_account(data); + res = await add_account(data); } else if (action === ACTIONS.STATUS) { - await get_account_status(data, show_secrets); + res = await get_account_status(data, show_secrets); } else if (action === ACTIONS.UPDATE) { - const is_flag_iam_operate_on_root_account = get_boolean_or_string_value(user_input.iam_operate_on_root_account); - await update_account(data, is_flag_iam_operate_on_root_account); + res = await update_account(data); } else if (action === ACTIONS.DELETE) { - await delete_account(data); + res = await delete_account(data); } else if (action === ACTIONS.LIST) { - const account_filters = _.pick(user_input, LIST_ACCOUNT_FILTERS); - const wide = get_boolean_or_string_value(user_input.wide); - const accounts = await list_config_files(TYPES.ACCOUNT, wide, show_secrets, account_filters); - write_stdout_response(ManageCLIResponse.AccountList, accounts); + res = { code: ManageCLIResponse.AccountList }; + res.account_data = await list_config_files(TYPES.ACCOUNT, wide, show_secrets, account_filters); } else { - // we should not get here (we check it before) throw_cli_error(ManageCLIError.InvalidAction); } + write_stdout_response(res.code, res.account_data, res.event_arg); + } /** diff --git a/src/manage_nsfs/manage_nsfs_cli_utils.js b/src/manage_nsfs/manage_nsfs_cli_utils.js index e0e92f43ca..00551708d7 100644 --- a/src/manage_nsfs/manage_nsfs_cli_utils.js +++ b/src/manage_nsfs/manage_nsfs_cli_utils.js @@ -129,6 +129,30 @@ function check_root_account_owns_user(root_account, account) { } +/** + * _is_name_update returns true if a new_name flag was provided and it's not equal to + * the current name + * @param {Object} data + * @returns {Boolean} + */ +function _is_name_update(data) { + const cur_name = data.name; + const new_name = data.new_name; + return new_name && cur_name && new_name !== cur_name; +} + +/** + * _is_access_key_update returns true if a new_access_key flag was provided and it's not equal to + * the current access_key + * @param {Object} data + * @returns {Boolean} + */ +function _is_access_key_update(data) { const cur_access_key = has_access_keys(data.access_keys) ?
data.access_keys[0].access_key.unwrap() : undefined; + const new_access_key = data.new_access_key; + return new_access_key && cur_access_key && new_access_key !== cur_access_key; +} + // EXPORTS exports.throw_cli_error = throw_cli_error; exports.write_stdout_response = write_stdout_response; @@ -139,3 +163,5 @@ exports.has_access_keys = has_access_keys; exports.generate_id = generate_id; exports.set_debug_level = set_debug_level; exports.check_root_account_owns_user = check_root_account_owns_user; +exports._is_name_update = _is_name_update; +exports._is_access_key_update = _is_access_key_update; diff --git a/src/manage_nsfs/manage_nsfs_validations.js b/src/manage_nsfs/manage_nsfs_validations.js index 6e49082bbe..2c800cda71 100644 --- a/src/manage_nsfs/manage_nsfs_validations.js +++ b/src/manage_nsfs/manage_nsfs_validations.js @@ -10,7 +10,7 @@ const native_fs_utils = require('../util/native_fs_utils'); const ManageCLIError = require('../manage_nsfs/manage_nsfs_cli_errors').ManageCLIError; const bucket_policy_utils = require('../endpoint/s3/s3_bucket_policy_utils'); const { throw_cli_error, get_bucket_owner_account, get_options_from_file, get_boolean_or_string_value, - check_root_account_owns_user } = require('../manage_nsfs/manage_nsfs_cli_utils'); + check_root_account_owns_user, _is_name_update, _is_access_key_update } = require('../manage_nsfs/manage_nsfs_cli_utils'); const { TYPES, ACTIONS, VALID_OPTIONS, OPTION_TYPE, FROM_FILE, BOOLEAN_STRING_VALUES, BOOLEAN_STRING_OPTIONS, GLACIER_ACTIONS, LIST_UNSETABLE_OPTIONS, ANONYMOUS, DIAGNOSE_ACTIONS } = require('../manage_nsfs/manage_nsfs_constants'); const iam_utils = require('../endpoint/iam/iam_utils'); @@ -306,6 +306,22 @@ if (action === ACTIONS.STATUS || action === ACTIONS.ADD || action === ACTIONS.UP // in list there is no identifier } +/** + * check_new_bucket_name_exists validates that the target bucket name (new_name on update, otherwise name) does not already exist + * @param {import('../sdk/config_fs').ConfigFS} config_fs + * @param {string} action + * @param {object} data + */ +async function check_new_bucket_name_exists(config_fs, action, data) { + let new_bucket_name = data.name; + if (action === ACTIONS.UPDATE) { + if (!data.new_name) return; + new_bucket_name = data.new_name; + } + const exists = await config_fs.is_bucket_exists(new_bucket_name); + if (exists) throw_cli_error(ManageCLIError.BucketAlreadyExists, new_bucket_name, { bucket: new_bucket_name }); +} + /** * validate_bucket_args will validate the cli args of the bucket command * @param {import('../sdk/config_fs').ConfigFS} config_fs @@ -322,6 +338,7 @@ async function validate_bucket_args(config_fs, data, action) { if (data.fs_backend !== undefined && !['GPFS', 'CEPH_FS', 'NFSv4'].includes(data.fs_backend)) { throw_cli_error(ManageCLIError.InvalidFSBackend); } + await check_new_bucket_name_exists(config_fs, action, data); // in case we have the fs_backend it changes the fs_context that we use for the path const fs_context_fs_backend = native_fs_utils.get_process_fs_context(data.fs_backend); const exists = await native_fs_utils.is_path_exists(fs_context_fs_backend, data.path); @@ -393,6 +410,15 @@ function validate_account_identifier(action, input_options) { */ async function validate_account_args(config_fs, data, action, is_flag_iam_operate_on_root_account_update_action) { if (action === ACTIONS.ADD || action === ACTIONS.UPDATE) { + const update_name = _is_name_update(data); + const update_access_key = _is_access_key_update(data); + + const name_exists = update_name && await
config_fs.is_account_exists_by_name(data.new_name); + const access_key_exists = update_access_key && await config_fs.is_account_exists_by_access_key(data.new_access_keys); + if (name_exists || access_key_exists) { + const err_code = name_exists ? ManageCLIError.AccountNameAlreadyExists : ManageCLIError.AccountAccessKeyAlreadyExists; + throw_cli_error(err_code); + } if (data.nsfs_account_config.gid && data.nsfs_account_config.uid === undefined) { throw_cli_error(ManageCLIError.MissingAccountNSFSConfigUID, data.nsfs_account_config); } diff --git a/src/sdk/accountspace_fs.js b/src/sdk/accountspace_fs.js index 14a6421caa..18728412d7 100644 --- a/src/sdk/accountspace_fs.js +++ b/src/sdk/accountspace_fs.js @@ -10,12 +10,10 @@ const native_fs_utils = require('../util/native_fs_utils'); const { create_arn, get_action_message_title, check_iam_path_was_set } = require('../endpoint/iam/iam_utils'); const { IAM_ACTIONS, MAX_NUMBER_OF_ACCESS_KEYS, IAM_DEFAULT_PATH, ACCESS_KEY_STATUS_ENUM, IDENTITY_ENUM } = require('../endpoint/iam/iam_constants'); -const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils'); const IamError = require('../endpoint/iam/iam_errors').IamError; const cloud_utils = require('../util/cloud_utils'); const SensitiveString = require('../util/sensitive_string'); const { generate_id } = require('../manage_nsfs/manage_nsfs_cli_utils'); -const nc_mkm = require('../manage_nsfs/nc_master_key_manager').get_instance(); const { account_cache } = require('./object_sdk'); @@ -150,10 +148,7 @@ class AccountSpaceFS { is_username_update); await this._update_account_config_new_username(action, params, requested_account); } else { - const requested_account_encrypted = await nc_mkm.encrypt_access_keys(requested_account); - const account_string = JSON.stringify(requested_account_encrypted); - nsfs_schema_utils.validate_account_schema(JSON.parse(account_string)); - await this.config_fs.update_account_config_file(JSON.parse(account_string)); + await this.config_fs.update_account_config_file(requested_account); } this._clean_account_cache(requested_account); return { @@ -265,11 +260,8 @@ class AccountSpaceFS { deactivated: false, }; requested_account.access_keys.push(created_access_key_obj); - const requested_account_encrypted = await nc_mkm.encrypt_access_keys(requested_account); - const account_to_create_access_keys_string = JSON.stringify(requested_account_encrypted); - nsfs_schema_utils.validate_account_schema(JSON.parse(account_to_create_access_keys_string)); await this.config_fs.update_account_config_file( - JSON.parse(account_to_create_access_keys_string), + requested_account, { new_access_keys_to_link: [created_access_key_obj] } ); return { @@ -355,10 +347,7 @@ class AccountSpaceFS { return; } access_key_obj.deactivated = this._check_access_key_is_deactivated(params.status); - const requested_account_encrypted = await nc_mkm.encrypt_access_keys(requested_account); - const account_string = JSON.stringify(requested_account_encrypted); - nsfs_schema_utils.validate_account_schema(JSON.parse(account_string)); - await this.config_fs.update_account_config_file(JSON.parse(account_string)); + await this.config_fs.update_account_config_file(requested_account); this._clean_account_cache(requested_account); } catch (err) { dbg.error(`AccountSpaceFS.${action} error`, err); @@ -398,11 +387,8 @@ class AccountSpaceFS { } requested_account.access_keys = requested_account.access_keys.filter(access_key_obj => access_key_obj.access_key !== access_key_id); - const requested_account_encrypted = await 
nc_mkm.encrypt_access_keys(requested_account); - const account_string = JSON.stringify(requested_account_encrypted); - nsfs_schema_utils.validate_account_schema(JSON.parse(account_string)); await this.config_fs.update_account_config_file( - JSON.parse(account_string), + requested_account, { access_keys_to_delete: [{ access_key: access_key_id }] } ); this._clean_account_cache(requested_account); @@ -625,12 +611,9 @@ class AccountSpaceFS { } async _copy_data_from_requesting_account_to_account_config(action, requesting_account, params) { - const master_key_id = await nc_mkm.get_active_master_key_id(); - const created_account = this._new_user_defaults(requesting_account, params, master_key_id); + const created_account = this._new_user_defaults(requesting_account, params); dbg.log1(`AccountSpaceFS.${action} new_account`, created_account); - const new_account_string = JSON.stringify(created_account); - nsfs_schema_utils.validate_account_schema(JSON.parse(new_account_string)); - await this.config_fs.create_account_config_file(JSON.parse(new_account_string)); + await this.config_fs.create_account_config_file(created_account); return created_account; } @@ -665,8 +648,6 @@ class AccountSpaceFS { this._check_if_user_does_not_have_access_keys_before_deletion(action, account_to_delete); } - // TODO - when we have the structure of config we can check easily which buckets are owned by the root account - // currently, partial copy from verify_account_not_owns_bucket async _check_if_root_account_does_not_have_buckets_before_deletion(action, account_to_delete) { const resource_name = 'buckets'; const bucket_names = await this.config_fs.list_buckets(); @@ -710,10 +691,7 @@ class AccountSpaceFS { requested_account.name = params.new_username; requested_account.email = params.new_username; // internally saved // handle account config creation - const requested_account_encrypted = await nc_mkm.encrypt_access_keys(requested_account); - const account_string = JSON.stringify(requested_account_encrypted); - nsfs_schema_utils.validate_account_schema(JSON.parse(account_string)); - await this.config_fs.update_account_config_file(JSON.parse(account_string), { old_name: params.username }); + await this.config_fs.update_account_config_file(requested_account, { old_name: params.username }); } _check_root_account_or_user(requesting_account, username) { diff --git a/src/sdk/config_fs.js b/src/sdk/config_fs.js index cd7ddad5f7..fc39024dd7 100644 --- a/src/sdk/config_fs.js +++ b/src/sdk/config_fs.js @@ -9,6 +9,8 @@ const nb_native = require('../util/nb_native'); const native_fs_utils = require('../util/native_fs_utils'); const nc_mkm = require('../manage_nsfs/nc_master_key_manager').get_instance(); const { TYPES } = require('../manage_nsfs/manage_nsfs_constants'); +const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils'); +const { IS_MAC } = require('../util/os_utils'); /* Config directory sub directory comments - On 5.18 - @@ -517,7 +519,7 @@ class ConfigFS { */ async create_account_config_file(account_data) { const { name, _id, owner = undefined } = account_data; - const data_string = JSON.stringify(account_data); + const data_string = await this._prepare_for_account_schema(account_data); const account_path = this.get_identity_path_by_id(_id); const account_dir_path = this.get_identity_dir_path_by_id(_id); @@ -544,7 +546,7 @@ class ConfigFS { */ async update_account_config_file(account_new_data, options = {}) { const { name, _id, owner = undefined } = account_new_data; - const data_string = 
JSON.stringify(account_new_data); + const data_string = await this._prepare_for_account_schema(account_new_data); const account_path = this.get_identity_path_by_id(_id); const account_dir_path = this.get_identity_dir_path_by_id(_id); await native_fs_utils.update_config_file(this.fs_context, account_dir_path, account_path, data_string); @@ -607,7 +609,8 @@ class ConfigFS { async unlink_account_name_index(account_name, account_id_config_path) { const account_name_path = this.get_account_path_by_name(account_name); const full_path = await nb_native().fs.realpath(this.fs_context, account_name_path); - if (full_path === account_id_config_path) { + if (full_path === account_id_config_path || + (IS_MAC && full_path === path.join('/private/', account_id_config_path))) { await nb_native().fs.unlink(this.fs_context, account_name_path); } } @@ -639,7 +642,8 @@ class ConfigFS { async unlink_access_key_index(access_key, account_id_config_path) { const access_key_path = this.get_account_or_user_path_by_access_key(access_key); const full_path = await nb_native().fs.realpath(this.fs_context, access_key_path); - if (full_path === account_id_config_path) { + if (full_path === account_id_config_path || + (IS_MAC && full_path === path.join('/private/', account_id_config_path))) { await nb_native().fs.unlink(this.fs_context, access_key_path); } } @@ -709,24 +713,63 @@ class ConfigFS { /** * create_bucket_config_file creates bucket config file - * @param {string} bucket_name - * @param {*} data - * @returns {Promise} + * @param {Object} bucket_data + * @returns {Promise} */ - async create_bucket_config_file(bucket_name, data) { - const bucket_path = this.get_bucket_path_by_name(bucket_name); - await native_fs_utils.create_config_file(this.fs_context, this.buckets_dir_path, bucket_path, data); + async create_bucket_config_file(bucket_data) { + const bucket_string_data = this._prepare_for_bucket_schema(bucket_data); + const bucket_path = this.get_bucket_path_by_name(bucket_data.name); + await native_fs_utils.create_config_file(this.fs_context, this.buckets_dir_path, bucket_path, bucket_string_data); + return bucket_string_data; } /** - * update_bucket_config_file updates bucket config file - * @param {string} bucket_name - * @param {*} data - * @returns {Promise} + * _prepare_for_bucket_schema takes bucket data - + * 1. removes API bucket properties + * 2. removes undefined properties, unwraps sensitive strings and converts creation_date to string + * 3. validates the data against the bucket schema + * 4. returns the stringified data ready to be written to the config directory + * @param {Object} bucket_data + * @returns {String} */ - async update_bucket_config_file(bucket_name, data) { - const bucket_config_path = this.get_bucket_path_by_name(bucket_name); - await native_fs_utils.update_config_file(this.fs_context, this.buckets_dir_path, bucket_config_path, data); + _prepare_for_bucket_schema(bucket_data) { + const api_bucket_properties_to_remove = ['new_name']; + const bucket_data_api_props_omitted = _.omit(bucket_data, api_bucket_properties_to_remove); + const bucket_string_data = JSON.stringify(bucket_data_api_props_omitted); + nsfs_schema_utils.validate_bucket_schema(JSON.parse(bucket_string_data)); + return bucket_string_data; + } + + /** + * _prepare_for_account_schema takes account data - + * 1. encrypts its access keys + * 2. sets the used master key on the account + * 3. removes API account properties + * 4. removes undefined properties, unwraps sensitive strings and converts creation_date to string + * 5.
validates the data against the account schema + * 6. returns the stringified data ready to be written to the config directory + * @param {Object} account_data + * @returns {Promise} + */ + async _prepare_for_account_schema(account_data) { + const encrypted_account = await nc_mkm.encrypt_access_keys(account_data); + const api_account_properties_to_remove = ['new_name', 'new_access_key']; + const account_data_api_props_omitted = _.omit(encrypted_account, api_account_properties_to_remove); + const account_string_data = JSON.stringify(account_data_api_props_omitted); + nsfs_schema_utils.validate_account_schema(JSON.parse(account_string_data)); + return account_string_data; + } + + /** + * update_bucket_config_file updates bucket config file + * @param {Object} bucket_data + * @returns {Promise} + */ + async update_bucket_config_file(bucket_data) { + const bucket_string_data = this._prepare_for_bucket_schema(bucket_data); + const bucket_config_path = this.get_bucket_path_by_name(bucket_data.name); + await native_fs_utils.update_config_file(this.fs_context, this.buckets_dir_path, bucket_config_path, bucket_string_data); + return bucket_string_data; } /** diff --git a/src/test/unit_tests/test_nc_nsfs_health.js b/src/test/unit_tests/test_nc_nsfs_health.js index 3cba9a38f3..a42858d6d6 100644 --- a/src/test/unit_tests/test_nc_nsfs_health.js +++ b/src/test/unit_tests/test_nc_nsfs_health.js @@ -125,7 +125,7 @@ mocha.describe('nsfs nc health', function() { await fs_utils.file_must_exist(new_buckets_path + '/bucket1'); await config_fs.create_config_dirs_if_missing(); await config_fs.create_account_config_file(account1); - await config_fs.create_bucket_config_file(bucket1.name, JSON.stringify(bucket1)); + await config_fs.create_bucket_config_file(bucket1); const get_service_memory_usage = sinon.stub(Health, "get_service_memory_usage"); get_service_memory_usage.onFirstCall().returns(Promise.resolve(100)); for (const user of Object.values(fs_users)) { @@ -224,7 +224,7 @@ mocha.describe('nsfs nc health', function() { Health.get_service_state.restore(); Health.get_endpoint_response.restore(); const bucket_invalid_schema = { name: 'bucket_invalid_schema', path: new_buckets_path }; - await config_fs.create_bucket_config_file(bucket_invalid_schema.name, JSON.stringify(bucket_invalid_schema) + 'invalid'); + await config_fs.create_bucket_config_file(bucket_invalid_schema); const get_service_state = sinon.stub(Health, "get_service_state"); get_service_state.onFirstCall().returns(Promise.resolve({ service_status: 'active', pid: 1000 })) .onSecondCall().returns(Promise.resolve({ service_status: 'active', pid: 2000 }));