Skip to content

Commit 46ccb82

Browse files
authored
Merge pull request #8286 from jackyalbo/jacky-pers_log
Fixing bucket logging after refactor
2 parents 528a6e2 + 8c21f68 commit 46ccb82

File tree

7 files changed

+72
-16
lines changed

7 files changed

+72
-16
lines changed

src/manage_nsfs/manage_nsfs_logging.js

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -3,15 +3,18 @@
33

44
const config = require('../../config');
55
const { throw_cli_error, write_stdout_response} = require('../manage_nsfs/manage_nsfs_cli_utils');
6-
const nc_mkm = require('../manage_nsfs/nc_master_key_manager').get_instance();
76
const ManageCLIError = require('../manage_nsfs/manage_nsfs_cli_errors').ManageCLIError;
87
const ManageCLIResponse = require('../manage_nsfs/manage_nsfs_cli_responses').ManageCLIResponse;
98
const { export_logs_to_target } = require('../util/bucket_logs_utils');
109
const http_utils = require('../util/http_utils');
1110
const AWS = require('aws-sdk');
1211

13-
// This command goes over the logs in the persistent log and moves the entries to log objects in the target buckets
14-
async function export_bucket_logging(config_fs) {
12+
let config_fs;
13+
/** This command goes over the logs in the persistent log and moves the entries to log objects in the target buckets
14+
/* @param {import('../sdk/config_fs').ConfigFS} shared_config_fs
15+
*/
16+
async function export_bucket_logging(shared_config_fs) {
17+
config_fs = shared_config_fs;
1518
const endpoint = `https://127.0.0.1:${config.ENDPOINT_SSL_PORT}`;
1619
const noobaa_con = new AWS.S3({
1720
endpoint,
@@ -21,7 +24,7 @@ async function export_bucket_logging(config_fs) {
2124
agent: http_utils.get_unsecured_agent(endpoint)
2225
}
2326
});
24-
const success = await export_logs_to_target(config_fs, noobaa_con, get_bucket_owner_keys);
27+
const success = await export_logs_to_target(config_fs.fs_context, noobaa_con, get_bucket_owner_keys);
2528
if (success) {
2629
write_stdout_response(ManageCLIResponse.LoggingExported);
2730
} else {
@@ -31,15 +34,14 @@ async function export_bucket_logging(config_fs) {
3134

3235
/**
3336
* return bucket owner's access and secret key
34-
* @param {import('../sdk/config_fs').ConfigFS} config_fs
3537
* @param {string} log_bucket_name
3638
* @returns {Promise<Object>}
3739
*/
38-
async function get_bucket_owner_keys(config_fs, log_bucket_name) {
40+
async function get_bucket_owner_keys(log_bucket_name) {
3941
const log_bucket_config_data = await config_fs.get_bucket_by_name(log_bucket_name);
4042
const log_bucket_owner = log_bucket_config_data.bucket_owner;
4143
const owner_config_data = await config_fs.get_account_by_name(log_bucket_owner, { show_secrets: true, decrypt_secret_key: true });
42-
return nc_mkm.decrypt_access_keys(owner_config_data);
44+
return owner_config_data.access_keys;
4345
}
4446

4547
exports.export_bucket_logging = export_bucket_logging;

src/test/system_tests/test_utils.js

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -239,7 +239,7 @@ function get_coretest_path() {
239239
* @param {object} options
240240
* @returns {Promise<string>}
241241
*/
242-
async function exec_manage_cli(type, action, options, is_silent) {
242+
async function exec_manage_cli(type, action, options, is_silent, env) {
243243
let flags = ``;
244244
for (const key in options) {
245245
if (options[key] !== undefined) {
@@ -264,7 +264,10 @@ async function exec_manage_cli(type, action, options, is_silent) {
264264

265265
const command = `node src/cmd/manage_nsfs ${type} ${action} ${flags}`;
266266
try {
267-
const res = await os_utils.exec(command, { return_stdout: true });
267+
const res = await os_utils.exec(command, {
268+
return_stdout: true,
269+
env,
270+
});
268271
return res;
269272
} catch (err) {
270273
console.error('test_utils.exec_manage_cli error', err);

src/test/unit_tests/index.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,7 @@ require('./test_upgrade_scripts.js');
9898
require('./test_tiering_ttl_worker');
9999
// require('./test_tiering_upload');
100100
//require('./test_s3_worm');
101+
require('./test_bucket_logging');
101102

102103
// UPGRADE
103104
// require('./test_postgres_upgrade'); // TODO currently working with mongo -> once changing to postgres - need to uncomment

src/test/unit_tests/nc_index.js

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ require('./test_nsfs_glacier_backend');
1919
require('./test_s3_bucket_policy');
2020
require('./test_nsfs_versioning');
2121
require('./test_bucketspace_versioning');
22+
require('./test_nc_bucket_logging');
2223

2324
// TODO: uncomment when supported
2425
//require('./test_s3_ops');
src/test/unit_tests/test_nc_bucket_logging.js

Lines changed: 50 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,50 @@
1+
/* Copyright (C) 2024 NooBaa */
2+
'use strict';
3+
4+
const path = require('path');
5+
const mocha = require('mocha');
6+
const assert = require('assert');
7+
const fs_utils = require('../../util/fs_utils');
8+
const nb_native = require('../../util/nb_native');
9+
const { get_process_fs_context } = require('../../util/native_fs_utils');
10+
const { ManageCLIResponse } = require('../../manage_nsfs/manage_nsfs_cli_responses');
11+
const { exec_manage_cli, get_coretest_path, TMP_PATH } = require('../system_tests/test_utils');
12+
const { TYPES, ACTIONS } = require('../../manage_nsfs/manage_nsfs_constants');
13+
14+
const DEFAULT_FS_CONFIG = get_process_fs_context();
15+
16+
const coretest_path = get_coretest_path();
17+
const coretest = require(coretest_path);
18+
coretest.setup({});
19+
20+
mocha.describe('cli logging flow', async function() {
21+
this.timeout(50000); // eslint-disable-line no-invalid-this
22+
const bucket_path = path.join(TMP_PATH, 'log_bucket');
23+
const pers_log_path = path.join(TMP_PATH, 'pers_logs');
24+
25+
mocha.before(async () => {
26+
await fs_utils.create_fresh_path(pers_log_path);
27+
await fs_utils.create_fresh_path(bucket_path);
28+
await fs_utils.file_must_exist(bucket_path);
29+
await exec_manage_cli(TYPES.ACCOUNT, ACTIONS.ADD, { name: 'logbucketowner', user: 'root', new_buckets_path: bucket_path});
30+
await exec_manage_cli(TYPES.BUCKET, ACTIONS.ADD, { name: 'logbucket', path: bucket_path, owner: 'logbucketowner'});
31+
const data = '{"noobaa_bucket_logging":"true","op":"GET","bucket_owner":"[email protected]",' +
32+
'"source_bucket":"s3-bucket",' +
33+
'"object_key":"/s3-bucket?list-type=2&prefix=&delimiter=%2F&encoding-type=url",' +
34+
'"log_bucket":"logbucket",' +
35+
'"log_prefix":"","remote_ip":"100.64.0.2",' +
36+
'"request_uri":"/s3-bucket?list-type=2&prefix=&delimiter=%2F&encoding-type=url",' +
37+
'"http_status":102,"request_id":"lztyrl5k-7enflf-19sm"}';
38+
await nb_native().fs.writeFile(DEFAULT_FS_CONFIG, path.join(pers_log_path + '/', 'bucket_logging.log'),
39+
Buffer.from(data + '\n'));
40+
});
41+
42+
mocha.it('cli run logging', async function() {
43+
const res = await exec_manage_cli(TYPES.LOGGING, '', {}, false, { 'GUARANTEED_LOGS_PATH': pers_log_path});
44+
const entries = await nb_native().fs.readdir(DEFAULT_FS_CONFIG, bucket_path);
45+
const log_objects = entries.filter(entry => !entry.name.startsWith('.'));
46+
assert.equal(log_objects.length, 1); // 1 new log_object should have been uploaded to the bucket
47+
const parsed = JSON.parse(res);
48+
assert.equal(parsed.response.code, ManageCLIResponse.LoggingExported.code);
49+
});
50+
});

src/util/bucket_logs_utils.js

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@ const nsfs_schema_utils = require('../manage_nsfs/nsfs_schema_utils');
1212
const Semaphore = require('../util/semaphore');
1313
const P = require('../util/promise');
1414
const nb_native = require('../util/nb_native');
15+
const AWS = require('aws-sdk');
1516

1617
const sem = new Semaphore(config.BUCKET_LOG_CONCURRENCY);
1718

@@ -25,7 +26,7 @@ const BUCKET_NAME_DEL = "_";
2526
/**
2627
* This function will process the persistent log of bucket logging
2728
* and will upload the log files in using provided noobaa connection
28-
* @param {import('../sdk/config_fs').ConfigFS} config_fs
29+
* @param {nb.NativeFSContext} fs_context
2930
* @param {AWS.S3} s3_connection
3031
* @param {function} bucket_to_owner_keys_func
3132
*/
@@ -50,17 +51,16 @@ async function export_logs_to_target(fs_context, s3_connection, bucket_to_owner_
5051
* This function gets a persistent log file, will go over its entries one by one,
5152
* and will upload the entry to the target_bucket using the provided s3 connection
5253
* in order to know which user to use to upload to each bucket we will need to provide bucket_to_owner_keys_func
53-
* @param {import('../sdk/config_fs').ConfigFS} config_fs
54+
* @param {nb.NativeFSContext} fs_context
5455
* @param {AWS.S3} s3_connection
5556
* @param {string} log_file
5657
* @param {function} bucket_to_owner_keys_func
5758
* @returns {Promise<Boolean>}
5859
*/
59-
async function _upload_to_targets(config_fs, s3_connection, log_file, bucket_to_owner_keys_func) {
60+
async function _upload_to_targets(fs_context, s3_connection, log_file, bucket_to_owner_keys_func) {
6061
const bucket_streams = {};
6162
const promises = [];
6263
try {
63-
const fs_context = config_fs.fs_context;
6464
const file = new LogFile(fs_context, log_file);
6565
dbg.log1('uploading file to target buckets', log_file);
6666
await file.collect_and_process(async entry => {
@@ -75,13 +75,12 @@ async function _upload_to_targets(config_fs, s3_connection, log_file, bucket_to_
7575
const upload_stream = new stream.PassThrough();
7676
let access_keys;
7777
try {
78-
access_keys = await bucket_to_owner_keys_func(config_fs, target_bucket);
78+
access_keys = await bucket_to_owner_keys_func(target_bucket);
7979
} catch (err) {
8080
dbg.warn('Error when trying to resolve bucket keys', err);
8181
if (err.rpc_code === 'NO_SUCH_BUCKET') return; // If the log_bucket doesn't exist any more - nowhere to upload - just skip
8282
}
83-
s3_connection.config.credentials.accessKeyId = access_keys[0].access_key;
84-
s3_connection.config.credentials.secretAccessKey = access_keys[0].secret_key;
83+
s3_connection.config.credentials = new AWS.Credentials(access_keys[0].access_key, access_keys[0].secret_key);
8584
const sha = crypto.createHash('sha512').update(target_bucket + date.getTime()).digest('hex');
8685
promises.push(sem.surround(() => P.retry({
8786
attempts: 3,

0 commit comments

Comments (0)