lifecycle | multipart pre-test setup
Signed-off-by: naveenpaul1 <[email protected]>
naveenpaul1 committed Mar 5, 2025
1 parent 4bb9068 commit 2d4ffbe
Showing 2 changed files with 87 additions and 1 deletion.
4 changes: 3 additions & 1 deletion src/sdk/object_io.js
@@ -290,7 +290,9 @@ class ObjectIO {
await this._upload_stream(params, complete_params);
}
dbg.log0('upload_multipart: complete upload', complete_params);
- return params.client.object.complete_multipart(complete_params);
+ const multipart_params = await params.client.object.complete_multipart(complete_params);
+ multipart_params.multipart_id = complete_params.multipart_id;
+ return multipart_params;
} catch (err) {
dbg.warn('upload_multipart: failed', complete_params, err);
// we leave the cleanup of failed multiparts to complete_object_upload or abort_object_upload
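The change above makes upload_multipart return the complete_multipart response augmented with the multipart_id of the part that was just uploaded, so callers can locate the corresponding metadata record. A minimal caller sketch, assuming an object_io instance, an rpc_client, and an obj_id obtained from create_object_upload; the bucket, key, and buffer names are illustrative and not part of this diff:

    // upload one part and keep its md_store id for later lookups/updates
    const resp = await object_io.upload_multipart({
        client: rpc_client,
        obj_id,                                   // from create_object_upload
        bucket: 'example-bucket',
        key: 'example-key',
        num: 1,                                   // part numbers are 1-based
        size: part_buf.length,
        source_stream: stream.Readable.from([part_buf]),
    });
    // available as of this commit: the id of the multipart record that was created
    const multipart_id = new mongodb.ObjectId(resp.multipart_id);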
84 changes: 84 additions & 0 deletions src/test/unit_tests/test_lifecycle.js
@@ -10,14 +10,20 @@ const mocha = require('mocha');
const assert = require('assert');
const mongodb = require('mongodb');
const { v4: uuid } = require('uuid');
const _ = require('lodash');
const crypto = require('crypto');
const stream = require('stream');

const ObjectIO = require('../../sdk/object_io');
const P = require('../../util/promise');
const config = require('../../../config');
const MDStore = require('../../server/object_services/md_store').MDStore;
const coretest = require('./coretest');
const lifecycle = require('../../server/bg_services/lifecycle');
const http_utils = require('../../util/http_utils');
const commonTests = require('../lifecycle/common');
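// cipher stream below is used only as a cheap pseudo-random generator for test object payloads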
const seed = crypto.randomBytes(16);
const generator = crypto.createCipheriv('aes-128-gcm', seed, Buffer.alloc(12));

const { rpc_client, EMAIL } = coretest;
const Bucket = 'first.bucket';
@@ -27,6 +33,10 @@ const TagName2 = 'tagname2';
const TagValue = 'tagvalue';
const TagValue2 = 'tagvalue2';

const object_io = new ObjectIO();
object_io.set_verification_mode();


mocha.describe('lifecycle', () => {

let s3;
@@ -216,4 +226,78 @@ mocha.describe('lifecycle', () => {

console.log('✅ The lifecycle test was completed successfully');
});

mocha.describe('bucket-lifecycle-multipart-upload', function() {
this.timeout(60000);

async function create_mock_multipart_upload(key, bucket, age, part_size, num_parts) {
await rpc_client.bucket.create_bucket({ name: bucket });
const content_type = 'test/test';
const size = num_parts * part_size;
const data = generator.update(Buffer.alloc(size));
const { obj_id } = await rpc_client.object.create_object_upload({ bucket, key, content_type });
const mp_list_before = await rpc_client.object.list_multiparts({ obj_id, bucket, key });
coretest.log('list_multiparts before', mp_list_before);
assert.strictEqual(mp_list_before.multiparts.length, 0);
const multiparts_ids = [];

const get_part_slice = i => data.slice(i * part_size, (i + 1) * part_size);
const upload_multipart = async (i, mp_data, split, finish) => {
const resp = await object_io.upload_multipart({
client: rpc_client,
obj_id,
bucket,
key,
num: i + 1,
size: mp_data.length,
source_stream: readable_buffer(mp_data, split, finish),
});
console.log("upload_multipart", resp);
multiparts_ids.push(new mongodb.ObjectId(resp.multipart_id));
};
// upload the real multiparts we want to complete with
await Promise.all(_.times(num_parts,
i => upload_multipart(i, get_part_slice(i))
));

// go back in time
const create_time = new Date();
create_time.setDate(create_time.getDate() - age);
const update = {
create_time,
};

console.log('create_mock_multipart_upload bucket', bucket, 'obj_id', obj_id, 'multiparts_ids', multiparts_ids);
const update_result = await MDStore.instance().update_multiparts_by_ids(multiparts_ids, update);
console.log('update_multiparts_by_ids', update_result);
const mp_list_after = await rpc_client.object.list_multiparts({ obj_id, bucket, key });
coretest.log('list_multiparts after', mp_list_after);
assert.strictEqual(mp_list_after.multiparts.length, num_parts);
const actual_create_time = mp_list_after.multiparts[0].last_modified;
assert.strictEqual(actual_create_time, create_time.getTime(), `multipart create_time actual ${actual_create_time} !== expected ${create_time.getTime()}`);
}

mocha.it('lifecycle - listMultiPart verify', async () => {
await create_mock_multipart_upload('test-lifecycle-multipart', 'test-bucket-multipart', 3, 45, 7);
});
});

// Test helper: wraps a Buffer in a stream.Readable that pushes the data in `split` chunks;
// finish === 'fail' emits an error instead of ending the stream, to exercise failure paths.
function readable_buffer(data, split = 1, finish = 'end') {
const max = Math.ceil(data.length / split);
let pos = 0;
return new stream.Readable({
read() {
if (pos < data.length) {
const len = Math.min(data.length - pos, max);
const buf = data.slice(pos, pos + len);
pos += len;
setImmediate(() => this.push(buf));
} else if (finish === 'fail') {
this.emit('error', new Error('TEST_OBJECT_IO FAIL ON FINISH'));
} else {
this.push(null);
}
}
});
}
});
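For context, the aged multiparts that create_mock_multipart_upload leaves behind are the kind of leftovers an S3 abort-incomplete-multipart-upload lifecycle rule is meant to clean up. A hedged sketch of such a rule using the standard S3 API; the s3 client and whether the repo's lifecycle worker already consumes this rule type are assumptions, not part of this diff:

    // illustrative only: abort multipart uploads that stay incomplete for more than 1 day
    await s3.putBucketLifecycleConfiguration({
        Bucket: 'test-bucket-multipart',
        LifecycleConfiguration: {
            Rules: [{
                ID: 'abort-incomplete-mpu',
                Status: 'Enabled',
                Filter: { Prefix: '' },
                AbortIncompleteMultipartUpload: { DaysAfterInitiation: 1 },
            }],
        },
    }).promise();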
